diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 0000000..4af5eb2 --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,14 @@ +coverage: + status: + project: # more options at https://docs.codecov.com/docs/commit-status + default: + target: auto # use the coverage from the base commit, fail if coverage is lower + threshold: 0% # allow the coverage to drop by + +comment: + layout: " diff, flags, files" + behavior: default + require_changes: false + require_base: false # [true :: must have a base report to post] + require_head: false # [true :: must have a head report to post] + hide_project_coverage: false # [true :: only show coverage on the git diff aka patch coverage] diff --git a/.codespell/ignore_lines.txt b/.codespell/ignore_lines.txt new file mode 100644 index 0000000..07fa7c8 --- /dev/null +++ b/.codespell/ignore_lines.txt @@ -0,0 +1,2 @@ +;; Please include filenames and explanations for each ignored line. +;; See https://docs.openverse.org/meta/codespell.html for docs. diff --git a/.codespell/ignore_words.txt b/.codespell/ignore_words.txt new file mode 100644 index 0000000..04b4fcf --- /dev/null +++ b/.codespell/ignore_words.txt @@ -0,0 +1,8 @@ +;; Please include explanations for each ignored word (lowercase). +;; See https://docs.openverse.org/meta/codespell.html for docs. 
+ +;; abbreviation for "materials" often used in a journal title +mater + +;; Frobenius norm used in np.linalg.norm +fro diff --git a/.flake8 b/.flake8 new file mode 100644 index 0000000..88077af --- /dev/null +++ b/.flake8 @@ -0,0 +1,13 @@ +# As of now, flake8 does not natively support configuration via pyproject.toml +# https://github.com/microsoft/vscode-flake8/issues/135 +[flake8] +exclude = + .git, + __pycache__, + build, + dist, + docs/source/conf.py +max-line-length = 79 +# Ignore some style 'errors' produced while formatting by 'black' +# https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#labels-why-pycodestyle-warnings +extend-ignore = E203 diff --git a/.github/ISSUE_TEMPLATE/bug_feature.md b/.github/ISSUE_TEMPLATE/bug_feature.md new file mode 100644 index 0000000..b3454de --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_feature.md @@ -0,0 +1,16 @@ +--- +name: Bug Report or Feature Request +about: Report a bug or suggest a new feature! +title: "" +labels: "" +assignees: "" +--- + +### Problem + + + +### Proposed solution diff --git a/.github/ISSUE_TEMPLATE/release_checklist.md b/.github/ISSUE_TEMPLATE/release_checklist.md new file mode 100644 index 0000000..56bcd01 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/release_checklist.md @@ -0,0 +1,46 @@ +--- +name: Release +about: Checklist and communication channel for PyPI and GitHub release +title: "Ready for PyPI/GitHub release" +labels: "release" +assignees: "" +--- + +### PyPI/GitHub rc-release preparation checklist: + +- [ ] All PRs/issues attached to the release are merged. +- [ ] All the badges on the README are passing. +- [ ] License information is verified as correct. If you are unsure, please comment below. +- [ ] Locally rendered documentation contains all appropriate pages, including API references (check no modules are + missing), tutorials, and other human-written text is up-to-date with any changes in the code. 
+- [ ] Installation instructions in the README, documentation, and the website are updated. +- [ ] Successfully run any tutorial examples or do functional testing with the latest Python version. +- [ ] Grammar and writing quality are checked (no typos). +- [ ] Install `pip install build twine`, run `python -m build` and `twine check dist/*` to ensure that the package can be built and is correctly formatted for PyPI release. + +Please tag the maintainer (e.g., @username) in the comment here when you are ready for the PyPI/GitHub release. Include any additional comments necessary, such as version information and details about the pre-release here: + +### PyPI/GitHub full-release preparation checklist: + +- [ ] Create a new conda environment and install the rc from PyPI (`pip install ==??`) +- [ ] License information on PyPI is correct. +- [ ] Docs are deployed successfully to `https:///`. +- [ ] Successfully run all tests, tutorial examples or do functional testing. + +Please let the maintainer know that all checks are done and the package is ready for full release. + +### conda-forge release preparation checklist: + + + +- [ ] Ensure that the full release has appeared on PyPI successfully. +- [ ] New package dependencies listed in `conda.txt` and `tests.txt` are added to `meta.yaml` in the feedstock. +- [ ] Close any open issues on the feedstock. Reach out to the maintainer if you have questions. +- [ ] Tag the maintainer for conda-forge release. + +### Post-release checklist + + + +- [ ] Run tutorial examples and conduct functional testing using the installation guide in the README. Attach screenshots/results as comments. +- [ ] Documentation (README, tutorials, API references, and websites) is deployed without broken links or missing figures. 
diff --git a/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md new file mode 100644 index 0000000..1099d86 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE/pull_request_template.md @@ -0,0 +1,15 @@ +### What problem does this PR address? + + + +### What should the reviewer(s) do? + + + + diff --git a/.github/workflows/build-wheel-release-upload.yml b/.github/workflows/build-wheel-release-upload.yml new file mode 100644 index 0000000..9763d03 --- /dev/null +++ b/.github/workflows/build-wheel-release-upload.yml @@ -0,0 +1,18 @@ +name: Release (GitHub/PyPI) and Deploy Docs + +on: + workflow_dispatch: + push: + tags: + - "*" # Trigger on all tags initially, but tag and release privilege are verified in _build-wheel-release-upload.yml + +jobs: + build-release: + uses: scikit-package/release-scripts/.github/workflows/_build-wheel-release-upload.yml@v0 + with: + project: diffpy.srxplanar + c_extension: false + maintainer_GITHUB_username: sbillinge + secrets: + PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }} + PAT_TOKEN: ${{ secrets.PAT_TOKEN }} diff --git a/.github/workflows/check-news-item.yml b/.github/workflows/check-news-item.yml new file mode 100644 index 0000000..c1588da --- /dev/null +++ b/.github/workflows/check-news-item.yml @@ -0,0 +1,12 @@ +name: Check for News + +on: + pull_request_target: + branches: + - main + +jobs: + check-news-item: + uses: scikit-package/release-scripts/.github/workflows/_check-news-item.yml@v0 + with: + project: diffpy.srxplanar diff --git a/.github/workflows/matrix-and-codecov-on-merge-to-main.yml b/.github/workflows/matrix-and-codecov-on-merge-to-main.yml new file mode 100644 index 0000000..d36ff9f --- /dev/null +++ b/.github/workflows/matrix-and-codecov-on-merge-to-main.yml @@ -0,0 +1,21 @@ +name: CI + +on: + push: + branches: + - main + release: + types: + - prereleased + - published + workflow_dispatch: + +jobs: + matrix-coverage: + uses: 
scikit-package/release-scripts/.github/workflows/_matrix-and-codecov-on-merge-to-main.yml@v0 + with: + project: diffpy.srxplanar + c_extension: false + headless: false + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.github/workflows/publish-docs-on-release.yml b/.github/workflows/publish-docs-on-release.yml new file mode 100644 index 0000000..625eb6c --- /dev/null +++ b/.github/workflows/publish-docs-on-release.yml @@ -0,0 +1,12 @@ +name: Deploy Documentation on Release + +on: + workflow_dispatch: + +jobs: + docs: + uses: scikit-package/release-scripts/.github/workflows/_publish-docs-on-release.yml@v0 + with: + project: diffpy.srxplanar + c_extension: false + headless: false diff --git a/.github/workflows/tests-on-pr.yml b/.github/workflows/tests-on-pr.yml new file mode 100644 index 0000000..2724204 --- /dev/null +++ b/.github/workflows/tests-on-pr.yml @@ -0,0 +1,16 @@ +name: Tests on PR + +on: + pull_request: + workflow_dispatch: + +jobs: + tests-on-pr: + uses: scikit-package/release-scripts/.github/workflows/_tests-on-pr.yml@v0 + with: + project: diffpy.srxplanar + c_extension: false + headless: false + python_version: "3.13" + secrets: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 0000000..86f162b --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,5 @@ +[settings] +# Keep import statement below line_length character limit +line_length = 79 +multi_line_output = 3 +include_trailing_comma = True diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..0e4a84d --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,66 @@ +default_language_version: + python: python3 +ci: + autofix_commit_msg: | + [pre-commit.ci] auto fixes from pre-commit hooks + autofix_prs: true + autoupdate_branch: "pre-commit-autoupdate" + autoupdate_commit_msg: "[pre-commit.ci] pre-commit autoupdate" + autoupdate_schedule: monthly + skip: [no-commit-to-branch] + submodules: 
false +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: check-yaml + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-case-conflict + - id: check-merge-conflict + - id: check-toml + - id: check-added-large-files + - repo: https://github.com/psf/black + rev: 24.4.2 + hooks: + - id: black + - repo: https://github.com/pycqa/flake8 + rev: 7.0.0 + hooks: + - id: flake8 + - repo: https://github.com/pycqa/isort + rev: 5.13.2 + hooks: + - id: isort + args: ["--profile", "black"] + - repo: https://github.com/kynan/nbstripout + rev: 0.7.1 + hooks: + - id: nbstripout + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: no-commit-to-branch + name: Prevent Commit to Main Branch + args: ["--branch", "main"] + stages: [pre-commit] + - repo: https://github.com/codespell-project/codespell + rev: v2.3.0 + hooks: + - id: codespell + additional_dependencies: + - tomli + # prettier - multi formatter for .json, .yml, and .md files + - repo: https://github.com/pre-commit/mirrors-prettier + rev: f12edd9c7be1c20cfa42420fd0e6df71e42b51ea # frozen: v4.0.0-alpha.8 + hooks: + - id: prettier + additional_dependencies: + - "prettier@^3.2.4" + # docformatter - PEP 257 compliant docstring formatter + - repo: https://github.com/s-weigand/docformatter + rev: 5757c5190d95e5449f102ace83df92e7d3b06c6c + hooks: + - id: docformatter + additional_dependencies: [tomli] + args: [--in-place, --config, ./pyproject.toml] diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..aaa8889 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,13 @@ +version: 2 + +build: + os: "ubuntu-22.04" + tools: + python: "latest" + +python: + install: + - requirements: requirements/docs.txt + +sphinx: + configuration: docs/source/conf.py diff --git a/AUTHORS.rst b/AUTHORS.rst new file mode 100644 index 0000000..a16c5ae --- /dev/null +++ b/AUTHORS.rst @@ -0,0 +1,10 @@ +Authors +======= + +Xiaohao Yang and 
Billinge Group members + +Contributors +------------ + +For a list of contributors, visit +https://github.com/diffpy/diffpy.srxplanar/graphs/contributors diff --git a/AUTHORS.txt b/AUTHORS.txt deleted file mode 100644 index 4306273..0000000 --- a/AUTHORS.txt +++ /dev/null @@ -1 +0,0 @@ -Xiaohao Yang diff --git a/CHANGELOG.rst b/CHANGELOG.rst new file mode 100644 index 0000000..f29d3b5 --- /dev/null +++ b/CHANGELOG.rst @@ -0,0 +1,5 @@ +============= +Release notes +============= + +.. current developments diff --git a/CODE-OF-CONDUCT.rst b/CODE-OF-CONDUCT.rst new file mode 100644 index 0000000..e8199ca --- /dev/null +++ b/CODE-OF-CONDUCT.rst @@ -0,0 +1,133 @@ +===================================== + Contributor Covenant Code of Conduct +===================================== + +Our Pledge +---------- + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socioeconomic status, +nationality, personal appearance, race, caste, color, religion, or sexual +identity and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +Our Standards +------------- + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the overall + community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or advances of + any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email address, + without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +Enforcement Responsibilities +---------------------------- + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +Scope +----- + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official email address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. 
+ +Enforcement +----------- + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +sb2896@columbia.edu. All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +Enforcement Guidelines +---------------------- + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +1. Correction +**************** + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +2. Warning +************* + +**Community Impact**: A violation through a single incident or series of +actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or permanent +ban. + +3. Temporary Ban +****************** + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. + +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +4. 
Permanent Ban +****************** + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within the +community. + +Attribution +----------- + +This Code of Conduct is adapted from the `Contributor Covenant `_. + +Community Impact Guidelines were inspired by `Mozilla's code of conduct enforcement ladder `_. + +For answers to common questions about this code of conduct, see the `FAQ `_. `Translations are available `_ diff --git a/LICENSE.rst b/LICENSE.rst new file mode 100644 index 0000000..db4e4f5 --- /dev/null +++ b/LICENSE.rst @@ -0,0 +1,30 @@ +BSD 3-Clause License + +Copyright (c) 2008-2025, The Trustees of Columbia University in the City of New York. +Copyright (c) 1994-2014, Christoph Gohlke +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/LICENSE.txt b/LICENSE.txt deleted file mode 100644 index f982e53..0000000 --- a/LICENSE.txt +++ /dev/null @@ -1,33 +0,0 @@ -This program is part of the DiffPy and DANSE open-source projects at Columbia -University and is available subject to the conditions and terms laid out below. - -Copyright © 2008-2011, Trustees of Columbia University in the City of New York, -all rights reserved. - -For more information please visit the diffpy web-page at http://diffpy.org or -email Prof. Simon Billinge at sb2896@columbia.edu. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - * Neither the names of COLUMBIA UNIVERSITY, MICHIGAN STATE UNIVERSITY nor the - names of their contributors may be used to endorse or promote products - derived from this software without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/LICENSE_tifffiles.txt b/LICENSE_tifffiles.txt deleted file mode 100644 index d0408f9..0000000 --- a/LICENSE_tifffiles.txt +++ /dev/null @@ -1,26 +0,0 @@ -Copyright (c) 1994-2014, Christoph Gohlke -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. -* Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. -* Neither the name of the copyright holders nor the names of any - contributors may be used to endorse or promote products derived - from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -POSSIBILITY OF SUCH DAMAGE. diff --git a/MANIFEST.in b/MANIFEST.in index c237fbd..f1a78ee 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,12 @@ -recursive-include diffpy * -include AUTHORS.txt LICENSE.txt README.md -include setup.cfg -exclude MANIFEST.in -recursive-exclude diffpy *.pyc +graft src +graft tests +graft requirements + +include AUTHORS.rst LICENSE*.rst README.rst + +# Exclude all bytecode files and __pycache__ directories +global-exclude *.py[cod] # Exclude all .pyc, .pyo, and .pyd files. +global-exclude .DS_Store # Exclude Mac filesystem artifacts. +global-exclude __pycache__ # Exclude Python cache directories. +global-exclude .git* # Exclude git files and directories. +global-exclude .idea # Exclude PyCharm project settings. diff --git a/README.rst b/README.rst index 9472ec3..76430be 100644 --- a/README.rst +++ b/README.rst @@ -1,97 +1,127 @@ -diffpy.srxplanar -======================================================================== +|Icon| |title|_ +=============== + +.. |title| replace:: diffpy.srxplanar +.. _title: https://diffpy.github.io/diffpy.srxplanar + +.. |Icon| image:: https://avatars.githubusercontent.com/diffpy + :target: https://diffpy.github.io/diffpy.srxplanar + :height: 100px + +|PyPI| |Forge| |PythonVersion| |PR| + +|CI| |Codecov| |Black| |Tracking| + +.. |Black| image:: https://img.shields.io/badge/code_style-black-black + :target: https://github.com/psf/black + +.. 
|CI| image:: https://github.com/diffpy/diffpy.srxplanar/actions/workflows/matrix-and-codecov-on-merge-to-main.yml/badge.svg + :target: https://github.com/diffpy/diffpy.srxplanar/actions/workflows/matrix-and-codecov-on-merge-to-main.yml + +.. |Codecov| image:: https://codecov.io/gh/diffpy/diffpy.srxplanar/branch/main/graph/badge.svg + :target: https://codecov.io/gh/diffpy/diffpy.srxplanar + +.. |Forge| image:: https://img.shields.io/conda/vn/conda-forge/diffpy.srxplanar + :target: https://anaconda.org/conda-forge/diffpy.srxplanar + +.. |PR| image:: https://img.shields.io/badge/PR-Welcome-29ab47ff + :target: https://github.com/diffpy/diffpy.srxplanar/pulls + +.. |PyPI| image:: https://img.shields.io/pypi/v/diffpy.srxplanar + :target: https://pypi.org/project/diffpy.srxplanar/ + +.. |PythonVersion| image:: https://img.shields.io/pypi/pyversions/diffpy.srxplanar + :target: https://pypi.org/project/diffpy.srxplanar/ + +.. |Tracking| image:: https://img.shields.io/badge/issue_tracking-github-blue + :target: https://github.com/diffpy/diffpy.srxplanar/issues + +This is part of the xPDFsuite package. diffpy.srxplanar package provides 2D diffraction image integration using non splitting pixel algorithm. And it can estimate and propagate statistic -uncertainty of raw counts and integrated intensity. If you are using this -software. If you use this program to do productive scientific research that -leads to publication, we kindly ask that you acknowledge use of the program -by citing the following paper in your publication: - - Xiaohao Yang, Pavol Juhas, Simon J. L. Billinge, On the estimation of - statistical uncertainties on powder diffraction and small angle - scattering data from 2-D x-ray detectors, arXiv:1309.3614 +uncertainty of raw counts and integrated intensity. 
To learn more about diffpy.srxplanar library, see the examples directory included in this distribution or the API documentation at http://diffpy.github.io/diffpy.srxplanar/ -REQUIREMENTS ------------------------------------------------------------------------- +For more information about the diffpy.srxplanar library, please consult our `online documentation `_. + +Citation +-------- + +If you use this program to do productive scientific research that +leads to publication, we kindly ask that you acknowledge use of the program +by citing the following paper in your publication: + + Xiaohao Yang, Pavol Juhas, Simon J. L. Billinge, On the estimation of + statistical uncertainties on powder diffraction and small angle + scattering data from 2-D x-ray detectors, arXiv:1309.3614 + +Installation +------------ + +The preferred method is to be installed with `xpdfsuite` package or the wheel file. + +To install the package with the wheel file, you can first download the wheel file, then type -The diffpy.srxplanar requires Python 2.6 or 2.7 and the following software: +`pip install /path/to/diffpy.srxplanar-X.Y.Z-py3-none-any.whl`. -* `setuptools` >=0.61(https://pypi.python.org/pypi/setuptools) -* `numpy` >=1.60(http://www.numpy.org/) -* `scipy` >=1.10(www.scipy.org/) -* `FabIO` >=0.80(http://sourceforge.net/projects/fable/files/fabio/) -If your python version < 2.7 (these two packages are included in 2.7 but not in 2.6) - -* `ordereddict` https://pypi.python.org/pypi/ordereddict -* `argparse` https://pypi.python.org/pypi/argparse +This package also provides command-line utilities. 
To check the software has been installed correctly, type :: -On Ubuntu Linux the part of required software can be easily installed using -the system package manager:: + diffpy.srxplanar --version - sudo aptitude install \ - python-setuptools python-numpy python-scipy - -For Mac OS X machine with the MacPorts package manager one could do:: +You can also type the following command to verify the installation. :: - sudo port install \ - python27 py27-setuptools py27-numpy py27-scipy + python -c "import diffpy.srxplanar; print(diffpy.srxplanar.__version__)" -When installing with MacPorts, make sure the MacPorts bin directory is the -first in the system PATH and that python27 is selected as the default -Python version in MacPorts:: - sudo port select --set python python27 - -For other Linux distributions use their respective package manager; note -the packages may have slightly different names. diffpy.srxplanar should work -on other Unix-like operating systems as well. Please, search the -web for instructions how to install external dependencies on your particular -system. +To view the basic usage and available commands, type :: -For other packages, please go to the webpage list above to download and install. + diffpy.srxplanar -h +Getting Started +--------------- -INSTALLATION ------------------------------------------------------------------------- +You may consult our `online documentation `_ for tutorials and API references. -To install the diffpy.srxplanar package:: +Support and Contribute +---------------------- - python setup.py install +If you see a bug or want to request a feature, please `report it as an issue `_ and/or `submit a fix as a PR `_. -By default the files are installed in the system directories, which are -usually only writeable by the root. See the usage info -"./setup.py install --help" for options to install as a normal user under -different location. 
Note that installation to non-standard directories may -require adjustments to the PATH and PYTHONPATH environment variables. +Feel free to fork the project. To install diffpy.srxplanar +in a development mode, with its sources being directly used by Python +rather than copied to a package directory, use the following in the root +directory :: -DEVELOPMENT ------------------------------------------------------------------------- + pip install -e . -diffpy.srxplanar is an open-source software developed at the Columbia University -The diffpy.srxplanar sources are hosted at +To ensure code quality and to prevent accidental commits into the default branch, please set up the use of our pre-commit +hooks. -https://github.com/diffpy/diffpy.srxplanar +1. Install pre-commit in your working environment by running ``conda install pre-commit``. -Feel free to fork the project and contribute. To install diffpy.srxplanar -in a development mode, where the sources are directly used by Python -rather than copied to a system directory, use:: +2. Initialize pre-commit (one time only) ``pre-commit install``. - python setup.py develop --user +Thereafter your code will be linted by black and isort and checked against flake8 before you can commit. +If it fails by black or isort, just rerun and it should pass (black and isort will modify the files so should +pass after they are modified). If the flake8 test fails please see the error messages and fix them manually before +trying to commit again. -CONTACTS ------------------------------------------------------------------------- +Improvements and fixes are always appreciated. -For more information on diffpy.srxplanar please visit the project web-page: +Before contributing, please read our `Code of Conduct `_. -http://www.diffpy.org/ +Contact +------- -or email Prof. Simon Billinge at sb2896@columbia.edu +For more information on diffpy.srxplanar please visit the project `web-page `_ or email Simon Billinge at sb2896@columbia.edu. 
+Acknowledgements +---------------- +``diffpy.srxplanar`` is built and maintained with `scikit-package `_. diff --git a/cookiecutter.json b/cookiecutter.json new file mode 100644 index 0000000..9c24d55 --- /dev/null +++ b/cookiecutter.json @@ -0,0 +1,18 @@ +{ + "maintainer_name": "Simon Billinge", + "maintainer_email": "sb2896@columbia.edu", + "maintainer_github_username": "sbillinge", + "contributors": "Simon Billinge, Billinge Group members", + "license_holders": "The Trustees of Columbia University in the City of New York", + "project_name": "diffpy.srxplanar", + "github_username_or_orgname": "diffpy", + "github_repo_name": "diffpy.srxplanar", + "conda_pypi_package_dist_name": "diffpy.srxplanar", + "package_dir_name": "diffpy.srxplanar", + "project_short_description": "2D diffraction image integration using a non-splitting pixel algorithm, with estimation and propagation of statistical uncertainties. Part of xPDFsuite", + "project_keywords": "diffpy, pdf, data interpretation", + "minimum_supported_python_version": "3.12", + "maximum_supported_python_version": "3.14", + "project_needs_c_code_compiled": "No", + "project_has_gui_tests": "No" +} diff --git a/diffpy/__init__.py b/diffpy/__init__.py deleted file mode 100644 index 794cadc..0000000 --- a/diffpy/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python -############################################################################## -# -# diffpy.srxplanar by DANSE Diffraction group -# Simon J. L. Billinge -# (c) 2010 Trustees of the Columbia University -# in the City of New York. All rights reserved. -# -# File coded by: Xiaohao Yang -# -# See AUTHORS.txt for a list of people who contributed. -# See LICENSE.txt for license information. 
-# -############################################################################## - -__import__('pkg_resources').declare_namespace(__name__) - diff --git a/diffpy/confutils/config.py b/diffpy/confutils/config.py deleted file mode 100644 index 2fcc9a4..0000000 --- a/diffpy/confutils/config.py +++ /dev/null @@ -1,733 +0,0 @@ -#!/usr/bin/env python -############################################################################## -# -# diffpy.confutils by DANSE Diffraction group -# Simon J. L. Billinge -# (c) 2012 Trustees of the Columbia University -# in the City of New York. All rights reserved. -# -# File coded by: Xiaohao Yang -# -# See AUTHORS.txt for a list of people who contributed. -# See LICENSE.txt for license information. -# -############################################################################## - -''' -package for organizing program configurations. It can read/write configurations -file, parse arguments from command lines, and also parse arguments passed from -method/function calling inside python. - -Note: for python 2.6, argparse and orderedDict is required, install them with -easy_install -''' - - -import ConfigParser -import re -import os -import sys -from functools import partial -import argparse -try: - from collections import OrderedDict -except: - from ordereddict import OrderedDict - -from diffpy.confutils.tools import _configPropertyRad, _configPropertyR, \ - _configPropertyRW, str2bool, opt2Str, str2Opt, StrConv, FakeConfigFile - -class ConfigBase(object): - ''' - _optdatalist_default, _optdatalist are metadata used to - initialize the options, see below for examples - - options presents in --help (in cmd), config file, headers have same order as - in these list, so arrange them in right order here. 
- - optional args to control if the options presents in args, config file or - file header - - 'args' - default is 'a' - if 'a', this option will be available in self.args - if 'n', this option will not be available in self.args - 'config' - default is 'a' - if 'f', this option will present in self.config and be written to - config file only in full mode - if 'a', this option will present in self.config and be written to - config file both in full and short mode - if 'n', this option will not present in self.config - 'header' - default is 'a' - if 'f', this option will be written to header only in full mode - if 'a', this option will be written to header both in full and short - mode - if 'n', this option will not be written to header - - so in short mode, all options with 'a' will be written, in full mode, - all options with 'a' or 'f' will be written - ''' - - # Text to display before the argument help - _description = \ - '''Description of configurations - ''' - # Text to display after the argument help - _epilog = \ - ''' - ''' - - ''' - optdata contains these keys: - these args will be passed to argparse, see the documents of argparse for - detail information - - 'f': full, (positional) - 's': short - 'h': help - 't': type - 'a': action - 'n': nargs - 'd': default - 'c': choices - 'r': required - 'de': dest - 'co': const - ''' - _optdatanamedict = {'h':'help', - 't':'type', - 'a':'action', - 'n':'nargs', - 'd':'default', - 'c':'choices', - 'r':'required', - 'de':'dest', - 'co':'const'} - - # examples, overload it - _optdatalist_default = [ - ['configfile', {'sec':'Control', 'config':'f', 'header':'n', - 's':'c', - 'h':'name of input config file', - 'd':'', }], - ['createconfig', {'sec':'Control', 'config':'n', 'header':'n', - 'h':'create a config file according to default or current values', - 'd':'', }], - ['createconfigfull', {'sec':'Control', 'config':'n', 'header':'n', - 'h':'create a full configurable config file', - 'd':'', }], - ] - # examples, overload 
it - _optdatalist = [ - ['tifdirectory', {'sec':'Experiment', 'header':'n', - 's':'tifdir', - 'h':'directory of raw tif files', - 'd':'currentdir', }], - ['integrationspace', {'sec':'Experiment', - 'h':'integration space, could be twotheta or qspace', - 'd':'twotheta', - 'c':['twotheta', 'qspace'], }], - ['wavelength', {'sec':'Experiment', - 'h':'wavelength of x-ray, in A', - 'd':0.1000, }], - ['rotationd', {'sec':'Experiment', - 's':'rot', - 'h':'rotation angle of tilt plane, in degree', - 'd':0.0, }], - ['includepattern', {'sec':'Beamline', - 's':'ipattern', - 'h':'file name pattern for included files', - 'n':'*', - 'd':['*.tif'], }], - ['excludepattern', {'sec':'Beamline', - 's':'epattern', - 'h':'file name pattern for excluded files', - 'n':'*', - 'd':['*.dark.tif', '*.raw.tif'], }], - ['fliphorizontal', {'sec':'Beamline', - 'h':'filp the image horizontally', - 'n':'?', - 'co':True, - 'd':False, }], - ['regulartmatrixenable', {'sec':'Others', - 'h':'normalize tmatrix in splitting method', - 'n':'?', - 'co':True, - 'd':False, }], - ['maskedges', {'sec':'Others', 'config':'f', 'header':'f', - 'h':'mask the edge pixels, first four means the number of pixels masked in each edge \ - (left, right, top, bottom), the last one is the radius of a region masked around the corner', - 'n':5, - 'd':[1, 1, 1, 1, 50], }], - ] - - # some default data - # configfile: default config file name - # headertitle: default title of header - _defaultdata = {'configfile': ['config.cfg'], - 'headertitle': 'Configuration information' - } - - - def __init__(self, filename=None, args=None, **kwargs): - ''' - init the class and update the values of options if specified in - filename/args/kwargs - - it will: - 1. call self._preInit method - 2. find the config file if specified in filename/args/kwargs - if failed, try to find default config file - 3. 
update the options value using filename/args/kwargs - file > args > kwargs - - :param filename: str, file name of the config file - :param args: list of str, args passed from cmd - :param kwargs: dict, optional kwargs - - :return: None - ''' - # call self._preInit - self._preInit(**kwargs) - - # update config, first detect if a default config should be load - filename = self._findDefaultConfigFile(filename, args, **kwargs) - rv = self.updateConfig(filename, args, **kwargs) - return - - # example, overload it - def _preInit(self, **kwargs): - ''' - method called in init process, overload it! - - this method will be called before reading config from file/args/kwargs - ''' - # for name in ['rotation']: - # setattr(self.__class__, name, _configPropertyRad(name+'d')) - # self._configlist['Experiment'].extend(['rotation']) - return - - ########################################################################### - - def _findConfigFile(self, filename=None, args=None, **kwargs): - ''' - find config file, if any config is specified in filename/args/kwargs - then return the filename of config. - - :param filename: str, file name of config file - :param filename: list of str, args passed from cmd - :param kwargs: optional kwargs - - :return: name of config file if found, otherwise None - ''' - rv = None - if (filename != None): - rv = filename - if (args != None): - if ('--configfile' in args) or ('-c' in args): - obj = self.args.parse_args(args) - rv = obj.configfile - if kwargs.has_key('configfile'): - rv = kwargs['configfile'] - return rv - - def _findDefaultConfigFile(self, filename=None, args=None, **kwargs): - ''' - find default config file, if any config is specified in - filename/args/kwargs or in self._defaultdata['configfile'], then return - the filename of config. 
- - kwargs > args > filename > default - - param filename: str, file name of config file - param filename: list of str, args passed from cmd - param kwargs: optional kwargs - - return: name of config file if found, otherwise None - ''' - rv = self._findConfigFile(filename, args, **kwargs) - if rv == None: - for dconf in self._defaultdata['configfile']: - if (os.path.exists(dconf))and(rv == None): - rv = dconf - return rv - - ########################################################################### - - def _updateSelf(self, optnames=None, **kwargs): - ''' - update the options value, then copy the values in the self.'options' to - self.config - - 1. call self._preUpdateSelf - 2. apply options' value from *self.option* to self.config - 3. call self._postUpdateSelf - - :param optnames: str or list of str, name of options whose value has - been changed, if None, update all options - ''' - # so some check right here - self._preUpdateSelf(**kwargs) - # copy value to self.config - self._copySelftoConfig(optnames) - # so some check right here - self._postUpdateSelf(**kwargs) - return - - # example, overload it - def _preUpdateSelf(self, **kwargs): - ''' - additional process called in self._updateSelf, this method is called - before self._copySelftoConfig(), i.e. before copy options value to - self.config (config file) - ''' - return - - def _postUpdateSelf(self, **kwargs): - ''' - additional process called in self._updateSelf, this method is called - after self._copySelftoConfig(), i.e. 
before copy options value to - self.config (config file) - ''' - return - - ########################################################################### - - def _getTypeStr(self, optname): - ''' - return the type of option - - :param optname: str, name of option - - :return: string, type of the option - ''' - opttype = self._getTypeStrC(optname) - return opttype - - @classmethod - def _getTypeStrC(cls, optname): - ''' - class method, return the type of option - first try to get type information from metadata, if failed, try - to get type from default value - - :param optname: str, name of option - - :return: string, type of the option - ''' - optdata = cls._optdata[optname] - if optdata.has_key('t'): - opttype = optdata['t'] - else: - value = optdata['d'] - if isinstance(value, str): - opttype = 'str' - elif isinstance(value, bool): - opttype = 'bool' - elif isinstance(value, float): - opttype = 'float' - elif isinstance(value, int): - opttype = 'int' - elif isinstance(value, list): - if len(value) == 0: - opttype = 'strlist' - elif isinstance(value[0], str): - opttype = 'strlist' - elif isinstance(value[0], bool): - opttype = 'boollist' - elif isinstance(value[0], float): - opttype = 'floatlist' - elif isinstance(value[0], int): - opttype = 'intlist' - - return opttype - - ########################################################################### - - def _detectAddSections(self): - ''' - detect sections present in self._optdata and add them to self.config - also add it to self._configlist - ''' - self._detectAddSectionsC(self) - return - - @classmethod - def _detectAddSectionsC(cls): - ''' - class method, detect sections present in self._optdata and add them to self.config - also add it to self._configlist - ''' - # seclist = [self._optdata[key]['sec'] for key in self._optdata.keys()] - seclist = [cls._optdata[opt[0]]['sec'] for opt in cls._optdatalist] - secdict = OrderedDict.fromkeys(seclist) - # for sec in set(seclist): - for sec in secdict.keys(): - 
cls.config.add_section(sec) - cls._configlist[sec] = [] - return - - def _addOpt(self, optname): - ''' - add options to self.config and self.args and self.*option*, - this will read metadata from self._optdatalist - - :param optname: string, name of option - ''' - self._addOptC(self, optname) - return - - @classmethod - def _addOptC(cls, optname): - ''' - Class method, add options to self.config and self.args and - self.*option*, this will read metadata in self._optdatalist - - :param optname: string, name of option - ''' - optdata = cls._optdata[optname] - opttype = cls._getTypeStrC(optname) - - # replace currentdir in default to os.getcwd() - if optdata['d'] == 'currentdir': - optdata['d'] = os.getcwd() - - # add to cls.'optname' - cls._addOptSelfC(optname, optdata) - - # add to cls.config - secname = optdata['sec'] if optdata.has_key('sec') else 'Others' - cls._configlist[secname].append(optname) - if optdata.get('config', 'a') != 'n': - strvalue = ', '.join(map(str, optdata['d'])) if isinstance(optdata['d'], list) else str(optdata['d']) - cls.config.set(secname, optname, strvalue) - # add to cls.args - if optdata.get('args', 'a') != 'n': - # transform optdata to a dict that can pass to add_argument method - pargs = dict() - for key in optdata.keys(): - if cls._optdatanamedict.has_key(key): - pargs[cls._optdatanamedict[key]] = optdata[key] - pargs['default'] = argparse.SUPPRESS - pargs['type'] = StrConv(opttype) - # add args - if optdata.has_key('f'): - cls.args.add_argument(optname, **pargs) - elif optdata.has_key('s'): - cls.args.add_argument('--' + optname, '-' + optdata['s'], **pargs) - else: - cls.args.add_argument('--' + optname, **pargs) - return - - @classmethod - def _addOptSelfC(cls, optname, optdata): - ''' - class method, assign options value to *self.option*, using metadata - - :param optname: string, name of the option - :param optdata: dict, metadata of the options, get it from self._optdatalist - ''' - setattr(cls, optname, optdata['d']) - return 
- - def _copyConfigtoSelf(self, optnames=None): - ''' - copy the options' value from self.config to self.*option* - - :param optnames: str or list of str, names of options whose value copied - from self.config to self.*option*'. Set None to update all - ''' - if optnames != None: - optnames = optnames if isinstance(optnames, list) else [optnames] - else: - optnames = [] - for secname in self.config.sections(): - optnames += self.config.options(secname) - - for optname in optnames: - if self._optdata.has_key(optname): - secname = self._optdata[optname]['sec'] - opttype = self._getTypeStr(optname) - optvalue = self.config.get(secname, optname) - setattr(self, optname, str2Opt(opttype, optvalue)) - return - - def _copySelftoConfig(self, optnames=None): - ''' - copy the value from self.*option* to self.config - - :param optname: str or list of str, names of options whose value copied - from self.*option* to self.config. Set None to update all - ''' - if optnames != None: - optnames = optnames if isinstance(optnames, list) else [optnames] - else: - optnames = [] - for secname in self.config.sections(): - optnames += self.config.options(secname) - - for optname in optnames: - if self._optdata.has_key(optname): - secname = self._optdata[optname]['sec'] - opttype = self._getTypeStr(optname) - optvalue = getattr(self, optname) - self.config.set(secname, optname, opt2Str(opttype, optvalue)) - return - - ########################################################################### - - def parseArgs(self, pargs): - ''' - parse args and update the value in self.*option*, this will call the - self.args() to parse args, - - :param pargs: list of string, arguments to parse, usually comming from sys.argv - ''' - obj = self.args.parse_args(pargs) - changedargs = obj.__dict__.keys() - for optname in changedargs: - if self._optdata.has_key(optname): - setattr(self, optname, getattr(obj, optname)) - # update self - if len(changedargs) > 0: - self._updateSelf(changedargs) - return obj - - 
def parseKwargs(self, **kwargs): - ''' - update self.*option* values according to the kwargs - - :param kwargs: dict, keywords=value - ''' - if kwargs != {}: - changedargs = [] - for optname, optvalue in kwargs.iteritems(): - if self._optdata.has_key(optname): - setattr(self, optname, optvalue) - changedargs.append(optname) - # update self - self._updateSelf(changedargs) - return - - def parseConfigFile(self, filename): - ''' - read a config file and update the self.*option* - - :param filename: str, file name of config file (include path) - ''' - if filename != None: - filename = os.path.abspath(filename) - if os.path.exists(filename): - self.configfile = filename - self._copySelftoConfig() - fileobj = FakeConfigFile(filename) - # self.config.read(filename) - self.config.readfp(fileobj) - self._copyConfigtoSelf() - self._updateSelf() - return - - def updateConfig(self, filename=None, args=None, **kwargs): - ''' - update config according to config file, args(from sys.argv) or **kwargs - - 1. call self._preUpdateConfig() - 2. process file/args/kwargs passed to this method, - 3. read a configfile if specified in args or kwargs - 4. call self._postUpdateConfig() - 5. 
write config file if specified in args/kwargs - - :param filename: str, file name of the config file - :param args: list of str, args passed from cmd, - :param kwargs: dict, optional kwargs - - :return: True if anything updated, False if nothing updated - ''' - # call self._preUpdateConfig - self._preUpdateConfig(**kwargs) - - filename = self._findConfigFile(filename, args, **kwargs) - if filename != None: - rv = self.parseConfigFile(filename) - if args != None: - rv = self.parseArgs(args) - if kwargs != {}: - rv = self.parseKwargs(**kwargs) - - if (filename == None)and((args == None)or(args == []))and(kwargs == {}): - rv = self._updateSelf() - - # call self._callbackUpdateConfig - self._postUpdateConfig(**kwargs) - - # write config file - self._createConfigFile() - return rv - - def _preUpdateConfig(self, **kwargs): - ''' - Method called before parsing args or kwargs or config file, in self.updateConfig - ''' - return - - def _postUpdateConfig(self, **kwargs): - ''' - Method called after parsing args or kwargs or config file, in self.updateConfig - ''' - return - - ########################################################################### - def _createConfigFile(self): - ''' - write output config file if specfied in configuration - the filename is specified by self.createconfig - ''' - if (self.createconfig != '')and(self.createconfig != None): - self.writeConfig(self.createconfig, 'short') - self.createconfig = '' - if (self.createconfigfull != '')and(self.createconfigfull != None): - self.writeConfig(self.createconfigfull, 'full') - self.createconfigfull = '' - return - - def writeConfig(self, filename, mode='short', changeconfigfile=True): - ''' - write config to file. the file is compatiable with python package ConfigParser - - :param filename: string, name of file - :param mode: string, 'short' or 'full' ('s' or 'f'). 
- in short mode, all options with 'a' will be written, in full mode, - all options with 'a' or 'f' will be written - ''' - if changeconfigfile: - self.configfile = os.path.abspath(filename) - self._updateSelf() - # func decide if wirte the option to config according to mode - # options not present in self._optdata will not be written to config - if mode.startswith('s'): - mcond = lambda optname: self._optdata.get(optname, {'config':'n'}).get('config', 'a') == 'a' - else: - mcond = lambda optname: self._optdata.get(optname, {'config':'n'}).get('config', 'a') != 'n' - - lines = [] - for section in self.config._sections: - tlines = [] - for (key, value) in self.config._sections[section].items(): - if (key != "__name__") and mcond(key): - tlines.append("%s = %s" % (key, str(value).replace('\n', '\n\t'))) - if len(tlines) > 0: - lines.append("[%s]" % section) - lines.extend(tlines) - lines.append('') - rv = "\n".join(lines) + "\n" - fp = open(filename, 'w') - fp.write(rv) - fp.close() - return - - def getHeader(self, title=None, mode='full'): - ''' - get a header of configurations values, - - :param title: str, title of header, if None, try to get it from self.defaultvalue - :param mode: string, 'short' or 'full' ('s' or 'f'). 
- in short mode, all options with 'a' will be written, in full mode, - all options with 'a' or 'f' will be written - - :return: string, lines with line break that can be directly writen to a text file - ''' - - lines = [] - title = '# %s #' % (self._defaultdata['headertitle'] if title == None else title) - lines.append(title) - # func decide if wirte the option to header according to mode - # options not present in self._optdata will not be written to header - if mode.startswith('s'): - mcond = lambda optname: self._optdata.get(optname, {'header':'n'}).get('header', 'a') == 'a' - else: - mcond = lambda optname: self._optdata.get(optname, {'header':'n'}).get('header', 'a') != 'n' - - for secname in self._configlist.keys(): - tlines = [] - for optname in self._configlist[secname]: - if mcond(optname): - value = getattr(self, optname) - ttype = self._getTypeStr(optname) - strvalue = ', '.join(map(str, value)) if ttype.endswith('list') else str(value) - tlines.append("%s = %s" % (optname, strvalue)) - if len(tlines) > 0: - lines.append("[%s]" % secname) - lines.extend(tlines) - lines.append('') - rv = "\n".join(lines) + "\n" - return rv - - def resetDefault(self, optnames=None): - ''' - reset all values to their default value - - :param optnames: list of str, name of options to reset, None for all options - ''' - if optnames == None: - optnames = self._optdata.keys() - for optname in optnames: - if self._optdata.has_key(optname): - setattr(self, optname, self._optdata[optname]['d']) - self._updateSelf() - return - - ########################################################################### - #IMPORTANT call this method if you want to add options as class attributes!!! - - @classmethod - def initConfigClass(cls): - ''' - init config class and add options to class - - IMPORTANT: call this method after you define the metadata of your config - class to add options as class attributes!!! 
- ''' - cls._preInitConfigClass() - - cls.config = ConfigParser.ConfigParser(dict_type=OrderedDict) - cls.args = argparse.ArgumentParser(description=cls._description, - epilog=cls._epilog, - formatter_class=argparse.RawDescriptionHelpFormatter) - cls._configlist = OrderedDict({}) - - cls._optdatalist = cls._optdatalist_default + cls._optdatalist - cls._optdata = dict(cls._optdatalist) - cls._detectAddSectionsC() - for opt in cls._optdatalist: - key = opt[0] - cls._addOptC(key) - - cls._postInitConfigClass() - return - - @classmethod - def _postInitConfigClass(cls): - ''' - additional processes called after initConfigClass - - overload it - ''' - pass - - @classmethod - def _preInitConfigClass(cls): - ''' - additional processes called before initConfigClass - - overload it - ''' - pass - -#VERY IMPORTANT!!! -# add options to class -# initConfigClass(ConfigBase) -# ConfigBase.initConfigClass() - -if __name__ == '__main__': - - test = ConfigBase() - test.updateConfig() diff --git a/diffpy/srxplanar/__init__.py b/diffpy/srxplanar/__init__.py deleted file mode 100644 index 3094612..0000000 --- a/diffpy/srxplanar/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env python -############################################################################## -# -# diffpy.srxplanar by DANSE Diffraction group -# Simon J. L. Billinge -# (c) 2010 Trustees of the Columbia University -# in the City of New York. All rights reserved. -# -# File coded by: Xiaohao Yang -# -# See AUTHORS.txt for a list of people who contributed. -# See LICENSE.txt for license information. 
-# -############################################################################## - -# package version -from diffpy.srxplanar.version import __version__ - -# some convenience imports -from diffpy.srxplanar.srxplanar import SrXplanar -from diffpy.srxplanar.srxplanarconfig import SrXplanarConfig - -# unit tests - - -# End of file diff --git a/diffpy/srxplanar/mask.py b/diffpy/srxplanar/mask.py deleted file mode 100644 index e36be3e..0000000 --- a/diffpy/srxplanar/mask.py +++ /dev/null @@ -1,251 +0,0 @@ -#!/usr/bin/env python -############################################################################## -# -# diffpy.srxplanar by DANSE Diffraction group -# Simon J. L. Billinge -# (c) 2010 Trustees of the Columbia University -# in the City of New York. All rights reserved. -# -# File coded by: Xiaohao Yang -# -# See AUTHORS.txt for a list of people who contributed. -# See LICENSE.txt for license information. -# -############################################################################## - -import numpy as np -import scipy.sparse as ssp -try: - import fabio - def openImage(im): - rv = fabio.openimage.openimage(im) - return rv.data -except: - import tifffile - print 'Only tiff or .npy mask is support since fabio is not available' - def openImage(im): - try: - rv = tifffile.imread(im) - except: - rv = 0 - return rv - -import scipy.ndimage.filters as snf -import scipy.ndimage.morphology as snm -import os -from diffpy.srxplanar.srxplanarconfig import _configPropertyR - -class Mask(object): - ''' - provide methods for mask generation, including: - - static mask: tif mask, npy mask - dymanic mask: masking dark pixels, bright pixels - - ''' - - xdimension = _configPropertyR('xdimension') - ydimension = _configPropertyR('ydimension') - fliphorizontal = _configPropertyR('fliphorizontal') - flipvertical = _configPropertyR('flipvertical') - wavelength = _configPropertyR('wavelength') - maskfile = _configPropertyR('maskfile') - brightpixelmask = 
_configPropertyR('brightpixelmask') - darkpixelmask = _configPropertyR('darkpixelmask') - cropedges = _configPropertyR('cropedges') - avgmask = _configPropertyR('avgmask') - - def __init__(self, p, calculate): - self.config = p - self.staticmask = np.zeros((self.ydimension, self.xdimension)) - self.dynamicmask = None - self.calculate = calculate - return - - def staticMask(self, maskfile=None): - ''' - create a static mask according existing mask file. This mask remain unchanged for different images - - :param maskfile: string, file name of mask, - mask file supported: .npy, .tif file, ATTN: mask in .npy form should be already flipped, - and 1 (or larger) stands for masked pixels, 0(<0) stands for unmasked pixels - - :return: 2d array of boolean, 1 stands for masked pixel - ''' - maskfile = self.maskfile if maskfile == None else maskfile - - if os.path.exists(maskfile): - if maskfile.endswith('.npy'): - rv = np.load(maskfile) - elif maskfile.endswith('.tif'): - immask = openImage(maskfile) - rv = self.flipImage(immask) - else: - rv = np.zeros((self.ydimension, self.xdimension)) - - self.staticmask = (rv > 0) - return self.staticmask - - def dynamicMask(self, pic, dymask=None, brightpixelmask=None, darkpixelmask=None, avgmask=None): - ''' - create a dynamic mask according to image array. 
This mask changes for different images - - :param pic: 2d array, image array to be processed - :parma dymask: 2d array, mask array used in average mask calculation - :param brightpixelmask: pixels with much lower intensity compare to adjacent pixels will be masked - :param darkpixelmask: pixels with much higher intensity compare to adjacent pixels will be masked - :param avgmask: Mask the pixels too bright or too dark compared to the average intensity at the similar diffraction angle - - :return: 2d array of boolean, 1 stands for masked pixel - ''' - - brightpixelmask = self.brightpixelmask if brightpixelmask == None else brightpixelmask - darkpixelmask = self.darkpixelmask if darkpixelmask == None else darkpixelmask - avgmask = self.avgmask if avgmask == None else avgmask - - if darkpixelmask or brightpixelmask or avgmask: - rv = np.zeros((self.ydimension, self.xdimension)) - if darkpixelmask: - rv += self.darkPixelMask(pic) - if brightpixelmask: - rv += self.brightPixelMask(pic) - if avgmask: - rv += self.avgMask(pic, dymask=dymask) - self.dynamicmask = (rv > 0) - else: - self.dynamicmask = None - return self.dynamicmask - - def edgeMask(self, cropedges=None): - ''' - generate edge mask - - :param cropedges: crop the image, maske pixels around the image edge (left, right, - top, bottom), must larger than 0, if None, use self.corpedges - ''' - ce = self.cropedges if cropedges == None else cropedges - mask = np.ones((self.ydimension, self.xdimension), dtype=bool) - mask[ce[2]:-ce[3], ce[0]:-ce[1]] = 0 - return mask - - def avgMask(self, image, high=None, low=None, dymask=None, cropedges=None): - ''' - generate a mask that automatically mask the pixels, whose intensities are - too high or too low compare to the pixels which have similar twotheta value - - :param image: 2d array, image file (array) - :param high: float (default: 2.0), int > avgint * high will be masked - :param low: float (default: 0.5), int < avgint * low will be masked - :param dymask: 2d bool 
array, mask array used in calculation, True for masked pixel, - if None, then use self.staticmask - :param cropedges: crop the image, maske pixels around the image edge (left, right, - top, bottom), must larger than 0, if None, use self.config.corpedges - - :return 2d bool array, True for masked pixel, edgemake included, dymask not included - ''' - if dymask == None: - dymask = self.staticmask - high = self.config.avgmaskhigh if high == None else high - low = self.config.avgmasklow if low == None else low - - self.calculate.genIntegrationInds(dymask) - chi = self.calculate.intensity(image) - index = np.rint(self.calculate.tthorqmatrix / self.config.tthorqstep).astype(int) - index[index >= len(chi[1]) - 1] = len(chi[1]) - 1 - avgimage = chi[1][index.ravel()].reshape(index.shape) - mask = np.ones((self.ydimension, self.xdimension), dtype=bool) - ce = self.cropedges if cropedges == None else cropedges - mask[ce[2]:-ce[3], ce[0]:-ce[1]] = np.logical_or(image[ce[2]:-ce[3], ce[0]:-ce[1]] < avgimage * low, - image[ce[2]:-ce[3], ce[0]:-ce[1]] > avgimage * high) - return mask - - def darkPixelMask(self, pic, r=None): - ''' - pixels with much lower intensity compare to adjacent pixels will be masked - - :param pic: 2d array, image array to be processed - :param r: float, a threshold for masked pixels - - :return: 2d array of boolean, 1 stands for masked pixel - ''' - r = self.config.darkpixelr if r == None else r # 0.1 - - avgpic = np.average(pic) - ks = np.ones((5, 5)) - ks1 = np.ones((7, 7)) - picb = snf.percentile_filter(pic, 5, 3) < avgpic * r - picb = snm.binary_dilation(picb, structure=ks) - picb = snm.binary_erosion(picb, structure=ks1) - return picb - - def brightPixelMask(self, pic, size=None, r=None): - ''' - pixels with much higher intensity compare to adjacent pixels will be masked, - this mask is used when there are some bright spots/pixels whose intensity is higher - than its neighbors but not too high. Only use this on a very good powder averaged - data. 
Otherwise it may mask wrong pixels. - - This mask has similar functions as 'selfcorr' function. However, this mask will only - consider pixels' local neighbors pixels and tend to mask more pixels. While 'selfcorr' - function compare one pixel to other pixels in same bin. - - :param pic: 2d array, image array to be processed - :param size: int, size of local testing area - :param r: float, a threshold for masked pixels - - :return: 2d array of boolean, 1 stands for masked pixel - ''' - size = self.config.brightpixelsize if size == None else size # 5 - r = self.config.brightpixelr if r == None else r # 1.2 - - rank = snf.rank_filter(pic, -size, size) - ind = snm.binary_dilation(pic > rank * r, np.ones((3, 3))) - return ind - - def undersample(self, undersamplerate): - ''' - a special mask used for undesampling image. It will create a mask that - discard (total number*(1-undersamplerate)) pixels - :param undersamplerate: float, 0~1, ratio of pixels to keep - - :return: 2d array of boolean, 1 stands for masked pixel - ''' - mask = np.random.rand(self.ydimension, self.xdimension) < undersamplerate - return mask - - def flipImage(self, pic): - ''' - flip image if configured in config - - :param pic: 2d array, image array - - :return: 2d array, flipped image array - ''' - if self.fliphorizontal: - pic = pic[:, ::-1] - if self.flipvertical: - pic = pic[::-1, :] - return pic - - def saveMask(self, filename, pic=None, addmask=None): - ''' - generate a mask according to the addmask and pic. save it to .npy. 
1 stands for masked pixel - the mask has same order as the pic, which means if the pic is flipped, the mask is fliped - (when pic is loaded though loadimage, it is flipped) - - :param filename: str, filename of mask file to be save - :param pic: 2d array, image array - :param addmask: list of str, control which mask to generate - - :return: 2d array of boolean, 1 stands for masked pixel - ''' - if not hasattr(self, 'mask'): - self.normalMask(addmask) - if (not hasattr(self, 'dynamicmask')) and (pic != None): - self.dynamicMask(pic, addmask=addmask) - tmask = self.mask - if hasattr(self, 'dynamicmask'): - if self.dynamicmask != None: - tmask = np.logical_or(self.mask, self.dynamicmask) if pic != None else self.mask - np.save(filename, tmask) - return tmask diff --git a/diffpy/srxplanar/saveresults.py b/diffpy/srxplanar/saveresults.py deleted file mode 100644 index 80be1ae..0000000 --- a/diffpy/srxplanar/saveresults.py +++ /dev/null @@ -1,156 +0,0 @@ -#!/usr/bin/env python -############################################################################## -# -# diffpy.srxplanar by DANSE Diffraction group -# Simon J. L. Billinge -# (c) 2010 Trustees of the Columbia University -# in the City of New York. All rights reserved. -# -# File coded by: Xiaohao Yang -# -# See AUTHORS.txt for a list of people who contributed. -# See LICENSE.txt for license information. 
-# -############################################################################## - -import numpy as np -import scipy.io -import os -from diffpy.srxplanar.srxplanarconfig import _configPropertyR - -class SaveResults(object): - ''' - save results into files - ''' - integrationspace = _configPropertyR('integrationspace') - savedirectory = _configPropertyR('savedirectory') - gsasoutput = _configPropertyR('gsasoutput') - filenameplus = _configPropertyR('filenameplus') - - def __init__(self, p): - self.config = p - self.prepareCalculation() - return - - def prepareCalculation(self): - if not os.path.exists(self.savedirectory): - os.makedirs(self.savedirectory) - return - - def getFilePathWithoutExt(self, filename): - ''' - get the normalized full path of filename with out extension - - :param filename: string, could be full path or file name only and with/without ext, only the base part of filename is used. - - :return: string, full normalized path of file without extension - ''' - filebase = os.path.splitext(os.path.split(filename)[1])[0] - if self.filenameplus != '' and self.filenameplus != None: - filenamep = '_'.join([filebase, self.filenameplus, self.integrationspace]) - else: - filenamep = '_'.join([filebase, self.integrationspace]) - filepathwithoutext = os.path.join(self.savedirectory, filenamep) - return filepathwithoutext - - def save(self, rv): - ''' - save diffraction intensity in .chi and gsas format(optional) - - :param rv: dict, result include integrated diffration intensity - the rv['chi'] should be a 2d array with shape (2,len of intensity) or (3, len of intensity) - file name is generated according to orginal file name and savedirectory - ''' - rv = self.saveChi(rv['chi'], rv['filename']) - if self.gsasoutput: - if self.gsasoutput in set(['std', 'esd', 'fxye']): - rv = [rv, self.saveGSAS(rv['chi'], rv['filename'])] - return rv - - def saveChi(self, xrd, filename): - ''' - save diffraction intensity in .chi - - :param xrd: 2d array with shape (2,len of 
intensity) or (3, len of intensity), [tthorq, intensity, (unceratinty)] - :param filename: str, base file name - ''' - filepath = self.getFilePathWithoutExt(filename) + '.chi' - f = open(filepath, 'wb') - f.write(self.config.getHeader(mode='short')) - f.write('#### start data\n') - np.savetxt(f, xrd.transpose(), fmt='%g') - f.close() - return filepath - - def saveGSAS(self, xrd, filename): - ''' - save diffraction intensity in gsas format - - :param xrd: 2d array with shape (2,len of intensity) or (3, len of intensity), [tthorq, intensity, (unceratinty)] - :param filename: str, base file name - ''' - filepath = self.getFilePathWithoutExt(filename) + '.gsas' - f = open(filepath, 'wb') - f.write(self.config.getHeader(mode='short')) - f.write('#### start data\n') - if xrd.shape[0] == 3: - s = writeGSASStr(os.path.splitext(path)[0], self.gsasoutput, xrd[0], xrd[1], xrd[2]) - elif xrd.shape[0] == 2: - s = writeGSASStr(os.path.splitext(path)[0], self.gsasoutput, xrd[0], xrd[1]) - f.write(s) - f.close() - return filepath - -def writeGSASStr(name, mode, tth, iobs, esd=None): - """ - Return string of integrated intensities in GSAS format. 
- :param mode: string, gsas file type, could be 'std', 'esd', 'fxye' (gsas format) - :param tth: ndarray, two theta angle - :param iobs: ndarray, Xrd intensity - :param esd: ndarray, optional error value of intensity - - :return: string, a string to be saved to file - """ - maxintensity = 999999 - logscale = numpy.floor(numpy.log10(maxintensity / numpy.max(iobs))) - logscale = min(logscale, 0) - scale = 10 ** int(logscale) - lines = [] - ltitle = 'Angular Profile' - ltitle += ': %s' % name - ltitle += ' scale=%g' % scale - if len(ltitle) > 80: ltitle = ltitle[:80] - lines.append("%-80s" % ltitle) - ibank = 1 - nchan = len(iobs) - # two-theta0 and dtwo-theta in centidegrees - tth0_cdg = tth[0] * 100 - dtth_cdg = (tth[-1] - tth[0]) / (len(tth) - 1) * 100 - if esd == None: mode = 'std' - if mode == 'std': - nrec = int(numpy.ceil(nchan / 10.0)) - lbank = "BANK %5i %8i %8i CONST %9.5f %9.5f %9.5f %9.5f STD" % \ - (ibank, nchan, nrec, tth0_cdg, dtth_cdg, 0, 0) - lines.append("%-80s" % lbank) - lrecs = [ "%2i%6.0f" % (1, ii * scale) for ii in iobs ] - for i in range(0, len(lrecs), 10): - lines.append("".join(lrecs[i:i + 10])) - if mode == 'esd': - nrec = int(numpy.ceil(nchan / 5.0)) - lbank = "BANK %5i %8i %8i CONST %9.5f %9.5f %9.5f %9.5f ESD" % \ - (ibank, nchan, nrec, tth0_cdg, dtth_cdg, 0, 0) - lines.append("%-80s" % lbank) - lrecs = [ "%8.0f%8.0f" % (ii, ee * scale) for ii, ee in zip(iobs, esd) ] - for i in range(0, len(lrecs), 5): - lines.append("".join(lrecs[i:i + 5])) - if mode == 'fxye': - nrec = nchan - lbank = "BANK %5i %8i %8i CONST %9.5f %9.5f %9.5f %9.5f FXYE" % \ - (ibank, nchan, nrec, tth0_cdg, dtth_cdg, 0, 0) - lines.append("%-80s" % lbank) - lrecs = [ "%22.10f%22.10f%24.10f" % (xx * scale, yy * scale, ee * scale) for xx, yy, ee in zip(tth, iobs, esd) ] - for i in range(len(lrecs)): - lines.append("%-80s" % lrecs[i]) - lines[-1] = "%-80s" % lines[-1] - rv = "\r\n".join(lines) + "\r\n" - return rv diff --git a/diffpy/srxplanar/selfcalibrate.py 
b/diffpy/srxplanar/selfcalibrate.py deleted file mode 100644 index 8d4cca6..0000000 --- a/diffpy/srxplanar/selfcalibrate.py +++ /dev/null @@ -1,310 +0,0 @@ -import numpy as np -import scipy as sp -import os -from functools import partial -from scipy.optimize import minimize, leastsq, fmin_bfgs, fmin_l_bfgs_b, fmin_tnc, minimize_scalar, fmin_powell, \ - fmin_cg, fmin_slsqp, brent, golden -from matplotlib import rcParams -rcParams['backend'] = 'Qt4Agg' -try: - import PySide - rcParams['backend.qt4'] = 'PySide' - import matplotlib.pyplot as plt - mplenabled = True -except: - try: - import PyQt4 - import matplotlib.pyplot as plt - mplenabled = True - except: - mplenabled = False - -def halfcut(p, srx, image, xycenter, qind=[50, 500], show=False, mode='x', output=0): - ''' - cut the image into two half, integrate them and compare the results, if the calibration - information is correct, two half should give same results. - - :param p: calibration parameters - :param srx: SrXplanar object, object to do the integration - :param image: str or 2d array, image to be calibrated - :param xycenter: [int, int], cut position - :param qind: [int, int], range of q to calculate the difference - :param show: bool, True to plot the cut - :param mode: str, mode of calibration, could be x, y, tilt, rotation, all, xy - :param output: int, 0 to return one number (sum of square of difference), - 1 to return the difference array - - :return: sum of square of difference or difference array - ''' - if mode == 'x': - srx.updateConfig(xbeamcenter=p) - elif mode == 'y': - srx.updateConfig(ybeamcenter=p) - elif mode == 'tilt': - srx.updateConfig(tiltd=p) - elif mode == 'rotation': - srx.updateConfig(rotationd=p) - elif mode == 'all': - srx.updateConfig(xbeamcenter=p[0], - ybeamcenter=p[1], - rotationd=p[2], - tiltd=p[3]) - elif mode == 'xy': - srx.updateConfig(xbeamcenter=p[0], - ybeamcenter=p[1]) - elif mode == 'show': - pass - - srx.prepareCalculation() - kwargs = {'savename':None, - 
'savefile':False, - 'flip':False, - 'correction':False, - } - if mode != 'y': - srx.config.extracrop = [1, srx.config.xdimension - xycenter[0], 1, 1] - res1 = srx.integrate(image, **kwargs) - chi1 = res1['chi'][1][qind[0]:qind[1]] - - srx.config.extracrop = [xycenter[0], 1, 1, 1] - res2 = srx.integrate(image, **kwargs) - chi2 = res2['chi'][1][qind[0]:qind[1]] - - if mode != 'x': - srx.config.extracrop = [1, 1, 1, srx.config.ydimension - xycenter[1]] - res3 = srx.integrate(image, **kwargs) - chi3 = res3['chi'][1][qind[0]:qind[1]] - - srx.config.extracrop = [1, 1, xycenter[1], 1] - res4 = srx.integrate(image, **kwargs) - chi4 = res4['chi'][1][qind[0]:qind[1]] - - if mode == 'x': - rv = chi1 - chi2 - rv = rv / (chi1 + chi2).mean() - elif mode == 'y': - rv = chi3 - chi4 - rv = rv / (chi3 + chi4).mean() - else: - r1 = chi1 - chi2 - r2 = chi3 - chi4 - rv = np.concatenate([r1 / (chi1 + chi2).mean(), r2 / (chi3 + chi4).mean()]) - - rv0 = np.sum(rv ** 2) - print p - print rv0 - if output == 0: - rv = rv0 - - if show and mplenabled: - print p - print rv - plotRes(mode, res1, res2, res3, res4) - return rv - -def plotRes(mode, res1, res2, res3, res4): - ''' - plot results - ''' - plt.ion() - plt.figure(1) - plt.clf() - if mode != 'y': - plt.plot(res1['chi'][0], res1['chi'][1], label='left') - plt.plot(res2['chi'][0], res2['chi'][1], label='right') - if mode != 'x': - plt.plot(res3['chi'][0], res3['chi'][1], label='up') - plt.plot(res4['chi'][0], res4['chi'][1], label='down') - plt.legend() - plt.show() - return - -def minimize1(func, bounds): - ''' - 1d minimizer - - :param func: callable function f(x), 1d function - :param bounds: (float, float), the initial bounds - - :return: float, the value of x - ''' - diffb = np.abs(bounds[1] - bounds[0]) - if diffb > 6: - trylist = np.linspace(bounds[0], bounds[1], 3 * int(bounds[1] - bounds[0]) + 1, True) - else: - trylist = np.linspace(bounds[0], bounds[1], 21, True) - vlow = np.inf - rv = trylist[0] - for v in trylist: - temp = 
func(v) - if temp < vlow: - rv = v - vlow = temp - if diffb > 6: - trylist = np.linspace(rv - 0.5, rv + 0.5, 21, True) - else: - trylist = np.linspace(rv - diffb / 12.0, rv + diffb / 12.0, 21, True) - - for v in trylist: - temp = func(v) - if temp < vlow: - rv = v - vlow = temp - return rv - -def selfCalibrateX(srx, image, xycenter=None, mode='all', output=0, showresults=False, qrange=[None, None], **kwargs): - ''' - Do the self calibration using mode X - - the initial value is read from the current value of srx object, and the - refined results will be writrn into the srx object - - :param srx: SrXplanar object, object to do the integration - :param image: str or 2d array, image to be calibrated - :param xycenter: [int, int], cut position, if None, determine it using current beam center - :param mode: str, mode of calibration, could be x, y, xy, tilt, rotation, all - :param output: int, 0 to use fmin optimizer, 1 to use leastsq optimizer - :param showresults: bool, plot the halfcut result - :param qrange: q range used in calculating difference - - :return: list, refined parameter - ''' - bak = {} - for opt in ['uncertaintyenable', 'integrationspace', 'qmax', 'qstep', - 'cropedges', 'extracrop', 'brightpixelmask', 'darkpixelmask', 'avgmask']: - bak[opt] = getattr(srx.config, opt) - - xycenter = [int(srx.config.xbeamcenter), - int(srx.config.ybeamcenter)] - - qmax = srx.config.qmax - # qstep = qmax / 2000 - qstep = qmax / srx.config.xdimension - - srx.updateConfig(uncertaintyenable=False, - integrationspace='qspace', - # qmax=qmax, - qstep=qstep, - brightpixelmask=False, - darkpixelmask=False, - avgmask=False) - # qind = [50, 1000] - qind = [None, None] - qind[0] = int(qrange[0] / qstep) if qrange[0] != None else srx.config.xdimension / 20 - qind[0] = 0 if qind[0] < 0 else qind[0] - qind[1] = int(qrange[1] / qstep) if qrange[1] != None else srx.config.xdimension / 2 - qind[1] = srx.config.xdimension - 5 if qind[1] > srx.config.xdimension - 5 else qind[1] - - 
srx.prepareCalculation() - srxconfig = srx.config - image = np.array(srx._getPic(image)) - - func = partial(halfcut, srx=srx, image=image, qind=qind, mode=mode, output=output, - xycenter=xycenter, show=False) - - xywidth = 6 if not kwargs.has_key('xywidth') else kwargs['xywidth'] - if mode == 'x': - p0 = [srxconfig.xbeamcenter] - bounds = (p0[0] - xywidth, p0[0] + xywidth) - elif mode == 'y': - p0 = [srxconfig.ybeamcenter] - bounds = (p0[0] - xywidth, p0[0] + xywidth) - elif mode == 'tilt': - p0 = [srxconfig.tiltd] - bounds = (p0[0] - 5, p0[0] + 5) - elif mode == 'rotation': - p0 = [srxconfig.rotationd] - bounds = (0, 360) - elif mode == 'all': - p0 = [srxconfig.xbeamcenter, srxconfig.ybeamcenter, srxconfig.rotationd, srxconfig.tiltd] - bounds = [[p0[0] - xywidth, p0[0] + xywidth], [p0[1] - xywidth, p0[1] + xywidth], - [0, 360], [srxconfig.tiltd - 10, srxconfig.tiltd + 10]] - elif mode == 'xy': - p0 = [srxconfig.xbeamcenter, srxconfig.ybeamcenter] - bounds = [[p0[0] - xywidth, p0[0] + xywidth], [p0[1] - xywidth, p0[1] + xywidth]] - - if output == 0: - if mode in ['x', 'y', 'tilt', 'rotation']: - rv = minimize1(func, bounds) - p = [rv] - else: - rv = minimize(func, p0, method='Powell', bounds=bounds, options={'xtol':0.001, 'ftol':0.001}) - p = rv.x - else: - rv = leastsq(func, p0, epsfcn=0.001) - p = rv[0] - - print p - if mode == 'x': - srx.updateConfig(xbeamcenter=p[0], **bak) - prv = p[0] - elif mode == 'y': - srx.updateConfig(ybeamcenter=p[0], **bak) - elif mode == 'tilt': - srx.updateConfig(tiltd=p[0], ** bak) - elif mode == 'rotation': - srx.updateConfig(rotation=p[0], ** bak) - elif mode == 'xy': - srx.updateConfig(xbeamcenter=p[0], ybeamcenter=p[1], ** bak) - elif mode == 'all': - srx.updateConfig(xbeamcenter=p[0], ybeamcenter=p[1], rotationd=p[2], tiltd=p[3], ** bak) - - if showresults: - halfcut([], srx=srx, image=image, xycenter=xycenter, qind=qind, show=True, mode='show', output=output) - return p - -def selfCalibrate(srx, image, mode='xy', 
cropedges='auto', showresults=False, qrange=[None, None], **kwargs): - ''' - Do the self calibration - - the initial value is read from the current value of srx object, and the - refined results will be writrn into the srx object - - :param srx: SrXplanar object, object to do the integration - :param image: str or 2d array, image to be calibrated - :param mode: str or list of str: - all: refine all parameters at once - xy: refine x and y - list of str: eg. ['x', 'y', 'xy'] -> refine x, then y, then xy - :param cropedges: list of int or str - if list of int, it will be passed to srx instance and used as cropedges - if 'auto', the cropedges of srx instance will be set automaticly , - if 'x'('y'), then a slice along x(y) axis will be used - if 'box', then a box around the center will be used - if 'all', then use all pixels - :param showresults: bool, plot the halfcut result - :param qrange: q range used in calculating difference - - :return: list, refined parameter - ''' - - # lineCalibrate(srx, image) - - p = [] - if isinstance(mode, str): - xc = srx.config.xbeamcenter - yc = srx.config.ybeamcenter - xd = srx.config.xdimension - yd = srx.config.ydimension - - if not isinstance(cropedges, (list, tuple)): - if cropedges == 'y' or (cropedges == 'auto' and mode == 'y'): - ce = [int(xc - 50), int(xd - xc - 50), yd / 100, yd / 100] - elif cropedges == 'x' or (cropedges == 'auto' and mode == 'x'): - ce = [xd / 100, xd / 100, int(yc - 50), int(yd - yc - 50)] - elif cropedges == 'box' or (cropedges == 'auto' and (not mode in ['x', 'y'])): - ce = [int(xc - xd / 6), int(xd - xc - xd / 6), - int(yc - yd / 6), int(yd - yc - yd / 6)] - else: - ce = [10, 10, 10, 10] - - cebak = srx.config.cropedges - srx.updateConfig(cropedges=ce) - p = selfCalibrateX(srx, image, mode=mode, showresults=showresults, qrange=qrange, **kwargs) - srx.updateConfig(cropedges=cebak) - - elif isinstance(mode, (list, tuple)): - for m in mode: - p = selfCalibrate(srx, image, m, cropedges, qrange=qrange) - 
return p - diff --git a/diffpy/srxplanar/srxplanarconfig.py b/diffpy/srxplanar/srxplanarconfig.py deleted file mode 100644 index 58ccce4..0000000 --- a/diffpy/srxplanar/srxplanarconfig.py +++ /dev/null @@ -1,356 +0,0 @@ -#!/usr/bin/env python -############################################################################## -# -# diffpy.srxplanar by DANSE Diffraction group -# Simon J. L. Billinge -# (c) 2010 Trustees of the Columbia University -# in the City of New York. All rights reserved. -# -# File coded by: Xiaohao Yang -# -# See AUTHORS.txt for a list of people who contributed. -# See LICENSE.txt for license information. -# -############################################################################## - -import numpy as np -import ConfigParser -import re, os, sys -from functools import partial -import argparse - -from diffpy.confutils.config import ConfigBase -from diffpy.confutils.tools import _configPropertyRad, _configPropertyR, _configPropertyRW - -_description = \ -''' -SrXplanar -- integrate 2D powder diffraction image to 1D with unceratinty propagation -''' - # Text to display after the argument help -_epilog = \ -''' -Examples: - -srxplanar KFe2As2-00838.tif -c test.cfg ---integration using config file test.cfg - -srxplanar *.tif -c test.cfg -s ---integration all .tif image and sum them into one - -srxplanar --createconfig config.cfg ---create default (short) config file using all default value - -''' - -_optdatalist = [ - # control group - ['filenames', {'sec':'Control', 'config':'n', 'header':'n', - 'f':'filename', - 'h':'filename or list of filenames or filename pattern or list of filename pattern', - 'n':'*', - 'd':[], }], - ['output', {'sec':'Experiment', 'config':'n', 'header':'n', - 's':'o', - 'h':'basename of output file', - 'd':'', }], - ['summation', {'sec':'Control', 'config':'n', 'header':'n', - 's':'s', - 'h':'sum all the image and then integrate', - 'n':'?', - 'co':True, - 'd':False, }], - # Expeiment gropu - ['opendirectory', 
{'sec':'Control', 'header':'n', - 's':'opendir', - 'h':'directory of input 2D image files', - 'd':'currentdir', - 'tt':'directory'}], - ['savedirectory', {'sec':'Control', 'header':'n', - 's':'savedir', - 'h':'directory of output files', - 'd':'currentdir', - 'tt':'directory'}], - ['maskfile', {'sec':'Experiment', - 's':'mask', - 'h':'the mask file (support numpy .npy array, and tiff image, >0 stands for masked pixel)', - 'd':'', - 'tt':'file'}], - ['createmask', {'sec':'Control', 'config':'n', 'header':'n', - 'h':'create a mask file according to current image file and value of addmask', - 'd':'', }], - ['integrationspace', {'sec':'Experiment', - 'h':'the x-grid of integrated 1D diffraction data', - 'd':'twotheta', - 'c':['qspace', 'twotheta'], }], - ['wavelength', {'sec':'Experiment', - 'h':'wavelength of x-ray, in Angstrom', - 'd':0.1000, }], - ['xbeamcenter', {'sec':'Experiment', - 's':'xc', - 'h':'beamcenter in x axis, in pixel', - 'd':1024.0, }], - ['ybeamcenter', {'sec':'Experiment', - 's':'yc', - 'h':'beamcenter in y axis, in pixel', - 'd':1024.0, }], - ['distance', {'sec':'Experiment', - 's':'dis', - 'h':'distance between detector and sample, in mm', - 'd':200.0, }], - ['rotationd', {'sec':'Experiment', - 's':'rot', - 'h':'rotation angle of tilt plane, in degree', - 'd':0.0, }], - ['tiltd', {'sec':'Experiment', - 's':'tilt', - 'h':'tilt angle of tilt plane, in degree', - 'd':0.0, }], - ['tthstepd', {'sec':'Experiment', - 's':'ts', - 'h':'integration step in twotheta space, in degree', - 'd':0.02, }], - ['qstep', {'sec':'Experiment', - 's':'qs', - 'h':'integration step in q space, in Angstrom^-1', - 'd':0.02, }], - # Beamline group - ['includepattern', {'sec':'Beamline', 'header':'n', 'config':'f', - 's':'ipattern', - 'h':'list of string, file name patterns for included files', - 'n':'*', - 'd':['*.tif', '*.tif.bz2'], }], - ['excludepattern', {'sec':'Beamline', 'header':'n', 'config':'f', - 's':'epattern', - 'h':'list of string, file name patterns for 
excluded files', - 'n':'*', - 'd':['*.dark.tif', '*.raw.tif'], }], - ['fliphorizontal', {'sec':'Beamline', - 'h':'filp the image horizontally', - 'n':'?', - 'co':True, - 'd':False, }], - ['flipvertical', {'sec':'Beamline', - 'h':'filp the image vertically', - 'n':'?', - 'co':True, - 'd':True, }], - ['xdimension', {'sec':'Beamline', - 's':'xd', - 'h':'detector dimension in x axis, in pixel', - 'd':2048, }], - ['ydimension', {'sec':'Beamline', - 's':'yd', - 'h':'detector dimension in y axis, in pixel', - 'd':2048, }], - ['xpixelsize', {'sec':'Beamline', - 's':'xp', - 'h':'detector pixel size in x axis, in mm', - 'd':0.2, }], - ['ypixelsize', {'sec':'Beamline', - 's':'yp', - 'h':'detector pixel size in y axis, in mm', - 'd':0.2, }], - # Others Group - ['uncertaintyenable', {'sec':'Others', - 's':'error', - 'h':'enable uncertainty propagation', - 'n':'?', - 'co':True, - 'd':True, }], - ['sacorrectionenable', {'sec':'Others', - 's':'sacorr', - 'h':'enable solid angle correction', - 'n':'?', - 'co':True, - 'd':True, }], - ['polcorrectionenable', {'sec':'Others', - 's':'polarcorr', - 'h':'enable polarization correction', - 'n':'?', - 'co':True, - 'd':True, }], - ['polcorrectf', {'sec':'Others', - 's':'polarf', - 'h':'polarization correction factor', - 'd':0.99, }], - ['brightpixelmask', {'sec':'Others', - 'h':'mask the bright pixel by comparing their local environments', - 'n':'?', - 'co':True, - 'd':True, }], - ['darkpixelmask', {'sec':'Others', - 'h':'mask the dark pixel by comparing their local environments', - 'n':'?', - 'co':True, - 'd':True, }], - ['avgmask', {'sec':'Others', - 'h':'create a dynamic averaging mask that mask pixel with too high or too low intensity compare to the pixels which have similar twotheta value', - 'n':'?', - 'co':True, - 'd':True, }], - ['gsasoutput', {'sec':'Others', 'header':'n', - 'h':'select if want to output gsas format file', - 'c':['None', 'std', 'esd', 'fxye'], - 'd':'None', }], - ['filenameplus', {'sec':'Others', 'header':'n', - 
'h':'string appended to the output filename', - 'd':'', }], - ['cropedges', {'sec':'Others', - 'h':'crop the image, maske pixels around the image edge (left, right, top, bottom), must larger than 0', - 'n':4, - 'tt':'array', - 't':'intlist', - 'd':[10, 10, 10, 10], }], - ['extracrop', {'sec':'Others', 'args':'n', 'config':'n', 'header':'n', - 'h':'crop the edge pixels, first four means the number of pixels masked in each edge \ -(left, right, top, bottom), this crop is after all prepare calculation, \ -so change this value does not require a config update, value must larger than 0', - 'n':4, - 'tt':'array', - 't':'intlist', - 'd':[1, 1, 1, 1], }], - ['nocalculation', {'sec':'Others', 'config':'n', 'header':'n', - 'h':'set True to disable all calculation, will automaticly set True if createconfig or createmask', - 'n':'?', - 'co':True, - 'd':False, }], - # masking - ['brightpixelr', {'sec':'Others', 'args':'n', 'config':'n', 'header':'n', - 'h':'a threshold for masked pixels in bright pixel masking', - 'd':1.2, }], - ['brightpixelsize', {'sec':'Others', 'args':'n', 'config':'n', 'header':'n', - 'h':'size of local testing area in bright pixel masking', - 'd':5, }], - ['darkpixelr', {'sec':'Others', 'args':'n', 'config':'n', 'header':'n', - 'h':'a threshold for masked pixels in dark pixel masking', - 'd':0.1, }], - ['avgmaskhigh', {'sec':'Others', 'args':'n', 'config':'n', 'header':'n', - 'h':'a threshold for masked pixels in average masking, pixels with (self_int > avg_int * avgmaskhigh) will be masked', - 'd':2.0, }], - ['avgmasklow', {'sec':'Others', 'args':'n', 'config':'n', 'header':'n', - 'h':'a threshold for masked pixels in average masking, pixels with (self_int < avg_int * avgmasklow) will be masked', - 'd':0.5, }], - ] - -_defaultdata = {'configfile': ['srxplanar.cfg', 'SrXplanar.cfg'], - 'headertitle': 'SrXplanar configration' - } - -class SrXplanarConfig(ConfigBase): - ''' - config class, based on ConfigBase class in diffpy.confutils - ''' - - # Text to 
display before the argument help - _description = _description - - # Text to display after the argument help - _epilog = _epilog - - _optdatalist = _optdatalist - - _defaultdata = _defaultdata - - def _preInit(self, **kwargs): - ''' - method called in init process, overload it! - - this method will be called before reading config from file/args/kwargs - - add degree/rad delegation for rotation, tilt, tthstep, tthmax - ''' - - for name in ['rotation', 'tilt', 'tthstep', 'tthmax']: - setattr(self.__class__, name, _configPropertyRad(name + 'd')) - # cls._configlist['Experiment'].extend(['rotation', 'tilt', 'tthstep', 'tthmax']) - return - - def _preUpdateSelf(self, **kwargs): - ''' - additional process called in self._updateSelf, this method is called - before self._copySelftoConfig(), i.e. before copy options value to - self.config (config file) - - check the tthmaxd and qmax, and set tthorqmax, tthorqstep according to integration space - - :param kwargs: optional kwargs - ''' - self.tthmaxd, self.qmax = checkMax(self) - if self.integrationspace == 'twotheta': - self.tthorqmax = self.tthmax - self.tthorqstep = self.tthstep - elif self.integrationspace == 'qspace': - self.tthorqmax = self.qmax - self.tthorqstep = self.qstep - - self.cropedges = [a if a > 1 else 1 for a in self.cropedges] - self.extracrop = [a if a > 1 else 1 for a in self.extracrop] - return - - def _postUpdateConfig(self, **kwargs): - ''' - post processing after parse args or kwargs, this method is called after - in self._postPocessing and before creating config file action - - set nocalculatio flag when create config or create mask - - :param kwargs: optional kwargs - ''' - - if (self.createconfig != '')and(self.createconfig != None): - self.nocalculation = True - if (self.createconfigfull != '')and(self.createconfigfull != None): - self.nocalculation = True - if self.createmask != '': - self.nocalculation = True - return - -def checkMax(config): - ''' - calculate the max twotheta angle (and q) of a 
detector with current geometry - - :param config: SrXplanarConfig, config instance stores the geometry parameters - - :return: [tthmaxd, qmax], max twotheta angle(in degree) and max q value of current - detector. - ''' - xdimension = getattr(config, 'xdimension') - ydimension = getattr(config, 'ydimension') - xbeamcenter = getattr(config, 'xbeamcenter') - ybeamcenter = getattr(config, 'ybeamcenter') - xpixelsize = getattr(config, 'xpixelsize') - ypixelsize = getattr(config, 'ypixelsize') - rotation = getattr(config, 'rotation') - tilt = getattr(config, 'tilt') - distance = getattr(config, 'distance') - wavelength = getattr(config, 'wavelength') - - - xr = (np.array([0, xdimension + 1]) - xbeamcenter) * xpixelsize - yr = (np.array([0, ydimension + 1]) - ybeamcenter) * ypixelsize - sinr = np.sin(rotation) - cosr = np.cos(rotation) - sint = np.sin(tilt) - cost = np.cos(tilt) - sourcexr = distance * sint * cosr - sourceyr = -distance * sint * sinr - sourcezr = distance * cost - - dmatrix = ((xr - sourcexr) ** 2).reshape(1, 2) + \ - ((yr - sourceyr) ** 2).reshape(2, 1) + sourcezr ** 2 - dmatrix = np.sqrt(dmatrix) - tthmatrix1 = ((-xr + sourcexr) * sourcexr).reshape(1, 2) + \ - ((-yr + sourceyr) * sourceyr).reshape(2, 1) + sourcezr * sourcezr - tthmatrix = np.arccos(tthmatrix1 / dmatrix / distance) - qmatrix = 4 * np.pi * np.sin(tthmatrix / 2.0) / wavelength - - tthmaxd = np.degrees(np.max(tthmatrix)) + 0.5 - qmax = np.max(qmatrix) + 0.1 - return tthmaxd, qmax - -SrXplanarConfig.initConfigClass() - -if __name__ == '__main__': - a = SrXplanarConfig() - a.updateConfig() - a.writeConfig('test.cfg') diff --git a/diffpy/srxplanar/version.py b/diffpy/srxplanar/version.py deleted file mode 100644 index 08b9544..0000000 --- a/diffpy/srxplanar/version.py +++ /dev/null @@ -1,32 +0,0 @@ -#!/usr/bin/env python -############################################################################## -# -# diffpy.srxplanar by DANSE Diffraction group -# Simon J. L. 
Billinge -# (c) 2010 Trustees of the Columbia University -# in the City of New York. All rights reserved. -# -# File coded by: Xiaohao Yang -# -# See AUTHORS.txt for a list of people who contributed. -# See LICENSE.txt for license information. -# -############################################################################## - -"""Definition of __version__, __date__, __gitsha__. -""" - -from pkg_resources import resource_stream -from ConfigParser import SafeConfigParser - -# obtain version information from the version.cfg file -cp = SafeConfigParser() -cp.readfp(resource_stream(__name__, 'version.cfg')) - -__version__ = cp.get('DEFAULT', 'version') -__date__ = cp.get('DEFAULT', 'date') -__gitsha__ = cp.get('DEFAULT', 'commit') - -del cp - -# End of file diff --git a/doc/manual/source/index.rst b/doc/manual/source/index.rst deleted file mode 100644 index b63f158..0000000 --- a/doc/manual/source/index.rst +++ /dev/null @@ -1,63 +0,0 @@ -.. diffpy.srxplanar documentation master file, created by - sphinx-quickstart on Mon Aug 19 16:59:08 2013. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -#################################################### -diffpy.srxplanar's documentation -#################################################### - -Software version |release|. - -Last updated |today|. - -diffpy.srxplanar package provides 2D diffraction image integration using -non splitting pixel algorithm. And it can estimate and propagate statistic -uncertainty of raw counts and integrated intensity. If you are using this -software. If you use this program to do productive scientific research that -leads to publication, we kindly ask that you acknowledge use of the program -by citing the following paper in your publication: - - Xiaohao Yang, Pavol Juhas, Simon J. L. 
Billinge, On the estimation of - statistical uncertainties on powder diffraction and small angle - scattering data from 2-D x-ray detectors, arXiv:1309.3614 - -=================== -Disclaimer -=================== - -.. literalinclude:: ../../../LICENSE.txt - -.. index:: authors - -================ -Acknowledgments -================ - -Developers ------------ - -diffpy.srxplanar is developed and maintained by - -.. literalinclude:: ../../../AUTHORS.txt - - -====================================== -Installation -====================================== - -See the `README.rst `_ -file included with the distribution. - -API and Indices -================== - -.. toctree:: - :maxdepth: 2 - - api/diffpy.rst - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - diff --git a/doc/manual/Makefile b/docs/Makefile similarity index 83% rename from doc/manual/Makefile rename to docs/Makefile index 9d3be9d..85739d2 100644 --- a/doc/manual/Makefile +++ b/docs/Makefile @@ -6,6 +6,12 @@ SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = build +BASENAME = $(subst .,,$(subst $() $(),,diffpy.srxplanar)) + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif # Internal variables. 
PAPEROPT_a4 = -D latex_paper_size=a4 @@ -14,7 +20,7 @@ ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) sou # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext publish +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @@ -29,17 +35,20 @@ help: @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: - -rm -rf $(BUILDDIR)/* + rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @@ -77,17 +86,17 @@ qthelp: @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Structure.qhcp" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/$(BASENAME).qhcp" @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Structure.qhc" + @echo "# assistant -collectionFile 
$(BUILDDIR)/qthelp/$(BASENAME).qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/Structure" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Structure" + @echo "# mkdir -p $$HOME/.local/share/devhelp/$(BASENAME)" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/$(BASENAME)" @echo "# devhelp" epub: @@ -108,6 +117,12 @@ latexpdf: $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo diff --git a/doc/examples/KFe2As2-00838.tif b/docs/examples/KFe2As2-00838.tif similarity index 100% rename from doc/examples/KFe2As2-00838.tif rename to docs/examples/KFe2As2-00838.tif diff --git a/doc/examples/config.cfg b/docs/examples/config.cfg similarity index 96% rename from doc/examples/config.cfg rename to docs/examples/config.cfg index c28428a..ede1848 100644 --- a/doc/examples/config.cfg +++ b/docs/examples/config.cfg @@ -28,6 +28,5 @@ sacorrectionenable = True polcorrectionenable = True polcorrectf = 0.95 gsasoutput = None -filenameplus = +filenameplus = maskedges = 20, 20, 20, 20, 100 - diff --git a/doc/examples/KFe2As2-00838_fit2d.chi b/docs/examples/data/KFe2As2-00838_fit2d.chi similarity index 100% rename from doc/examples/KFe2As2-00838_fit2d.chi rename to docs/examples/data/KFe2As2-00838_fit2d.chi diff --git a/doc/examples/KFe2As2-00838_twotheta.chi b/docs/examples/data/KFe2As2-00838_twotheta.chi similarity index 99% rename from doc/examples/KFe2As2-00838_twotheta.chi rename to docs/examples/data/KFe2As2-00838_twotheta.chi index dd4d34a..8cf60d7 
100644 --- a/doc/examples/KFe2As2-00838_twotheta.chi +++ b/docs/examples/data/KFe2As2-00838_twotheta.chi @@ -1,4 +1,4 @@ -# SrXplanar configration # +# SrXplanar configuration # [Experiment] addmask = edgemask integrationspace = twotheta diff --git a/doc/examples/fit2d.txt b/docs/examples/fit2d.txt similarity index 98% rename from doc/examples/fit2d.txt rename to docs/examples/fit2d.txt index dc38e1a..f56f3e3 100644 --- a/doc/examples/fit2d.txt +++ b/docs/examples/fit2d.txt @@ -1,14 +1,14 @@ -INFO: Number of function calls = 7 -INFO: Sum of squares = 2.3219 -INFO: Number of rejected coordinates = 0 -INFO: Number of function calls = 7 -INFO: Sum of squares = 2.3219 -INFO: Refined Beam centre = 1299.430 1010.190 (pixels) -INFO: Refined Beam centre = 186.240 1.299 (mm) -INFO: Refined sample to detector distance = 369.579 mm -INFO: Refined wavelength = 0.1078 Angstroms - Energy (keV) = 115.0 -INFO: Refined tilt plane rotation angle = 49.715 degrees -INFO: Refined tilt angle = -1.480 degrees -INFO: ROT X = 0.068 ROT Y = -0.425 degrees -INFO: Stability indicator (proportional to D-spacing; Angstroms) = 0.46505 +INFO: Number of function calls = 7 +INFO: Sum of squares = 2.3219 +INFO: Number of rejected coordinates = 0 +INFO: Number of function calls = 7 +INFO: Sum of squares = 2.3219 +INFO: Refined Beam centre = 1299.430 1010.190 (pixels) +INFO: Refined Beam centre = 186.240 1.299 (mm) +INFO: Refined sample to detector distance = 369.579 mm +INFO: Refined wavelength = 0.1078 Angstroms + Energy (keV) = 115.0 +INFO: Refined tilt plane rotation angle = 49.715 degrees +INFO: Refined tilt angle = -1.480 degrees +INFO: ROT X = 0.068 ROT Y = -0.425 degrees +INFO: Stability indicator (proportional to D-spacing; Angstroms) = 0.46505 diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..2be8306 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,36 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set 
SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build +set SPHINXPROJ=PackagingScientificPython + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% + +:end +popd diff --git a/docs/source/_static/.placeholder b/docs/source/_static/.placeholder new file mode 100644 index 0000000..e69de29 diff --git a/doc/manual/source/api/diffpy.confutils.rst b/docs/source/api/diffpy.confutils.rst similarity index 59% rename from doc/manual/source/api/diffpy.confutils.rst rename to docs/source/api/diffpy.confutils.rst index 17aaed8..ab7ad9d 100644 --- a/doc/manual/source/api/diffpy.confutils.rst +++ b/docs/source/api/diffpy.confutils.rst @@ -1,35 +1,34 @@ confutils Package ================= -:mod:`confutils` Package ------------------------- +:mod:`diffpy.confutils` Package +------------------------------- .. automodule:: diffpy.confutils :members: :undoc-members: :show-inheritance: -:mod:`config` Module --------------------- +:mod:`diffpy.confutils.config` Module +------------------------------------- .. automodule:: diffpy.confutils.config :members: :undoc-members: :show-inheritance: -:mod:`tools` Module -------------------- +:mod:`diffpy.confutils.tools` Module +------------------------------------ .. automodule:: diffpy.confutils.tools :members: :undoc-members: :show-inheritance: -:mod:`version` Module ---------------------- +:mod:`diffpy.confutils.version` Module +-------------------------------------- .. 
automodule:: diffpy.confutils.version :members: :undoc-members: :show-inheritance: - diff --git a/doc/manual/source/api/diffpy.rst b/docs/source/api/diffpy.rst similarity index 98% rename from doc/manual/source/api/diffpy.rst rename to docs/source/api/diffpy.rst index 844522c..dadd2c0 100644 --- a/doc/manual/source/api/diffpy.rst +++ b/docs/source/api/diffpy.rst @@ -5,4 +5,3 @@ diffpy Package diffpy.confutils diffpy.srxplanar - diff --git a/doc/manual/source/api/diffpy.srxplanar.rst b/docs/source/api/diffpy.srxplanar.rst similarity index 55% rename from doc/manual/source/api/diffpy.srxplanar.rst rename to docs/source/api/diffpy.srxplanar.rst index 935795d..f7aedd7 100644 --- a/doc/manual/source/api/diffpy.srxplanar.rst +++ b/docs/source/api/diffpy.srxplanar.rst @@ -1,67 +1,84 @@ -srxplanar Package -================= +:tocdepth: -1 -:mod:`srxplanar` Package ------------------------- +|title| +======= + +.. |title| replace:: diffpy.srxplanar package .. automodule:: diffpy.srxplanar :members: :undoc-members: :show-inheritance: -:mod:`calculate` Module ------------------------ +Submodules +--------------------- + +|module_1| +---------- + +.. |module_1| replace:: diffpy.srxplanar.calculate module .. automodule:: diffpy.srxplanar.calculate :members: :undoc-members: :show-inheritance: -:mod:`loadimage` Module ------------------------ +|module_2| +---------- + +.. |module_2| replace:: diffpy.srxplanar.loadimage module .. automodule:: diffpy.srxplanar.loadimage :members: :undoc-members: :show-inheritance: -:mod:`mask` Module ------------------- +|module_3| +---------- + +.. |module_3| replace:: diffpy.srxplanar.mask module .. automodule:: diffpy.srxplanar.mask :members: :undoc-members: :show-inheritance: -:mod:`saveresults` Module -------------------------- +|module_4| +---------- + +.. |module_4| replace:: diffpy.srxplanar.saveresults module .. 
automodule:: diffpy.srxplanar.saveresults :members: :undoc-members: :show-inheritance: -:mod:`srxplanar` Module ------------------------ +|module_5| +---------- + +.. |module_5| replace:: diffpy.srxplanar.srxplanar module .. automodule:: diffpy.srxplanar.srxplanar :members: :undoc-members: :show-inheritance: -:mod:`srxplanarconfig` Module ------------------------------ +|module_6| +---------- + +.. |module_6| replace:: diffpy.srxplanar.srxplanarconfig module .. automodule:: diffpy.srxplanar.srxplanarconfig :members: :undoc-members: :show-inheritance: -:mod:`version` Module ---------------------- +|module_7| +---------- + +.. |module_7| replace:: diffpy.srxplanar.version module .. automodule:: diffpy.srxplanar.version :members: :undoc-members: :show-inheritance: - diff --git a/doc/manual/source/api/modules.rst b/docs/source/api/modules.rst similarity index 100% rename from doc/manual/source/api/modules.rst rename to docs/source/api/modules.rst diff --git a/doc/manual/source/conf.py b/docs/source/conf.py similarity index 57% rename from doc/manual/source/conf.py rename to docs/source/conf.py index dbd1ae7..b86e8e3 100644 --- a/doc/manual/source/conf.py +++ b/docs/source/conf.py @@ -1,10 +1,11 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- # -# diffpy.Structure documentation build configuration file, created by -# sphinx-quickstart on Tue Oct 22 12:02:48 2013. +# diffpy.srxplanar documentation build configuration file, created by # noqa: E501 +# sphinx-quickstart on Thu Jan 30 15:49:41 2014. # -# This file is execfile()d with the current directory set to its containing dir. +# This file is execfile()d with the current directory set to its +# containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. @@ -12,249 +13,309 @@ # All configuration values have a default; values that are commented out # serve to show the default. 
-import sys, os +import sys import time +from importlib.metadata import version +from pathlib import Path + +# Attempt to import the version dynamically from GitHub tag. +try: + fullversion = version("diffpy.srxplanar") +except Exception: + fullversion = "No version found. The correct version will appear in the released version." # noqa: E501 # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) -sys.path.insert(0, os.path.abspath('../../..')) +# documentation root, use Path().resolve() to make it absolute, like shown here. # noqa: E501 +# sys.path.insert(0, str(Path(".").resolve())) +sys.path.insert(0, str(Path("../..").resolve())) +sys.path.insert(0, str(Path("../../src").resolve())) # abbreviations -ab_authors = u'Xiaohao Yang, Simon J.L. Billinge group' +ab_authors = "Xiaohao Yang and Billinge Group members" -# -- General configuration ----------------------------------------------------- +# -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.coverage', 'sphinx.ext.pngmath'] +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "sphinx.ext.intersphinx", + "sphinx_rtd_theme", + "sphinx_copybutton", + "m2r", +] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] -# The suffix of source filenames. -source_suffix = '.rst' +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +source_suffix = [".rst", ".md"] # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'diffpy.srxplanar' -copyright = u'%Y, Columbia University' +project = "diffpy.srxplanar" +copyright = "%Y, The Trustees of Columbia University in the City of New York" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. -from setup import versiondata -fullversion = versiondata.get('DEFAULT', 'version') + # The short X.Y version. -version = '.'.join(fullversion.split('.')[:2]) +version = "".join(fullversion.split(".post")[:1]) # The full version, including alpha/beta/rc tags. release = fullversion # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. -#language = None +# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' -today_seconds = versiondata.getint('DEFAULT', 'timestamp') -today = time.strftime('%B %d, %Y', time.localtime(today_seconds)) +# today = '' +today = time.strftime("%B %d, %Y", time.localtime()) year = today.split()[-1] # Else, today_fmt is used as the format for a strftime call. 
-#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # substitute YEAR in the copyright string -copyright = copyright.replace('%Y', year) +copyright = copyright.replace("%Y", year) + +# For sphinx_copybutton extension. +# Do not copy "$" for shell commands in code-blocks. +copybutton_prompt_text = r"^\$ " +copybutton_prompt_is_regexp = True # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = [] +exclude_patterns = ["build"] -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "sphinx" # A list of ignored prefixes for module index sorting. -modindex_common_prefix = ['diffpy.srxplanar'] +modindex_common_prefix = ["diffpy.srxplanar"] # Display all warnings for missing links. nitpicky = True -# -- Options for HTML output --------------------------------------------------- +# -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. 
-html_theme = 'default' +# +html_theme = "sphinx_rtd_theme" + +html_context = { + "display_github": True, + "github_user": "diffpy", + "github_repo": "diffpy.srxplanar", + "github_version": "main", + "conf_py_path": "/docs/source/", +} # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} -html_theme_options = {'collapsiblesidebar' : 'true'} +# +html_theme_options = { + "navigation_with_keys": "true", +} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +# html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
-#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Output file base name for HTML help builder. -htmlhelp_basename = 'Structuredoc' +basename = "diffpy.srxplanar".replace(" ", "").replace(".", "") +htmlhelp_basename = basename + "doc" -# -- Options for LaTeX output -------------------------------------------------- +# -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). 
-#'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'srxplanar_manual.tex', u'srxplanar Documentation', - ab_authors, 'manual'), + ( + "index", + "diffpy.srxplanar.tex", + "diffpy.srxplanar Documentation", + ab_authors, + "manual", + ), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True -# -- Options for manual page output -------------------------------------------- +# -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). 
man_pages = [ - ('index', 'srxplanar', u'srxplanar Documentation', - ab_authors, 1) + ( + "index", + "diffpy.srxplanar", + "diffpy.srxplanar Documentation", + ab_authors, + 1, + ) ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False -# -- Options for Texinfo output ------------------------------------------------ +# -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'srxplanar', u'srxplanar Documentation', - ab_authors, 'srxplanar', 'One line description of project.', - 'Miscellaneous'), + ( + "index", + "diffpy.srxplanar", + "diffpy.srxplanar Documentation", + ab_authors, + "diffpy.srxplanar", + "One line description of project.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 0000000..2933b6d --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,57 @@ +####### +|title| +####### + +.. |title| replace:: diffpy.srxplanar documentation + +| Software version |release| +| Last updated |today|. + +diffpy.srxplanar package provides 2D diffraction image integration using +non splitting pixel algorithm. And it can estimate and propagate statistic +uncertainty of raw counts and integrated intensity. If you are using this +software. 
If you use this program to do productive scientific research that +leads to publication, we kindly ask that you acknowledge use of the program +by citing the following paper in your publication: + + Xiaohao Yang, Pavol Juhas, Simon J. L. Billinge, On the estimation of + statistical uncertainties on powder diffraction and small angle + scattering data from 2-D x-ray detectors, arXiv:1309.3614 + +======= +Authors +======= + +``diffpy.srxplanar`` is developed by Xiaohao Yang and Billinge Group members. The maintainer for this project is Simon Billinge. For a detailed list of contributors see +https://github.com/diffpy/diffpy.srxplanar/graphs/contributors. + +============ +Installation +============ + +See the `README `_ +file included with the distribution. + +================ +Acknowledgements +================ + +``diffpy.srxplanar`` is built and maintained with `scikit-package `_. + +================= +Table of contents +================= +.. toctree:: + :maxdepth: 2 + + Package API + Modules + Release notes + license + +======= +Indices +======= + +* :ref:`genindex` +* :ref:`search` diff --git a/docs/source/license.rst b/docs/source/license.rst new file mode 100644 index 0000000..4b57678 --- /dev/null +++ b/docs/source/license.rst @@ -0,0 +1,38 @@ +:tocdepth: -1 + +.. index:: license + +License +####### + +OPEN SOURCE LICENSE AGREEMENT +============================= +BSD 3-Clause License + +Copyright (c) 2008-2025, The Trustees of Columbia University in the City of New York. +All Rights Reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. 
Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived from + this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/docs/source/release.rst b/docs/source/release.rst new file mode 100644 index 0000000..27cd0cc --- /dev/null +++ b/docs/source/release.rst @@ -0,0 +1,5 @@ +:tocdepth: -1 + +.. index:: release notes + +.. 
include:: ../../CHANGELOG.rst diff --git a/news/TEMPLATE.rst b/news/TEMPLATE.rst new file mode 100644 index 0000000..790d30b --- /dev/null +++ b/news/TEMPLATE.rst @@ -0,0 +1,23 @@ +**Added:** + +* + +**Changed:** + +* + +**Deprecated:** + +* + +**Removed:** + +* + +**Fixed:** + +* + +**Security:** + +* diff --git a/news/doc.rst b/news/doc.rst new file mode 100644 index 0000000..b0ec659 --- /dev/null +++ b/news/doc.rst @@ -0,0 +1,23 @@ +**Added:** + +* + +**Changed:** + +* + +**Deprecated:** + +* + +**Removed:** + +* + +**Fixed:** + +* Support ``scikit-package`` Level 5 standard (https://scikit-package.github.io/scikit-package/). + +**Security:** + +* diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..8dcd94a --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,88 @@ +[build-system] +requires = ["setuptools>=62.0", "setuptools-git-versioning>=2.0"] +build-backend = "setuptools.build_meta" + +[project] +name = "diffpy.srxplanar" +dynamic=['version', 'dependencies'] +authors = [ + { name="Simon Billinge", email="sb2896@columbia.edu" }, +] +maintainers = [ + { name="Simon Billinge", email="sb2896@columbia.edu" }, +] +description = "Distance Printer, calculate the inter atomic distances. 
Part of xPDFsuite" +keywords = ['diffpy', 'pdf', 'data interpretation'] +readme = "README.rst" +requires-python = ">=3.12, <3.15" +classifiers = [ + 'Development Status :: 5 - Production/Stable', + 'Environment :: Console', + 'Intended Audience :: Developers', + 'Intended Audience :: Science/Research', + 'License :: OSI Approved :: BSD License', + 'Operating System :: MacOS :: MacOS X', + 'Operating System :: Microsoft :: Windows', + 'Operating System :: POSIX', + 'Operating System :: Unix', + 'Programming Language :: Python :: 3.12', + 'Programming Language :: Python :: 3.13', + 'Programming Language :: Python :: 3.14', + 'Topic :: Scientific/Engineering :: Physics', + 'Topic :: Scientific/Engineering :: Chemistry', +] + +[project.urls] +Homepage = "https://github.com/diffpy/diffpy.srxplanar/" +Issues = "https://github.com/diffpy/diffpy.srxplanar/issues/" + +[tool.setuptools-git-versioning] +enabled = true +template = "{tag}" +dev_template = "{tag}" +dirty_template = "{tag}" + +[tool.setuptools.packages.find] +where = ["src"] # list of folders that contain the packages (["."] by default) +include = ["*"] # package names should match these glob patterns (["*"] by default) +exclude = [] # exclude packages matching these glob patterns (empty by default) +namespaces = false # to disable scanning PEP 420 namespaces (true by default) + +[project.scripts] +diffpy-srxplanar = "diffpy.srxplanar_app:main" + +[tool.setuptools.dynamic] +dependencies = {file = ["requirements/pip.txt"]} + +[tool.codespell] +exclude-file = ".codespell/ignore_lines.txt" +ignore-words = ".codespell/ignore_words.txt" +skip = "*.cif,*.dat" + +[tool.docformatter] +recursive = true +wrap-summaries = 72 +wrap-descriptions = 72 + +[tool.black] +line-length = 79 +include = '\.pyi?$' +exclude = ''' +/( + \.git + | \.hg + | \.mypy_cache + | \.tox + | \.venv + | \.rst + | \.txt + | _build + | buck-out + | build + | dist + + # The following are specific to Black, you probably don't want those. 
+ | blib2to3 + | tests/data +)/ +''' diff --git a/requirements/conda.txt b/requirements/conda.txt new file mode 100644 index 0000000..3b5f87d --- /dev/null +++ b/requirements/conda.txt @@ -0,0 +1,5 @@ +numpy +scipy +fabio +tifffile +configparser diff --git a/requirements/docs.txt b/requirements/docs.txt new file mode 100644 index 0000000..5f34c6e --- /dev/null +++ b/requirements/docs.txt @@ -0,0 +1,5 @@ +sphinx +sphinx_rtd_theme +sphinx-copybutton +doctr +m2r diff --git a/requirements/pip.txt b/requirements/pip.txt new file mode 100644 index 0000000..3b5f87d --- /dev/null +++ b/requirements/pip.txt @@ -0,0 +1,5 @@ +numpy +scipy +fabio +tifffile +configparser diff --git a/requirements/tests.txt b/requirements/tests.txt new file mode 100644 index 0000000..a727786 --- /dev/null +++ b/requirements/tests.txt @@ -0,0 +1,6 @@ +flake8 +pytest +codecov +coverage +pytest-cov +pytest-env diff --git a/setup.py b/setup.py deleted file mode 100755 index 1053272..0000000 --- a/setup.py +++ /dev/null @@ -1,95 +0,0 @@ -#!/usr/bin/env python - -# Installation script for diffpy.Structure - -"""srxplanar - 2D diffraction image integration and uncertainty propagation -using non splitting pixel algorithm - -Packages: diffpy.srxplanar -""" - -import os -from setuptools import setup, find_packages - -# versioncfgfile holds version data for git commit hash and date. -# It must reside in the same directory as version.py. 
-MYDIR = os.path.dirname(os.path.abspath(__file__)) -versioncfgfile = os.path.join(MYDIR, 'diffpy/srxplanar/version.cfg') - -def gitinfo(): - from subprocess import Popen, PIPE - kw = dict(stdout=PIPE, cwd=MYDIR) - proc = Popen(['git', 'describe', '--match=v[[:digit:]]*'], **kw) - desc = proc.stdout.read() - proc = Popen(['git', 'log', '-1', '--format=%H %at %ai'], **kw) - glog = proc.stdout.read() - rv = {} - rv['version'] = '-'.join(desc.strip().split('-')[:-1]).lstrip('v') - rv['commit'], rv['timestamp'], rv['date'] = glog.strip().split(None, 2) - return rv - -def getversioncfg(config=versioncfgfile): - from ConfigParser import SafeConfigParser - cp = SafeConfigParser() - cp.read(config) - gitdir = os.path.join(MYDIR, '.git') - if not os.path.exists(gitdir): return cp - d = cp.defaults() - g = gitinfo() - if g['version'] != d.get('version') or g['commit'] != d.get('commit'): - cp.set('DEFAULT', 'version', g['version']) - cp.set('DEFAULT', 'commit', g['commit']) - cp.set('DEFAULT', 'date', g['date']) - cp.set('DEFAULT', 'timestamp', g['timestamp']) - cp.write(open(config, 'w')) - return cp - -# generate version.cfg for diffpy.confutils -versioncfgfile1 = os.path.join(MYDIR, 'diffpy/confutils/version.cfg') -getversioncfg(versioncfgfile1) -versiondata = getversioncfg(versioncfgfile) - -# define distribution -setup_args = dict( - name="diffpy.srxplanar", - version=versiondata.get('DEFAULT', 'version'), - namespace_packages=['diffpy'], - packages=find_packages(), - include_package_data=True, - zip_safe=False, - entry_points={ - # define console_scripts here, see setuptools docs for details. - 'console_scripts' : ['srxplanar = diffpy.srxplanar.srxplanar:main' - ], - }, - - author='Simon J.L. 
Billinge', - author_email='sb2896@columbia.edu', - maintainer='Xiaohao Yang', - maintainer_email='sodestiny1@gmail.com', - url='https://github.com/diffpy/diffpy.srxplanar', - description="2D diffraction image integration and uncertainty propagation", - license='BSD-style license', - keywords="diffpy planar integration non-splitting uncertainty", - classifiers=[ - # List of possible values at - # http://pypi.python.org/pypi?:action=list_classifiers - 'Development Status :: 5 - Production/Stable', - 'Environment :: Console', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: BSD License', - 'Operating System :: MacOS :: MacOS X', - 'Operating System :: Microsoft :: Windows', - 'Operating System :: POSIX', - 'Operating System :: Unix', - 'Programming Language :: Python :: 2.6', - 'Programming Language :: Python :: 2.7', - 'Topic :: Scientific/Engineering :: Chemistry', - 'Topic :: Scientific/Engineering :: Physics', - ], -) - -if __name__ == '__main__': - setup(**setup_args) - -# End of file diff --git a/src/diffpy/__init__.py b/src/diffpy/__init__.py new file mode 100644 index 0000000..b4a5565 --- /dev/null +++ b/src/diffpy/__init__.py @@ -0,0 +1,14 @@ +#!/usr/bin/env python +############################################################################## +# +# (c) 2010-2025 The Trustees of Columbia University in the City of New York. +# All rights reserved. +# +# File coded by: Xiaohao Yang and Billinge Group members. +# +# See GitHub contributions for a more detailed list of contributors. +# https://github.com/diffpy/diffpy.srxplanar/graphs/contributors +# +# See LICENSE.rst for license information. 
+# +############################################################################## diff --git a/diffpy/confutils/__init__.py b/src/diffpy/confutils/__init__.py similarity index 71% rename from diffpy/confutils/__init__.py rename to src/diffpy/confutils/__init__.py index 5595ca3..a3af0ba 100644 --- a/diffpy/confutils/__init__.py +++ b/src/diffpy/confutils/__init__.py @@ -3,7 +3,7 @@ # # diffpy.confutils by DANSE Diffraction group # Simon J. L. Billinge -# (c) 2010 Trustees of the Columbia University +# (c) 2010-2025 Trustees of the Columbia University # in the City of New York. All rights reserved. # # File coded by: Xiaohao Yang @@ -14,9 +14,6 @@ ############################################################################## # package version -from diffpy.srxplanar.version import __version__ - -# some convenience imports -from diffpy.confutils.config import ConfigBase, ConfigBase +from diffpy.srxplanar.version import __version__ # noqa: F401 # End of file diff --git a/src/diffpy/confutils/config.py b/src/diffpy/confutils/config.py new file mode 100644 index 0000000..faefc9a --- /dev/null +++ b/src/diffpy/confutils/config.py @@ -0,0 +1,820 @@ +#!/usr/bin/env python +############################################################################## +# +# diffpy.confutils by DANSE Diffraction group +# Simon J. L. Billinge +# (c) 2012-2025 Trustees of the Columbia University +# in the City of New York. All rights reserved. +# +# File coded by: Xiaohao Yang +# +# See AUTHORS.txt for a list of people who contributed. +# See LICENSE.txt for license information. +# +############################################################################## +"""Package for organizing program configurations. It can read/write +configurations file, parse arguments from command lines, and also parse +arguments passed from method/function calling inside python. 
+ +Note: for python 2.6, argparse and orderedDict is required, install them with +easy_install +""" + + +import argparse +import os +from collections import OrderedDict +from configparser import ConfigParser + +from diffpy.confutils.tools import FakeConfigFile, StrConv, opt2Str, str2Opt + + +class ConfigBase(object): + """_optdatalist_default, _optdatalist are metadata used to + initialize the options, see below for examples. + + options presents in --help (in cmd), config file, + headers have same order as in these list, + so arrange them in right order here. + + optional args to control if the options presents in args, config file or + file header + + 'args' - default is 'a' + if 'a', this option will be available in self.args + if 'n', this option will not be available in self.args + 'config' - default is 'a' + if 'f', this option will present in self.config and be written to + config file only in full mode + if 'a', this option will present in self.config and be written to + config file both in full and short mode + if 'n', this option will not present in self.config + 'header' - default is 'a' + if 'f', this option will be written to header only in full mode + if 'a', this option will be written to header both in full and short + mode + if 'n', this option will not be written to header + + so in short mode, all options with 'a' will be written, in full mode, + all options with 'a' or 'f' will be written + """ + + # Text to display before the argument help + _description = """Description of configurations + """ + # Text to display after the argument help + _epilog = """ + """ + + """ + optdata contains these keys: + these args will be passed to argparse, see the documents of argparse for + detail information + + 'f': full, (positional) + 's': short + 'h': help + 't': type + 'a': action + 'n': nargs + 'd': default + 'c': choices + 'r': required + 'de': dest + 'co': const + """ + _optdatanamedict = { + "h": "help", + "t": "type", + "a": "action", + "n": "nargs", 
+ "d": "default", + "c": "choices", + "r": "required", + "de": "dest", + "co": "const", + } + + # examples, overload it + _optdatalist_default = [ + [ + "configfile", + { + "sec": "Control", + "config": "f", + "header": "n", + "s": "c", + "h": "name of input config file", + "d": "", + }, + ], + [ + "createconfig", + { + "sec": "Control", + "config": "n", + "header": "n", + "h": ( + "create a config file according to default" + "or current values" + ), + "d": "", + }, + ], + [ + "createconfigfull", + { + "sec": "Control", + "config": "n", + "header": "n", + "h": "create a full configurable config file", + "d": "", + }, + ], + ] + # examples, overload it + _optdatalist = [ + [ + "tifdirectory", + { + "sec": "Experiment", + "header": "n", + "s": "tifdir", + "h": "directory of raw tif files", + "d": "currentdir", + }, + ], + [ + "integrationspace", + { + "sec": "Experiment", + "h": "integration space, could be twotheta or qspace", + "d": "twotheta", + "c": ["twotheta", "qspace"], + }, + ], + [ + "wavelength", + { + "sec": "Experiment", + "h": "wavelength of x-ray, in A", + "d": 0.1000, + }, + ], + [ + "rotationd", + { + "sec": "Experiment", + "s": "rot", + "h": "rotation angle of tilt plane, in degree", + "d": 0.0, + }, + ], + [ + "includepattern", + { + "sec": "Beamline", + "s": "ipattern", + "h": "file name pattern for included files", + "n": "*", + "d": ["*.tif"], + }, + ], + [ + "excludepattern", + { + "sec": "Beamline", + "s": "epattern", + "h": "file name pattern for excluded files", + "n": "*", + "d": ["*.dark.tif", "*.raw.tif"], + }, + ], + [ + "fliphorizontal", + { + "sec": "Beamline", + "h": "flip the image horizontally", + "n": "?", + "co": True, + "d": False, + }, + ], + [ + "regulartmatrixenable", + { + "sec": "Others", + "h": "normalize tmatrix in splitting method", + "n": "?", + "co": True, + "d": False, + }, + ], + [ + "maskedges", + { + "sec": "Others", + "config": "f", + "header": "f", + "h": ( + "mask the edge pixels, first four means" + " the number 
of pixels masked in each edge \n" + " (left, right, top, bottom)," + " the last one is the radius of a region" + " masked around the corner" + ), + "n": 5, + "d": [1, 1, 1, 1, 50], + }, + ], + ] + + # some default data + # configfile: default config file name + # headertitle: default title of header + _defaultdata = { + "configfile": ["config.cfg"], + "headertitle": "Configuration information", + } + + def __init__(self, filename=None, args=None, **kwargs): + """Init the class and update the values of options if specified + in filename/args/kwargs. + + it will: + 1. call self._preInit method + 2. find the config file if specified in filename/args/kwargs + if failed, try to find default config file + 3. update the options value using filename/args/kwargs + file > args > kwargs + + :param filename: str, file name of the config file + :param args: list of str, args passed from cmd + :param kwargs: dict, optional kwargs + + :return: None + """ + # call self._preInit + self._preInit(**kwargs) + + # update config, first detect if a default config should be load + filename = self._findDefaultConfigFile(filename, args, **kwargs) + self.updateConfig(filename, args, **kwargs) + return + + # example, overload it + def _preInit(self, **kwargs): + """Method called in init process, overload it! + + this method will be called before reading config from + file/args/kwargs + """ + # for name in ['rotation']: + # setattr(self.__class__, name, _configPropertyRad(name+'d')) + # self._configlist['Experiment'].extend(['rotation']) + return + + ########################################################################### + + def _findConfigFile(self, filename=None, args=None, **kwargs): + """Find config file, if any config is specified in + filename/args/kwargs then return the filename of config. 
+ + :param filename: str, file name of config file + :param filename: list of str, args passed from cmd + :param kwargs: optional kwargs + :return: name of config file if found, otherwise None + """ + rv = None + if filename is not None: + rv = filename + if args is not None: + if ("--configfile" in args) or ("-c" in args): + obj = self.args.parse_args(args) + rv = obj.configfile + if kwargs.has_key("configfile"): + rv = kwargs["configfile"] + return rv + + def _findDefaultConfigFile(self, filename=None, args=None, **kwargs): + """Find default config file, if any config is specified in + filename/args/kwargs or in self._defaultdata['configfile'], then + return the filename of config. + + kwargs > args > filename > default + + param filename: str, file name of config file + param filename: list of str, args passed from cmd + param kwargs: optional kwargs + + return: name of config file if found, otherwise None + """ + rv = self._findConfigFile(filename, args, **kwargs) + if rv is None: + for dconf in self._defaultdata["configfile"]: + if (os.path.exists(dconf)) and (rv is None): + rv = dconf + return rv + + ########################################################################### + + def _updateSelf(self, optnames=None, **kwargs): + """Update the options value, then copy the values in the + self.'options' to self.config. + + 1. call self._preUpdateSelf + 2. apply options' value from *self.option* to self.config + 3. call self._postUpdateSelf + + :param optnames: str or list of str, name of options whose value has + been changed, if None, update all options + """ + # so some check right here + self._preUpdateSelf(**kwargs) + # copy value to self.config + self._copySelftoConfig(optnames) + # so some check right here + self._postUpdateSelf(**kwargs) + return + + # example, overload it + def _preUpdateSelf(self, **kwargs): + """Additional process called in self._updateSelf, this method is + called before self._copySelftoConfig(), i.e. 
before copy options + value to self.config (config file)""" + return + + def _postUpdateSelf(self, **kwargs): + """Additional process called in self._updateSelf, this method is + called after self._copySelftoConfig(), i.e. before copy options + value to self.config (config file)""" + return + + ########################################################################### + + def _getTypeStr(self, optname): + """Return the type of option. + + :param optname: str, name of option + :return: string, type of the option + """ + opttype = self._getTypeStrC(optname) + return opttype + + @classmethod + def _getTypeStrC(cls, optname): + """Class method, return the type of option first try to get type + information from metadata, if failed, try to get type from + default value. + + :param optname: str, name of option + :return: string, type of the option + """ + optdata = cls._optdata[optname] + if "t" in optdata: + opttype = optdata["t"] + else: + value = optdata["d"] + if isinstance(value, str): + opttype = "str" + elif isinstance(value, bool): + opttype = "bool" + elif isinstance(value, float): + opttype = "float" + elif isinstance(value, int): + opttype = "int" + elif isinstance(value, list): + if len(value) == 0: + opttype = "strlist" + elif isinstance(value[0], str): + opttype = "strlist" + elif isinstance(value[0], bool): + opttype = "boollist" + elif isinstance(value[0], float): + opttype = "floatlist" + elif isinstance(value[0], int): + opttype = "intlist" + + return opttype + + ########################################################################### + + def _detectAddSections(self): + """Detect sections present in self._optdata and add them to + self.config also add it to self._configlist.""" + self._detectAddSectionsC(self) + return + + @classmethod + def _detectAddSectionsC(cls): + """Class method, detect sections present in self._optdata and + add them to self.config also add it to self._configlist.""" + # seclist = [self._optdata[key]['sec'] for key in 
self._optdata.keys()] + seclist = [cls._optdata[opt[0]]["sec"] for opt in cls._optdatalist] + secdict = OrderedDict.fromkeys(seclist) + # for sec in set(seclist): + for sec in secdict.keys(): + cls.config.add_section(sec) + cls._configlist[sec] = [] + return + + def _addOpt(self, optname): + """Add options to self.config and self.args and self.*option*, + this will read metadata from self._optdatalist. + + :param optname: string, name of option + """ + self._addOptC(self, optname) + return + + @classmethod + def _addOptC(cls, optname): + """Class method, add options to self.config and self.args and + self.*option*, this will read metadata in self._optdatalist. + + :param optname: string, name of option + """ + optdata = cls._optdata[optname] + opttype = cls._getTypeStrC(optname) + + # replace currentdir in default to os.getcwd() + if optdata["d"] == "currentdir": + optdata["d"] = os.getcwd() + + # add to cls.'optname' + cls._addOptSelfC(optname, optdata) + + # add to cls.config + secname = optdata["sec"] if "sec" in optdata else "Others" + cls._configlist[secname].append(optname) + if optdata.get("config", "a") != "n": + strvalue = ( + ", ".join(map(str, optdata["d"])) + if isinstance(optdata["d"], list) + else str(optdata["d"]) + ) + cls.config.set(secname, optname, strvalue) + # add to cls.args + if optdata.get("args", "a") != "n": + # transform optdata to a dict that can pass to add_argument method + pargs = dict() + for key in optdata.keys(): + if key in cls._optdatanamedict: + pargs[cls._optdatanamedict[key]] = optdata[key] + pargs["default"] = argparse.SUPPRESS + pargs["type"] = StrConv(opttype) + # add args + if "f" in optdata: + cls.args.add_argument(optname, **pargs) + elif "s" in optdata: + cls.args.add_argument( + "--" + optname, "-" + optdata["s"], **pargs + ) + else: + cls.args.add_argument("--" + optname, **pargs) + return + + @classmethod + def _addOptSelfC(cls, optname, optdata): + """Class method, assign options value to *self.option*, using + 
metadata. + + :param optname: string, name of the option + :param optdata: dict, metadata of the options, get it from + self._optdatalist + """ + setattr(cls, optname, optdata["d"]) + return + + def _copyConfigtoSelf(self, optnames=None): + """Copy the options' value from self.config to self.*option* + + :param optnames: str or list of str, names of options whose + value copied from self.config to self.*option*'. Set None to + update all + """ + if optnames is not None: + optnames = optnames if isinstance(optnames, list) else [optnames] + else: + optnames = [] + for secname in self.config.sections(): + optnames += self.config.options(secname) + + for optname in optnames: + if self._optdata.has_key(optname): + secname = self._optdata[optname]["sec"] + opttype = self._getTypeStr(optname) + optvalue = self.config.get(secname, optname) + setattr(self, optname, str2Opt(opttype, optvalue)) + return + + def _copySelftoConfig(self, optnames=None): + """Copy the value from self.*option* to self.config. + + :param optname: str or list of str, names of options whose value + copied from self.*option* to self.config. 
Set None to update + all + """ + if optnames is not None: + optnames = optnames if isinstance(optnames, list) else [optnames] + else: + optnames = [] + for secname in self.config.sections(): + optnames += self.config.options(secname) + + for optname in optnames: + if self._optdata.has_key(optname): + secname = self._optdata[optname]["sec"] + opttype = self._getTypeStr(optname) + optvalue = getattr(self, optname) + self.config.set(secname, optname, opt2Str(opttype, optvalue)) + return + + ########################################################################### + + def parseArgs(self, pargs): + """Parse args and update the value in self.*option*, this will + call the self.args() to parse args, + + :param pargs: list of string, arguments to parse, usually coming + from sys.argv + """ + obj = self.args.parse_args(pargs) + changedargs = obj.__dict__.keys() + for optname in changedargs: + if self._optdata.has_key(optname): + setattr(self, optname, getattr(obj, optname)) + # update self + if len(changedargs) > 0: + self._updateSelf(changedargs) + return obj + + def parseKwargs(self, **kwargs): + """Update self.*option* values according to the kwargs. 
+ + :param kwargs: dict, keywords=value + """ + if kwargs != {}: + changedargs = [] + for optname, optvalue in kwargs.iteritems(): + if self._optdata.has_key(optname): + setattr(self, optname, optvalue) + changedargs.append(optname) + # update self + self._updateSelf(changedargs) + return + + def parseConfigFile(self, filename): + """Read a config file and update the self.*option* + + :param filename: str, file name of config file (include path) + """ + if filename is not None: + filename = os.path.abspath(filename) + if os.path.exists(filename): + self.configfile = filename + self._copySelftoConfig() + fileobj = FakeConfigFile(filename) + # self.config.read(filename) + self.config.readfp(fileobj) + self._copyConfigtoSelf() + self._updateSelf() + return + + def updateConfig(self, filename=None, args=None, **kwargs): + """Update config according to a config file, args (from + ``sys.argv``), or ``**kwargs``. + + Steps: + 1. call ``self._preUpdateConfig()`` + 2. process file/args/kwargs passed to this method + 3. read a config file if specified in ``args`` or ``kwargs`` + 4. call ``self._postUpdateConfig()`` + 5. 
write config file if specified in ``args``/``kwargs`` + + :param filename: str, file name of the config file + :param args: list of str, args passed from cmd + :param kwargs: dict, optional keyword arguments + :return: True if anything updated, False if nothing updated + """ + + # call self._preUpdateConfig + self._preUpdateConfig(**kwargs) + + filename = self._findConfigFile(filename, args, **kwargs) + if filename is not None: + rv = self.parseConfigFile(filename) + if args is not None: + rv = self.parseArgs(args) + if kwargs != {}: + rv = self.parseKwargs(**kwargs) + + if ( + (filename is None) + and ((args is None) or (args == [])) + and (kwargs == {}) + ): + rv = self._updateSelf() + + # call self._callbackUpdateConfig + self._postUpdateConfig(**kwargs) + + # write config file + self._createConfigFile() + return rv + + def _preUpdateConfig(self, **kwargs): + """Method called before parsing args or kwargs or config file, + in self.updateConfig.""" + return + + def _postUpdateConfig(self, **kwargs): + """Method called after parsing args or kwargs or config file, in + self.updateConfig.""" + return + + ########################################################################### + def _createConfigFile(self): + """Write output config file if specified in configuration the + filename is specified by self.createconfig.""" + if (self.createconfig != "") and (self.createconfig is not None): + self.writeConfig(self.createconfig, "short") + self.createconfig = "" + if (self.createconfigfull != "") and ( + self.createconfigfull is not None + ): + self.writeConfig(self.createconfigfull, "full") + self.createconfigfull = "" + return + + def writeConfig(self, filename, mode="short", changeconfigfile=True): + """Write config to file. the file is compatible with python + package ConfigParser. + + :param filename: string, name of file + :param mode: string, 'short' or 'full' ('s' or 'f'). 
in short + mode, all options with 'a' will be written, in full mode, + all options with 'a' or 'f' will be written + """ + if changeconfigfile: + self.configfile = os.path.abspath(filename) + self._updateSelf() + # func decide if write the option to config according to mode + # options not present in self._optdata will not be written to config + if mode.startswith("s"): + mcond = ( + lambda optname: self._optdata.get( + optname, {"config": "n"} + ).get("config", "a") + == "a" + ) + else: + mcond = ( + lambda optname: self._optdata.get( + optname, {"config": "n"} + ).get("config", "a") + != "n" + ) + + lines = [] + for section in self.config._sections: + tlines = [] + for key, value in self.config._sections[section].items(): + if (key != "__name__") and mcond(key): + tlines.append( + "%s = %s" % (key, str(value).replace("\n", "\n\t")) + ) + if len(tlines) > 0: + lines.append("[%s]" % section) + lines.extend(tlines) + lines.append("") + rv = "\n".join(lines) + "\n" + fp = open(filename, "w") + fp.write(rv) + fp.close() + return + + def getHeader(self, title=None, mode="full"): + """Get a header of configurations values, + + :param title: str, title of header, if None, try to get it from + self.defaultvalue + :param mode: string, 'short' or 'full' ('s' or 'f'). 
in short + mode, all options with 'a' will be written, in full mode, + all options with 'a' or 'f' will be written + :return: string, lines with line break that can be directly + written to a text file + """ + + lines = [] + title = "# %s #" % ( + self._defaultdata["headertitle"] if title is None else title + ) + lines.append(title) + # func decide if write the option to header according to mode + # options not present in self._optdata will not be written to header + if mode.startswith("s"): + mcond = ( + lambda optname: self._optdata.get( + optname, {"header": "n"} + ).get("header", "a") + == "a" + ) + else: + mcond = ( + lambda optname: self._optdata.get( + optname, {"header": "n"} + ).get("header", "a") + != "n" + ) + + for secname in self._configlist.keys(): + tlines = [] + for optname in self._configlist[secname]: + if mcond(optname): + value = getattr(self, optname) + ttype = self._getTypeStr(optname) + strvalue = ( + ", ".join(map(str, value)) + if ttype.endswith("list") + else str(value) + ) + tlines.append("%s = %s" % (optname, strvalue)) + if len(tlines) > 0: + lines.append("[%s]" % secname) + lines.extend(tlines) + lines.append("") + rv = "\n".join(lines) + "\n" + return rv + + def resetDefault(self, optnames=None): + """Reset all values to their default value. + + :param optnames: list of str, name of options to reset, None for + all options + """ + if optnames is None: + optnames = self._optdata.keys() + for optname in optnames: + if self._optdata.has_key(optname): + setattr(self, optname, self._optdata[optname]["d"]) + self._updateSelf() + return + + ########################################################################### + # IMPORTANT call this method if you want to add options + # as class attributes!!! + + @classmethod + def initConfigClass(cls): + """Init config class and add options to class. + + IMPORTANT: call this method after you define + the metadata of your config class to add options as class attributes!!! 
+ """ + cls._preInitConfigClass() + + cls.config = ConfigParser(dict_type=OrderedDict) + cls.args = argparse.ArgumentParser( + description=cls._description, + epilog=cls._epilog, + formatter_class=argparse.RawDescriptionHelpFormatter, + ) + cls._configlist = OrderedDict({}) + + cls._optdatalist = cls._optdatalist_default + cls._optdatalist + cls._optdata = dict(cls._optdatalist) + cls._detectAddSectionsC() + for opt in cls._optdatalist: + key = opt[0] + cls._addOptC(key) + + cls._postInitConfigClass() + return + + @classmethod + def _postInitConfigClass(cls): + """Additional processes called after initConfigClass. + + overload it + """ + pass + + @classmethod + def _preInitConfigClass(cls): + """Additional processes called before initConfigClass. + + overload it + """ + pass + + +# VERY IMPORTANT!!! +# add options to class +# initConfigClass(ConfigBase) +# ConfigBase.initConfigClass() + +if __name__ == "__main__": + + test = ConfigBase() + test.updateConfig() diff --git a/diffpy/confutils/tools.py b/src/diffpy/confutils/tools.py similarity index 54% rename from diffpy/confutils/tools.py rename to src/diffpy/confutils/tools.py index 3690461..63277d6 100644 --- a/diffpy/confutils/tools.py +++ b/src/diffpy/confutils/tools.py @@ -3,7 +3,7 @@ # # diffpy.confutils by DANSE Diffraction group # Simon J. L. Billinge -# (c) 2013 Trustees of the Columbia University +# (c) 2013-2025 Trustees of the Columbia University # in the City of New York. All rights reserved. 
# # File coded by: Xiaohao Yang @@ -13,114 +13,118 @@ # ############################################################################## -import numpy as np +import hashlib import re import time import zlib -import hashlib + +import numpy as np + def _configPropertyRad(nm): - ''' - helper function of options delegation, rad to degree - ''' - rv = property(fget=lambda self: np.radians(getattr(self, nm)), - fset=lambda self, val: setattr(self, nm, np.degrees(val)), - fdel=lambda self: delattr(self, nm)) + """Helper function of options delegation, rad to degree.""" + rv = property( + fget=lambda self: np.radians(getattr(self, nm)), + fset=lambda self, val: setattr(self, nm, np.degrees(val)), + fdel=lambda self: delattr(self, nm), + ) return rv + def _configPropertyR(name): - ''' - Create a property that forwards self.name to self.config.name. - + """Create a property that forwards self.name to self.config.name. + read only - ''' - rv = property(fget=lambda self: getattr(self.config, name), - doc='attribute forwarded to self.config, read-only') + """ + rv = property( + fget=lambda self: getattr(self.config, name), + doc="attribute forwarded to self.config, read-only", + ) return rv + def _configPropertyRW(name): - ''' - Create a property that forwards self.name to self.config.name. - + """Create a property that forwards self.name to self.config.name. 
+ read and write - ''' - rv = property(fget=lambda self: getattr(self.config, nm), - fset=lambda self, value: setattr(self.config, nm, value), - fdel=lambda self: delattr(self, nm), - doc='attribute forwarded to self.config, read/write') + """ + rv = property( + fget=lambda self: getattr(self.config, name), + fset=lambda self, value: setattr(self.config, name, value), + fdel=lambda self: delattr(self, name), + doc="attribute forwarded to self.config, read/write", + ) return rv + def str2bool(v): - ''' - turn string to bool - ''' + """Turn string to bool.""" return v.lower() in ("yes", "true", "t", "1") + def opt2Str(opttype, optvalue): - ''' - turn the value of one option to string, according to the option type - list of values are truned into "value1, value2, value3..." - - :param opttype: string, type of opitons, for example 'str' or 'intlist' + """Turn the value of one option to string, according to the option + type list of values are turned into "value1, value2, value3...". + + :param opttype: string, type of options, for example 'str' or + 'intlist' :param optvalue: value of the option - :return: string, usually stored in ConfigBase.config - ''' + """ - if opttype.endswith('list'): - rv = ', '.join(map(str, optvalue)) + if opttype.endswith("list"): + rv = ", ".join(map(str, optvalue)) else: rv = str(optvalue) return rv + def StrConv(opttype): - ''' - get the type (a converter function) according to the opttype - + """Get the type (a converter function) according to the opttype. 
+ the function doesn't take list - + :param opttype: string, a type of options, could be 'str', 'int', 'float', or 'bool' - - :return: type (converter function) - - ''' - if opttype.startswith('str'): + :return: type (converter function) + """ + if opttype.startswith("str"): conv = str - elif opttype.startswith('int'): + elif opttype.startswith("int"): conv = int - elif opttype.startswith('float'): + elif opttype.startswith("float"): conv = float - elif opttype.startswith('bool'): + elif opttype.startswith("bool"): conv = str2bool else: conv = None return conv + def str2Opt(opttype, optvalue): - ''' - convert the string to value of one option, according to the option type - - :param opttype: string, type of opitons, for example 'str' or 'intlist' + """Convert the string to value of one option, according to the + option type. + + :param opttype: string, type of options, for example 'str' or + 'intlist' :param optvalue: string, value of the option - :return: value of the option, usually stored in ConfigBase.config - ''' + """ # base converter conv = StrConv(opttype) - if opttype.endswith('list'): - temp = re.split('\s*,\s*', optvalue) + if opttype.endswith("list"): + temp = re.split(r"\s*,\s*", optvalue) rv = map(conv, temp) if len(temp) > 0 else [] else: rv = conv(optvalue) return rv + class FakeConfigFile(object): - ''' - A fake configfile object used in reading config from header of data - or a real config file. 
- ''' - def __init__(self, configfile, endline='###'): + """A fake configfile object used in reading config from header of + data or a real config file.""" + + def __init__(self, configfile, endline="###"): self.configfile = configfile self.fp = open(configfile) self.endline = endline @@ -129,36 +133,31 @@ def __init__(self, configfile, endline='###'): return def readline(self): - ''' - readline function - ''' + """Readline function.""" line = self.fp.readline() if line.startswith(self.endline) or self.ended: - rv = '' + rv = "" self.ended = True else: rv = line return rv def close(self): - ''' - close the file - ''' + """Close the file.""" self.fp.close() return + def checkCRC32(filename): - ''' - calculate the crc32 value of file - + """Calculate the crc32 value of file. + :param filename: path to the file - :return: crc32 value of file - ''' + """ try: - fd = open(filename, 'rb') - except: - return 'Read error' + fd = open(filename, "rb") + except Exception: + return "Read error" eachLine = fd.readline() prev = 0 while eachLine: @@ -167,18 +166,17 @@ def checkCRC32(filename): fd.close() return prev + def checkMD5(filename, blocksize=65536): - ''' - calculate the MD5 value of file - + """Calculate the MD5 value of file. + :param filename: path to the file - :return: md5 value of file - ''' + """ try: - fd = open(filename, 'rb') - except: - return 'Read error' + fd = open(filename, "rb") + except Exception: + return "Read error" buf = fd.read(blocksize) md5 = hashlib.md5() while len(buf) > 0: @@ -187,14 +185,14 @@ def checkMD5(filename, blocksize=65536): fd.close() return md5.hexdigest() + def checkFileVal(filename): - ''' - check file integrity using crc32 and md5. It will read file twice then - compare the crc32 and md5. If two results doesn't match, it will wait until - the file is completed written to disk. - + """Check file integrity using crc32 and md5. It will read file twice + then compare the crc32 and md5. 
If two results doesn't match, it + will wait until the file is completed written to disk. + :param filename: path to the file - ''' + """ valflag = False lastcrc = checkCRC32(filename) while not valflag: diff --git a/diffpy/confutils/version.py b/src/diffpy/confutils/version.py similarity index 50% rename from diffpy/confutils/version.py rename to src/diffpy/confutils/version.py index 4b9271d..be10460 100644 --- a/diffpy/confutils/version.py +++ b/src/diffpy/confutils/version.py @@ -3,7 +3,7 @@ # # diffpy.confutils by DANSE Diffraction group # Simon J. L. Billinge -# (c) 2013 Trustees of the Columbia University +# (c) 2013-2025 Trustees of the Columbia University # in the City of New York. All rights reserved. # # File coded by: Xiaohao Yang @@ -12,21 +12,17 @@ # See LICENSE.txt for license information. # ############################################################################## +"""Definition of __version__, __date__, __gitsha__.""" -"""Definition of __version__, __date__, __gitsha__. -""" +# We do not use the other three variables, but can be added back if needed. 
+# __all__ = ["__date__", "__git_commit__", "__timestamp__", "__version__"] -from pkg_resources import resource_stream -from ConfigParser import SafeConfigParser +# obtain version information +from importlib.metadata import PackageNotFoundError, version -# obtain version information from the version.cfg file -cp = SafeConfigParser() -cp.readfp(resource_stream(__name__, 'version.cfg')) - -__version__ = cp.get('DEFAULT', 'version') -__date__ = cp.get('DEFAULT', 'date') -__gitsha__ = cp.get('DEFAULT', 'commit') - -del cp +try: + __version__ = version("diffpy.srxplanar") +except PackageNotFoundError: + __version__ = "unknown" # End of file diff --git a/src/diffpy/srxplanar/__init__.py b/src/diffpy/srxplanar/__init__.py new file mode 100644 index 0000000..4c4ed1c --- /dev/null +++ b/src/diffpy/srxplanar/__init__.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +############################################################################## +# +# (c) 2010-2025 The Trustees of Columbia University in the City of New York. +# All rights reserved. +# +# File coded by: Xiaohao Yang, Simon Billinge, Billinge Group members. +# +# See GitHub contributions for a more detailed list of contributors. +# https://github.com/diffpy/diffpy.srxplanar/graphs/contributors +# +# See LICENSE.rst for license information. +# +############################################################################## +"""Distance Printer, calculate the inter atomic distances. 
+ +Part of xPDFsuite +""" + +# package version +from diffpy.srxplanar.version import __version__ + +# silence the pyflakes syntax checker +assert __version__ or True + +# End of file diff --git a/diffpy/srxplanar/calculate.py b/src/diffpy/srxplanar/calculate.py similarity index 51% rename from diffpy/srxplanar/calculate.py rename to src/diffpy/srxplanar/calculate.py index f3aa395..05469e9 100644 --- a/diffpy/srxplanar/calculate.py +++ b/src/diffpy/srxplanar/calculate.py @@ -3,7 +3,7 @@ # # diffpy.srxplanar by DANSE Diffraction group # Simon J. L. Billinge -# (c) 2010 Trustees of the Columbia University +# (c) 2010-2025 Trustees of the Columbia University # in the City of New York. All rights reserved. # # File coded by: Xiaohao Yang @@ -14,113 +14,122 @@ ############################################################################## import numpy as np -import scipy.sparse as ssp import scipy.ndimage.filters as snf -import scipy.ndimage.morphology as snm -from diffpy.srxplanar.srxplanarconfig import _configPropertyR + +from diffpy.confutils.tools import _configPropertyR + class Calculate(object): - ''' - provide methods for integration, variance calculation and distance/Q matrix calculation etc. 
- ''' - # define configuration properties that are forwarded to self.config - xdimension = _configPropertyR('xdimension') - ydimension = _configPropertyR('ydimension') - xpixelsize = _configPropertyR('xpixelsize') - ypixelsize = _configPropertyR('ypixelsize') - xbeamcenter = _configPropertyR('xbeamcenter') - ybeamcenter = _configPropertyR('ybeamcenter') - rotation = _configPropertyR('rotation') - tilt = _configPropertyR('tilt') - distance = _configPropertyR('distance') - wavelength = _configPropertyR('wavelength') - integrationspace = _configPropertyR('integrationspace') - qmax = _configPropertyR('qmax') - qstep = _configPropertyR('qstep') - tthmax = _configPropertyR('tthmax') - tthstep = _configPropertyR('tthstep') - tthmaxd = _configPropertyR('tthmaxd') - tthstepd = _configPropertyR('tthstepd') - tthorqstep = _configPropertyR('tthorqstep') - tthorqmax = _configPropertyR('tthorqmax') - uncertaintyenable = _configPropertyR('uncertaintyenable') - sacorrectionenable = _configPropertyR('sacorrectionenable') - polcorrectionenable = _configPropertyR('polcorrectionenable') - polcorrectf = _configPropertyR('polcorrectf') - cropedges = _configPropertyR('cropedges') - extracrop = _configPropertyR('extracrop') + """Provide methods for integration, variance calculation and + distance/Q matrix calculation etc.""" + # define configuration properties that are forwarded to self.config + xdimension = _configPropertyR("xdimension") + ydimension = _configPropertyR("ydimension") + xpixelsize = _configPropertyR("xpixelsize") + ypixelsize = _configPropertyR("ypixelsize") + xbeamcenter = _configPropertyR("xbeamcenter") + ybeamcenter = _configPropertyR("ybeamcenter") + rotation = _configPropertyR("rotation") + tilt = _configPropertyR("tilt") + distance = _configPropertyR("distance") + wavelength = _configPropertyR("wavelength") + integrationspace = _configPropertyR("integrationspace") + qmax = _configPropertyR("qmax") + qstep = _configPropertyR("qstep") + tthmax = 
_configPropertyR("tthmax") + tthstep = _configPropertyR("tthstep") + tthmaxd = _configPropertyR("tthmaxd") + tthstepd = _configPropertyR("tthstepd") + tthorqstep = _configPropertyR("tthorqstep") + tthorqmax = _configPropertyR("tthorqmax") + uncertaintyenable = _configPropertyR("uncertaintyenable") + sacorrectionenable = _configPropertyR("sacorrectionenable") + polcorrectionenable = _configPropertyR("polcorrectionenable") + polcorrectf = _configPropertyR("polcorrectf") + cropedges = _configPropertyR("cropedges") + extracrop = _configPropertyR("extracrop") def __init__(self, p): - # create parameter proxy, so that parameters can be accessed by self.parametername in read-only mode + # create parameter proxy, so that parameters can be + # accessed by self.parametername in read-only mode self.config = p self.prepareCalculation() return def prepareCalculation(self): - ''' - prepare data for calculation - ''' + """Prepare data for calculation.""" self.xydimension = self.xdimension * self.ydimension - self.xr = (np.arange(self.xdimension, dtype=float) - self.xbeamcenter + 0.5) * self.xpixelsize - self.yr = (np.arange(self.ydimension, dtype=float) - self.ybeamcenter + 0.5) * self.ypixelsize - - self.xr = self.xr[self.cropedges[0]:-self.cropedges[1]] - self.yr = self.yr[self.cropedges[2]:-self.cropedges[3]] - + self.xr = ( + np.arange(self.xdimension, dtype=float) - self.xbeamcenter + 0.5 + ) * self.xpixelsize + self.yr = ( + np.arange(self.ydimension, dtype=float) - self.ybeamcenter + 0.5 + ) * self.ypixelsize + + self.xr = self.xr[self.cropedges[0] : -self.cropedges[1]] + self.yr = self.yr[self.cropedges[2] : -self.cropedges[3]] + self.dmatrix = self.genDistanceMatrix() - self.azimuthmatrix = np.arctan2(self.yr.reshape(len(self.yr), 1), - self.xr.reshape(1, len(self.xr))) + self.azimuthmatrix = np.arctan2( + self.yr.reshape(len(self.yr), 1), self.xr.reshape(1, len(self.xr)) + ) self.genTTHorQMatrix() self.perviousmaskedmatrix = np.zeros(4) return def genTTHorQMatrix(self): 
- ''' - generate a twotheta matrix or q matrix which stores the tth or q value - or each pixel - ''' + """Generate a twotheta matrix or q matrix which stores the tth + or q value or each pixel.""" # set tth or q grid - if self.integrationspace == 'twotheta': - self.bin_edges = np.r_[0, np.arange(self.tthstep / 2, self.tthmax, self.tthstep)] + if self.integrationspace == "twotheta": + self.bin_edges = np.r_[ + 0, np.arange(self.tthstep / 2, self.tthmax, self.tthstep) + ] self.xgrid = np.degrees(self.bin_edges[1:] - self.tthstep / 2) self.tthorqmatrix = self.genTTHMatrix() - elif self.integrationspace == 'qspace': - self.bin_edges = np.r_[0, np.arange(self.qstep / 2, self.qmax, self.qstep)] + elif self.integrationspace == "qspace": + self.bin_edges = np.r_[ + 0, np.arange(self.qstep / 2, self.qmax, self.qstep) + ] self.xgrid = self.bin_edges[1:] - self.qstep / 2 self.tthorqmatrix = self.genQMatrix() return def genIntegrationInds(self, mask=None): - ''' - generate self.bin_number used in integration (number of pixels in on bin) - - :param mask: 2D array, mask of image, should have same dimension, 1 for masked pixel - + """Generate self.bin_number used in integration (number of + pixels in on bin) + + :param mask: 2D array, mask of image, should have same + dimension, 1 for masked pixel :return: self.bin_number - ''' + """ self.maskedmatrix = np.array(self.tthorqmatrix) - if mask == None: + if mask is None: # mask = np.zeros((self.ydimension, self.xdimension), dtype=bool) mask = np.zeros((len(self.yr), len(self.xr)), dtype=bool) ce = self.cropedges - mask = mask[ce[2]:-ce[3], ce[0]:-ce[1]] + mask = mask[ce[2] : -ce[3], ce[0] : -ce[1]] self.maskedmatrix[mask] = 1000.0 - + # extra crop - maskedmatrix = self.getMaskedmatrixPic() - # self.bin_number = np.array(np.histogram(maskedmatrix, self.bin_edges)[0], dtype=float) + self.getMaskedmatrixPic() + # self.bin_number = np.array( + # np.histogram(maskedmatrix, self.bin_edges)[0], + # dtype=float, + # ) # 
self.bin_number[self.bin_number <= 0] = 1 return # self.bin_number def intensity(self, pic): - ''' - 2D to 1D image integration, intensity of pixels are binned and then take average, - - :param pic: 2D array, array of raw counts, corrections hould be already applied - - :retrun: 2d array, [tthorq, intensity, unceratinty] or [tthorq, intensity] - ''' + """2D to 1D image integration, intensity of pixels are binned + and then take average, + + :param pic: 2D array, array of raw counts, corrections should be + already applied + :return: 2d array, [tthorq, intensity, uncertainty] or [tthorq, + intensity] + """ intensity = self.calculateIntensity(pic) if self.uncertaintyenable: @@ -129,75 +138,79 @@ def intensity(self, pic): else: rv = np.vstack([self.xgrid, intensity]) return rv - + def getMaskedmatrixPic(self, pic=None): - ''' - return the maskedmatrix and pic using self.extracrop and self.cropedges - - :param pic: 2d array, pic array, if None, then only return maskedmatrix - - :return: croped maskedmatrix and pic - ''' + """Return the maskedmatrix and pic using self.extracrop and + self.cropedges.
+ + :param pic: 2d array, pic array, if None, then only return + maskedmatrix + :return: cropped maskedmatrix and pic + """ ec = self.extracrop ce = self.cropedges s = [ecx - cex if ecx > cex else 0 for ecx, cex in zip(ec, ce)] s[3] = -s[3] if s[3] != 0 else None s[1] = -s[1] if s[1] != 0 else None - rv = self.maskedmatrix[s[2]:s[3], s[0]:s[1]] - + rv = self.maskedmatrix[s[2] : s[3], s[0] : s[1]] + temps = np.array(s) if any(self.perviousmaskedmatrix != temps): - self.perviousmaskedmatrix = temps - self.bin_number = np.array(np.histogram(rv, self.bin_edges)[0], dtype=float) - self.bin_number[self.bin_number <= 0] = 1 - - if pic != None: + self.perviousmaskedmatrix = temps + self.bin_number = np.array( + np.histogram(rv, self.bin_edges)[0], dtype=float + ) + self.bin_number[self.bin_number <= 0] = 1 + + if pic is not None: ps = [max(s1, s2) for s1, s2 in zip(ce, ec)] - rv = self.maskedmatrix[s[2]:s[3], s[0]:s[1]], pic[ps[2]:-ps[3], ps[0]:-ps[1]] + rv = ( + self.maskedmatrix[s[2] : s[3], s[0] : s[1]], + pic[ps[2] : -ps[3], ps[0] : -ps[1]], + ) return rv - + def calculateIntensity(self, pic): - ''' - calculate the 1D intensity - - :param pic: 2D array, array of raw counts, raw counts should be corrected - - :retrun: 1d array, 1D integrated intensity - ''' - + """Calculate the 1D intensity. + + :param pic: 2D array, array of raw counts, raw counts should be + corrected + :return: 1d array, 1D integrated intensity + """ + + maskedmatrix, pic = self.getMaskedmatrixPic(pic) + + intensity = np.histogram(maskedmatrix, self.bin_edges, weights=pic)[0] return intensity / self.bin_number def calculateVariance(self, pic): - ''' - calculate the 1D intensity - - :param pic: 2D array, array of raw counts, corrections hould be already applied - - :retrun: 1d array, variance of integrated intensity - ''' + """Calculate the variance of the 1D intensity.
+ + :param pic: 2D array, array of raw counts, corrections should be + already applied + :return: 1d array, variance of integrated intensity + """ maskedmatrix = self.getMaskedmatrixPic() - + picvar = self.calculateVarianceLocal(pic) - variance = np.histogram(maskedmatrix, self.bin_edges, weights=picvar)[0] + variance = np.histogram(maskedmatrix, self.bin_edges, weights=picvar)[ + 0 + ] return variance / self.bin_number def calculateVarianceLocal(self, pic): - ''' - calculate the variance of raw counts of each pixel are calculated according to their - loacl variance. - - :param pic: 2d array, 2d image array, corrections hould be already applied - + """Calculate the variance of raw counts of each pixel are + calculated according to their local variance. + + :param pic: 2d array, 2d image array, corrections should be + already applied :return: 2d array, variance of each pixel - ''' + """ maskedmatrix, pic = self.getMaskedmatrixPic(pic) - - picavg = snf.uniform_filter(pic, 5, mode='wrap') + + picavg = snf.uniform_filter(pic, 5, mode="wrap") pics2 = (pic - picavg) ** 2 - pvar = snf.uniform_filter(pics2, 5, mode='wrap') + pvar = snf.uniform_filter(pics2, 5, mode="wrap") gain = pvar / pic inds = np.nonzero(np.logical_and(np.isnan(gain), np.isinf(gain))) @@ -207,11 +220,10 @@ def calculateVarianceLocal(self, pic): return var def genDistanceMatrix(self): - ''' - Calculate the distance matrix - + """Calculate the distance matrix. 
+ :return: 2d array, distance between source and each pixel - ''' + """ sinr = np.sin(-self.rotation) cosr = np.cos(-self.rotation) sint = np.sin(self.tilt) @@ -224,16 +236,16 @@ def genDistanceMatrix(self): dmatrix = np.zeros((len(self.yr), len(self.xr)), dtype=float) dmatrix += ((self.xr - sourcexr) ** 2).reshape(1, len(self.xr)) dmatrix += ((self.yr - sourceyr) ** 2).reshape(len(self.yr), 1) - dmatrix += sourcezr ** 2 + dmatrix += sourcezr**2 self.dmatrix = np.sqrt(dmatrix) return self.dmatrix def genTTHMatrix(self): - ''' - Calculate the diffraction angle matrix - - :return: 2d array, two theta angle (in radians) of each pixel's center - ''' + """Calculate the diffraction angle matrix. + + :return: 2d array, two theta angle (in radians) of each pixel's + center + """ sinr = np.sin(-self.rotation) cosr = np.cos(-self.rotation) @@ -243,21 +255,27 @@ def genTTHMatrix(self): sourceyr = self.distance * sint * sinr sourcezr = self.distance * cost - # tthmatrix1 = np.zeros((self.ydimension, self.xdimension), dtype=float) + # tthmatrix1 = np.zeros( + # (self.ydimension, self.xdimension), + # dtype=float, + # ) tthmatrix1 = np.zeros((len(self.yr), len(self.xr)), dtype=float) - tthmatrix1 += ((-self.xr + sourcexr) * sourcexr).reshape(1, len(self.xr)) - tthmatrix1 += ((-self.yr + sourceyr) * sourceyr).reshape(len(self.yr), 1) + tthmatrix1 += ((-self.xr + sourcexr) * sourcexr).reshape( + 1, len(self.xr) + ) + tthmatrix1 += ((-self.yr + sourceyr) * sourceyr).reshape( + len(self.yr), 1 + ) tthmatrix1 += sourcezr * sourcezr tthmatrix = np.arccos(tthmatrix1 / self.dmatrix / self.distance) self.tthmatrix = tthmatrix return tthmatrix def genQMatrix(self): - ''' - Calculate the q matrix - + """Calculate the q matrix. 
+ :return: 2d array, q value of each pixel's center - ''' + """ sinr = np.sin(-self.rotation) cosr = np.cos(-self.rotation) sint = np.sin(self.tilt) @@ -266,10 +284,17 @@ def genQMatrix(self): sourceyr = self.distance * sint * sinr sourcezr = self.distance * cost - # tthmatrix1 = np.zeros((self.ydimension, self.xdimension), dtype=float) + # tthmatrix1 = np.zeros( + # (self.ydimension, self.xdimension), + # dtype=float, + # ) tthmatrix1 = np.zeros((len(self.yr), len(self.xr)), dtype=float) - tthmatrix1 += ((-self.xr + sourcexr) * sourcexr).reshape(1, len(self.xr)) - tthmatrix1 += ((-self.yr + sourceyr) * sourceyr).reshape(len(self.yr), 1) + tthmatrix1 += ((-self.xr + sourcexr) * sourcexr).reshape( + 1, len(self.xr) + ) + tthmatrix1 += ((-self.yr + sourceyr) * sourceyr).reshape( + len(self.yr), 1 + ) tthmatrix1 += sourcezr * sourcezr tthmatrix = np.arccos(tthmatrix1 / self.dmatrix / self.distance) self.tthmatrix = tthmatrix @@ -277,41 +302,50 @@ def genQMatrix(self): return Q def genCorrectionMatrix(self): - ''' - generate correction matrix. multiple the 2D raw counts array by this correction matrix - to get corrected raw counts. It will calculate solid angle correction or polarization correction. - + """Generate correction matrix. multiple the 2D raw counts array + by this correction matrix to get corrected raw counts. It will + calculate solid angle correction or polarization correction. + :return: 2d array, correction matrix to apply on the image - ''' + """ rv = self._solidAngleCorrection() * self._polarizationCorrection() return rv def _solidAngleCorrection(self): - ''' - generate correction matrix of soild angle correction for 2D flat detector. - + """Generate correction matrix of soild angle correction for 2D + flat detector. 
+ :return: 2d array, correction matrix to apply on the image - ''' + """ if self.sacorrectionenable: sourcezr = self.distance * np.cos(self.tilt) - correction = (self.dmatrix / sourcezr) + correction = self.dmatrix / sourcezr else: correction = np.ones((len(self.yr), len(self.xr))) return correction def _polarizationCorrection(self): - ''' - generate correction matrix of polarization correction for powder diffraction for 2D flat detector. - require the self.polcorrectf factor in configuration. + """Generate correction matrix of polarization correction for + powder diffraction for 2D flat detector. require the + self.polcorrectf factor in configuration. :return: 2d array, correction matrix to apply on the image - ''' + """ if self.polcorrectionenable: - # tthmatrix = self.tthorqmatrix if self.integrationspace == 'twotheta' else self.genTTHMatrix() + # tthmatrix = ( + # self.tthorqmatrix + # if self.integrationspace == "twotheta" + # else self.genTTHMatrix() + # ) tthmatrix = self.tthmatrix azimuthmatrix = self.azimuthmatrix p = 0.5 * (1 + (np.cos(tthmatrix)) ** 2) - p1 = 0.5 * self.polcorrectf * np.cos(2 * azimuthmatrix) * (np.sin(tthmatrix)) ** 2 + p1 = ( + 0.5 + * self.polcorrectf + * np.cos(2 * azimuthmatrix) + * (np.sin(tthmatrix)) ** 2 + ) p = 1.0 / (p - p1) else: # p = np.ones((self.ydimension, self.xdimension)) diff --git a/diffpy/srxplanar/loadimage.py b/src/diffpy/srxplanar/loadimage.py similarity index 58% rename from diffpy/srxplanar/loadimage.py rename to src/diffpy/srxplanar/loadimage.py index 5a2c660..b30e97e 100644 --- a/diffpy/srxplanar/loadimage.py +++ b/src/diffpy/srxplanar/loadimage.py @@ -3,7 +3,7 @@ # # diffpy.srxplanar by DANSE Diffraction group # Simon J. L. Billinge -# (c) 2010 Trustees of the Columbia University +# (c) 2010-2025 Trustees of the Columbia University # in the City of New York. All rights reserved. 
# # File coded by: Xiaohao Yang @@ -13,13 +13,13 @@ # ############################################################################## +import fnmatch +import os import time + import numpy as np -import os -import fnmatch -import sys -from diffpy.srxplanar.srxplanarconfig import _configPropertyR -from tifffile import imsave as saveImage + +from diffpy.confutils.tools import _configPropertyR try: import fabio @@ -27,7 +27,8 @@ def openImage(im): rv = fabio.openimage.openimage(im) return rv.data -except: + +except ImportError: import tifffile def openImage(im): @@ -36,31 +37,28 @@ def openImage(im): class LoadImage(object): - ''' - provide methods to filter files and load images - ''' + """Provide methods to filter files and load images.""" + # define configuration properties that are forwarded to self.config - xdimension = _configPropertyR('xdimension') - ydimension = _configPropertyR('ydimension') - opendirectory = _configPropertyR('opendirectory') - filenames = _configPropertyR('filenames') - includepattern = _configPropertyR('includepattern') - excludepattern = _configPropertyR('excludepattern') - fliphorizontal = _configPropertyR('fliphorizontal') - flipvertical = _configPropertyR('flipvertical') + xdimension = _configPropertyR("xdimension") + ydimension = _configPropertyR("ydimension") + opendirectory = _configPropertyR("opendirectory") + filenames = _configPropertyR("filenames") + includepattern = _configPropertyR("includepattern") + excludepattern = _configPropertyR("excludepattern") + fliphorizontal = _configPropertyR("fliphorizontal") + flipvertical = _configPropertyR("flipvertical") def __init__(self, p): self.config = p return def flipImage(self, pic): - ''' - flip image if configured in config + """Flip image if configured in config. 
:param pic: 2d array, image array - :return: 2d array, flipped image array - ''' + """ if self.fliphorizontal: pic = np.array(pic[:, ::-1]) if self.flipvertical: @@ -68,14 +66,12 @@ def flipImage(self, pic): return pic def loadImage(self, filename): - ''' - load image file, if failed (for example loading an incomplete file), - then it will keep trying loading file for 5s + """Load image file, if failed (for example loading an incomplete + file), then it will keep trying loading file for 5s. :param filename: str, image file name - :return: 2d ndarray, 2d image array (flipped) - ''' + """ if os.path.exists(filename): filenamefull = filename else: @@ -85,55 +81,80 @@ def loadImage(self, filename): i = 0 while i < 10: try: - if os.path.splitext(filenamefull)[-1] == '.npy': + if os.path.splitext(filenamefull)[-1] == ".npy": image = np.load(filenamefull) else: image = openImage(filenamefull) i = 10 - except: + except FileNotFoundError: i = i + 1 time.sleep(0.5) image = self.flipImage(image) image[image < 0] = 0 return image - def genFileList(self, filenames=None, opendir=None, includepattern=None, excludepattern=None, fullpath=False): - ''' - generate the list of file in opendir according to include/exclude pattern - - :param filenames: list of str, list of file name patterns, all files match ANY pattern in this list will be included + def genFileList( + self, + filenames=None, + opendir=None, + includepattern=None, + excludepattern=None, + fullpath=False, + ): + """Generate the list of file in opendir according to + include/exclude pattern. 
+ + :param filenames: list of str, list of file name patterns, all + files match ANY pattern in this list will be included :param opendir: str, the directory to get files - :param includepattern: list of str, list of wildcard of files that will be loaded, - all files match ALL patterns in this list will be included - :param excludepattern: list of str, list of wildcard of files that will be blocked, - any files match ANY patterns in this list will be blocked - :param fullpath: bool, if true, return the full path of each file - + :param includepattern: list of str, list of wildcard of files + that will be loaded, all files match ALL patterns in this + list will be included + :param excludepattern: list of str, list of wildcard of files + that will be blocked, any files match ANY patterns in this + list will be blocked + :param fullpath: bool, if true, return the full path of each + file :return: list of str, a list of filenames - ''' + """ fileset = self.genFileSet( - filenames, opendir, includepattern, excludepattern, fullpath) + filenames, opendir, includepattern, excludepattern, fullpath + ) return sorted(list(fileset)) - def genFileSet(self, filenames=None, opendir=None, includepattern=None, excludepattern=None, fullpath=False): - ''' - generate the list of file in opendir according to include/exclude pattern - - :param filenames: list of str, list of file name patterns, all files match ANY pattern in this list will be included + def genFileSet( + self, + filenames=None, + opendir=None, + includepattern=None, + excludepattern=None, + fullpath=False, + ): + """Generate the list of file in opendir according to + include/exclude pattern. 
+ + :param filenames: list of str, list of file name patterns, all + files match ANY pattern in this list will be included :param opendir: str, the directory to get files - :param includepattern: list of str, list of wildcard of files that will be loaded, - all files match ALL patterns in this list will be included - :param excludepattern: list of str, list of wildcard of files that will be blocked, - any files match ANY patterns in this list will be blocked - :param fullpath: bool, if true, return the full path of each file - + :param includepattern: list of str, list of wildcard of files + that will be loaded, all files match ALL patterns in this + list will be included + :param excludepattern: list of str, list of wildcard of files + that will be blocked, any files match ANY patterns in this + list will be blocked + :param fullpath: bool, if true, return the full path of each + file :return: set of str, a list of filenames - ''' - filenames = self.filenames if filenames == None else filenames - opendir = self.opendirectory if opendir == None else opendir - includepattern = self.includepattern if includepattern == None else includepattern - excludepattern = self.excludepattern if excludepattern == None else excludepattern + """ + filenames = self.filenames if filenames is None else filenames + opendir = self.opendirectory if opendir is None else opendir + includepattern = ( + self.includepattern if includepattern is None else includepattern + ) + excludepattern = ( + self.excludepattern if excludepattern is None else excludepattern + ) # filter the filenames according to include and exclude pattern filelist = os.listdir(opendir) fileset = set() @@ -149,6 +170,7 @@ def genFileSet(self, filenames=None, opendir=None, includepattern=None, excludep fileset = fileset1 if fullpath: filelist = map( - lambda x: os.path.abspath(os.path.join(opendir, x)), fileset) + lambda x: os.path.abspath(os.path.join(opendir, x)), fileset + ) fileset = set(filelist) return fileset diff 
--git a/src/diffpy/srxplanar/mask.py b/src/diffpy/srxplanar/mask.py new file mode 100644 index 0000000..e4aa188 --- /dev/null +++ b/src/diffpy/srxplanar/mask.py @@ -0,0 +1,282 @@ +#!/usr/bin/env python +############################################################################## +# +# diffpy.srxplanar by DANSE Diffraction group +# Simon J. L. Billinge +# (c) 2010-2025 Trustees of the Columbia University +# in the City of New York. All rights reserved. +# +# File coded by: Xiaohao Yang +# +# See AUTHORS.txt for a list of people who contributed. +# See LICENSE.txt for license information. +# +############################################################################## + +import numpy as np + +try: + import fabio + + def openImage(im): + rv = fabio.openimage.openimage(im) + return rv.data + +except ImportError: + import tifffile + + print("Only tiff or .npy mask is support since fabio is not available") + + def openImage(im): + try: + rv = tifffile.imread(im) + except (ValueError, OSError): + rv = 0 + return rv + + +import os + +import scipy.ndimage.filters as snf +import scipy.ndimage.morphology as snm + +from diffpy.confutils.tools import _configPropertyR + + +class Mask(object): + """Provide methods for mask generation, including: + + static mask: tif mask, npy mask + dynamic mask: masking dark pixels, bright pixels + """ + + xdimension = _configPropertyR("xdimension") + ydimension = _configPropertyR("ydimension") + fliphorizontal = _configPropertyR("fliphorizontal") + flipvertical = _configPropertyR("flipvertical") + wavelength = _configPropertyR("wavelength") + maskfile = _configPropertyR("maskfile") + brightpixelmask = _configPropertyR("brightpixelmask") + darkpixelmask = _configPropertyR("darkpixelmask") + cropedges = _configPropertyR("cropedges") + avgmask = _configPropertyR("avgmask") + + def __init__(self, p, calculate): + self.config = p + self.staticmask = np.zeros((self.ydimension, self.xdimension)) + self.dynamicmask = None + self.calculate = 
calculate + return + + def staticMask(self, maskfile=None): + """Create a static mask according existing mask file. This mask + remain unchanged for different images. + + :param maskfile: string, file name of mask, + mask file supported: .npy, .tif file, ATTN: mask in .npy form + should be already flipped, + and 1 (or larger) stands for masked pixels, + 0(<0) stands for unmasked pixels + + :return: 2d array of boolean, 1 stands for masked pixel + """ + maskfile = self.maskfile if maskfile is None else maskfile + + if os.path.exists(maskfile): + if maskfile.endswith(".npy"): + rv = np.load(maskfile) + elif maskfile.endswith(".tif"): + immask = openImage(maskfile) + rv = self.flipImage(immask) + else: + rv = np.zeros((self.ydimension, self.xdimension)) + + self.staticmask = rv > 0 + return self.staticmask + + def dynamicMask( + self, + pic, + dymask=None, + brightpixelmask=None, + darkpixelmask=None, + avgmask=None, + ): + """Create a dynamic mask according to image array. This mask + changes for different images. 
+ + :param pic: 2d array, image array to be processed + :param dymask: 2d array, mask array used in average mask + calculation + :param brightpixelmask: pixels with much lower intensity compare + to adjacent pixels will be masked + :param darkpixelmask: pixels with much higher intensity compare + to adjacent pixels will be masked + :param avgmask: Mask the pixels too bright or too dark compared + to the average intensity at the similar diffraction angle + :return: 2d array of boolean, 1 stands for masked pixel + """ + + brightpixelmask = ( + self.brightpixelmask + if brightpixelmask is None + else brightpixelmask + ) + darkpixelmask = ( + self.darkpixelmask if darkpixelmask is None else darkpixelmask + ) + avgmask = self.avgmask if avgmask is None else avgmask + + if darkpixelmask or brightpixelmask or avgmask: + rv = np.zeros((self.ydimension, self.xdimension)) + if darkpixelmask: + rv += self.darkPixelMask(pic) + if brightpixelmask: + rv += self.brightPixelMask(pic) + if avgmask: + rv += self.avgMask(pic, dymask=dymask) + self.dynamicmask = rv > 0 + else: + self.dynamicmask = None + return self.dynamicmask + + def edgeMask(self, cropedges=None): + """Generate edge mask. + + :param cropedges: crop the image, maske pixels around the image + edge (left, right, top, bottom), must larger than 0, if + None, use self.corpedges + """ + ce = self.cropedges if cropedges is None else cropedges + mask = np.ones((self.ydimension, self.xdimension), dtype=bool) + mask[ce[2] : -ce[3], ce[0] : -ce[1]] = 0 + return mask + + def avgMask(self, image, high=None, low=None, dymask=None, cropedges=None): + """Generate a mask that automatically mask the pixels, whose + intensities are too high or too low compare to the pixels which + have similar twotheta value. 
+ + :param image: 2d array, image file (array) + :param high: float (default: 2.0), int > avgint * high will be + masked + :param low: float (default: 0.5), int < avgint * low will be + masked + :param dymask: 2d bool array, mask array used in calculation, + True for masked pixel, if None, then use self.staticmask + :param cropedges: crop the image, maske pixels around the image + edge (left, right, top, bottom), must larger than 0, if + None, use self.config.corpedges :return 2d bool array, True + for masked pixel, edgemake included, dymask not included + """ + if dymask is None: + dymask = self.staticmask + high = self.config.avgmaskhigh if high is None else high + low = self.config.avgmasklow if low is None else low + + self.calculate.genIntegrationInds(dymask) + chi = self.calculate.intensity(image) + index = np.rint( + self.calculate.tthorqmatrix / self.config.tthorqstep + ).astype(int) + index[index >= len(chi[1]) - 1] = len(chi[1]) - 1 + avgimage = chi[1][index.ravel()].reshape(index.shape) + mask = np.ones((self.ydimension, self.xdimension), dtype=bool) + ce = self.cropedges if cropedges is None else cropedges + mask[ce[2] : -ce[3], ce[0] : -ce[1]] = np.logical_or( + image[ce[2] : -ce[3], ce[0] : -ce[1]] < avgimage * low, + image[ce[2] : -ce[3], ce[0] : -ce[1]] > avgimage * high, + ) + return mask + + def darkPixelMask(self, pic, r=None): + """Pixels with much lower intensity compare to adjacent pixels + will be masked. 
+ + :param pic: 2d array, image array to be processed + :param r: float, a threshold for masked pixels + :return: 2d array of boolean, 1 stands for masked pixel + """ + r = self.config.darkpixelr if r is None else r # 0.1 + + avgpic = np.average(pic) + ks = np.ones((5, 5)) + ks1 = np.ones((7, 7)) + picb = snf.percentile_filter(pic, 5, 3) < avgpic * r + picb = snm.binary_dilation(picb, structure=ks) + picb = snm.binary_erosion(picb, structure=ks1) + return picb + + def brightPixelMask(self, pic, size=None, r=None): + """Pixels with much higher intensity compared to adjacent pixels + will be masked, this mask is used when there are some bright + spots/pixels whose intensity is higher than its neighbors but + not too high. Only use this on a very good powder averaged data. + Otherwise it may mask wrong pixels. + + This mask has similar functions as 'selfcorr' function. However, + this mask will only consider pixels' local neighbors pixels and + tend to mask more pixels. While 'selfcorr' function compares one + pixel to other pixels in same bin. + + :param pic: 2d array, image array to be processed + :param size: int, size of local testing area + :param r: float, a threshold for masked pixels + :return: 2d array of boolean, 1 stands for masked pixel + """ + size = self.config.brightpixelsize if size is None else size # 5 + r = self.config.brightpixelr if r is None else r # 1.2 + + rank = snf.rank_filter(pic, -size, size) + ind = snm.binary_dilation(pic > rank * r, np.ones((3, 3))) + return ind + + def undersample(self, undersamplerate): + """A special mask used for undersampling image. It will create a + mask that discards (total number*(1-undersamplerate)) pixels + :param undersamplerate: float, 0~1, ratio of pixels to keep. + + :return: 2d array of boolean, 1 stands for masked pixel + """ + mask = ( + np.random.rand(self.ydimension, self.xdimension) < undersamplerate + ) + return mask + + def flipImage(self, pic): + """Flip image if configured in config.
    def saveMask(self, filename, pic=None, addmask=None):
        """Generate a mask according to addmask and pic and save it to
        a .npy file; 1 stands for a masked pixel.  The saved mask has
        the same orientation as the pic, so if the pic is flipped the
        mask is flipped too (a pic loaded through loadimage is
        flipped).

        :param filename: str, name of the mask file to save
        :param pic: 2d array, image array; when given, it is used to
            build a dynamic mask
        :param addmask: list of str, controls which masks to generate
        :return: 2d array of boolean, True/1 stands for masked pixel
        """
        # Lazily build the static mask the first time this is called.
        # NOTE(review): normalMask/dynamicMask are sibling methods not
        # visible here; assumed to set self.mask / self.dynamicmask.
        if not hasattr(self, "mask"):
            self.normalMask(addmask)
        # A dynamic mask is only computed when an image is available.
        if (not hasattr(self, "dynamicmask")) and (pic is not None):
            self.dynamicMask(pic, addmask=addmask)
        tmask = self.mask
        if hasattr(self, "dynamicmask"):
            if self.dynamicmask is not None:
                # Merge static and dynamic masks; without an image the
                # dynamic mask is ignored and only self.mask is saved.
                tmask = (
                    np.logical_or(self.mask, self.dynamicmask)
                    if pic is not None
                    else self.mask
                )
        np.save(filename, tmask)
        return tmask
+# +############################################################################## + +import os + +import numpy as np + +from diffpy.confutils.tools import _configPropertyR + + +class SaveResults(object): + """Save results into files.""" + + integrationspace = _configPropertyR("integrationspace") + savedirectory = _configPropertyR("savedirectory") + gsasoutput = _configPropertyR("gsasoutput") + filenameplus = _configPropertyR("filenameplus") + + def __init__(self, p): + self.config = p + self.prepareCalculation() + return + + def prepareCalculation(self): + if not os.path.exists(self.savedirectory): + os.makedirs(self.savedirectory) + return + + def getFilePathWithoutExt(self, filename): + """Get the normalized full path of filename with out extension. + + :param filename: string, could be full path or file name only + and with/without ext, only the base part of filename is + used. + :return: string, full normalized path of file without extension + """ + filebase = os.path.splitext(os.path.split(filename)[1])[0] + if self.filenameplus != "" and self.filenameplus is not None: + filenamep = "_".join( + [filebase, self.filenameplus, self.integrationspace] + ) + else: + filenamep = "_".join([filebase, self.integrationspace]) + filepathwithoutext = os.path.join(self.savedirectory, filenamep) + return filepathwithoutext + + def save(self, rv): + """Save diffraction intensity in .chi and gsas format(optional) + + :param rv: dict, result include integrated diffration intensity + the rv['chi'] should be a 2d array with shape (2,len of + intensity) or (3, len of intensity) file name is generated + according to original file name and savedirectory + """ + rv = self.saveChi(rv["chi"], rv["filename"]) + if self.gsasoutput: + if self.gsasoutput in set(["std", "esd", "fxye"]): + rv = [rv, self.saveGSAS(rv["chi"], rv["filename"])] + return rv + + def saveChi(self, xrd, filename): + """Save diffraction intensity in .chi. 
+ + :param xrd: 2d array with shape (2,len of intensity) or (3, len + of intensity), [tthorq, intensity, (unceratinty)] + :param filename: str, base file name + """ + filepath = self.getFilePathWithoutExt(filename) + ".chi" + f = open(filepath, "wb") + f.write(self.config.getHeader(mode="short")) + f.write("#### start data\n") + np.savetxt(f, xrd.transpose(), fmt="%g") + f.close() + return filepath + + def saveGSAS(self, xrd, filename): + """Save diffraction intensity in gsas format. + + :param xrd: 2d array with shape (2,len of intensity) or (3, len + of intensity), [tthorq, intensity, (unceratinty)] + :param filename: str, base file name + """ + filepath = self.getFilePathWithoutExt(filename) + ".gsas" + f = open(filepath, "wb") + f.write(self.config.getHeader(mode="short")) + f.write("#### start data\n") + if xrd.shape[0] == 3: + s = writeGSASStr( + os.path.splitext(filepath)[0], + self.gsasoutput, + xrd[0], + xrd[1], + xrd[2], + ) + elif xrd.shape[0] == 2: + s = writeGSASStr( + os.path.splitext(filepath)[0], self.gsasoutput, xrd[0], xrd[1] + ) + f.write(s) + f.close() + return filepath + + +def writeGSASStr(name, mode, tth, iobs, esd=None): + """Return string of integrated intensities in GSAS format. :param + mode: string, gsas file type, could be 'std', 'esd', 'fxye' (gsas + format) :param tth: ndarray, two theta angle :param iobs: ndarray, + Xrd intensity :param esd: ndarray, optional error value of + intensity. 
def writeGSASStr(name, mode, tth, iobs, esd=None):
    """Format integrated intensities as a GSAS powder data string.

    :param name: str, title written into the header line
    :param mode: str, GSAS bank type: 'std', 'esd' or 'fxye'
    :param tth: ndarray, two theta angles (degrees)
    :param iobs: ndarray, observed intensities
    :param esd: ndarray or None, standard deviations of iobs; when
        None the output falls back to 'std' mode
    :return: str, file content with CRLF line endings and records
        padded to 80 columns as GSAS expects
    """
    maxintensity = 999999
    # Scale intensities down so the largest value fits the fixed-width
    # field; never scale up (logscale is capped at 0).
    logscale = np.floor(np.log10(maxintensity / np.max(iobs)))
    logscale = min(logscale, 0)
    scale = 10 ** int(logscale)
    lines = []
    ltitle = "Angular Profile"
    ltitle += ": %s" % name
    ltitle += " scale=%g" % scale
    if len(ltitle) > 80:
        ltitle = ltitle[:80]
    lines.append("%-80s" % ltitle)
    ibank = 1
    nchan = len(iobs)
    # two-theta start and step in centidegrees
    tth0_cdg = tth[0] * 100
    dtth_cdg = (tth[-1] - tth[0]) / (len(tth) - 1) * 100
    if esd is None:
        mode = "std"
    if mode == "std":
        nrec = int(np.ceil(nchan / 10.0))
        lbank = "BANK %5i %8i %8i CONST %9.5f %9.5f %9.5f %9.5f STD" % (
            ibank,
            nchan,
            nrec,
            tth0_cdg,
            dtth_cdg,
            0,
            0,
        )
        lines.append("%-80s" % lbank)
        lrecs = ["%2i%6.0f" % (1, ii * scale) for ii in iobs]
        for i in range(0, len(lrecs), 10):
            lines.append("".join(lrecs[i : i + 10]))
    if mode == "esd":
        nrec = int(np.ceil(nchan / 5.0))
        lbank = "BANK %5i %8i %8i CONST %9.5f %9.5f %9.5f %9.5f ESD" % (
            ibank,
            nchan,
            nrec,
            tth0_cdg,
            dtth_cdg,
            0,
            0,
        )
        lines.append("%-80s" % lbank)
        # Bug fix: the intensity must be scaled like in the 'std' and
        # 'fxye' branches; the original wrote the raw intensity next to
        # a scaled esd, producing inconsistent (intensity, esd) pairs.
        lrecs = [
            "%8.0f%8.0f" % (ii * scale, ee * scale)
            for ii, ee in zip(iobs, esd)
        ]
        for i in range(0, len(lrecs), 5):
            lines.append("".join(lrecs[i : i + 5]))
    if mode == "fxye":
        nrec = nchan
        lbank = "BANK %5i %8i %8i CONST %9.5f %9.5f %9.5f %9.5f FXYE" % (
            ibank,
            nchan,
            nrec,
            tth0_cdg,
            dtth_cdg,
            0,
            0,
        )
        lines.append("%-80s" % lbank)
        # NOTE(review): tth is also multiplied by the intensity scale
        # here; behavior preserved from the original - verify against
        # the GSAS FXYE specification.
        lrecs = [
            "%22.10f%22.10f%24.10f" % (xx * scale, yy * scale, ee * scale)
            for xx, yy, ee in zip(tth, iobs, esd)
        ]
        for i in range(len(lrecs)):
            lines.append("%-80s" % lrecs[i])
    # GSAS requires the final record padded to 80 columns as well.
    lines[-1] = "%-80s" % lines[-1]
    rv = "\r\n".join(lines) + "\r\n"
    return rv
def halfcut(
    p, srx, image, xycenter, qind=None, show=False, mode="x", output=0
):
    """Cut the image into two halves, integrate both and compare the
    results; with correct calibration the opposing halves produce the
    same 1D pattern.

    :param p: calibration parameter(s) being refined
    :param srx: SrXplanar object used to do the integration
    :param image: str or 2d array, image to be calibrated
    :param xycenter: [int, int], cut position
    :param qind: [int, int], index range of q used in the comparison;
        defaults to [50, 500] (mutable default replaced by None)
    :param show: bool, True to plot the cut
    :param mode: str, one of 'x', 'y', 'tilt', 'rotation', 'all',
        'xy', 'show'
    :param output: int, 0 to return the sum of squared differences,
        1 to return the difference array
    :return: float or 1d array, depending on ``output``
    """
    if qind is None:
        # Fresh list per call; the original used a mutable default.
        qind = [50, 500]
    # Push the trial parameter(s) into the configuration.
    if mode == "x":
        srx.updateConfig(xbeamcenter=p)
    elif mode == "y":
        srx.updateConfig(ybeamcenter=p)
    elif mode == "tilt":
        srx.updateConfig(tiltd=p)
    elif mode == "rotation":
        srx.updateConfig(rotationd=p)
    elif mode == "all":
        srx.updateConfig(
            xbeamcenter=p[0], ybeamcenter=p[1], rotationd=p[2], tiltd=p[3]
        )
    elif mode == "xy":
        srx.updateConfig(xbeamcenter=p[0], ybeamcenter=p[1])
    elif mode == "show":
        pass

    srx.prepareCalculation()
    kwargs = {
        "savename": None,
        "savefile": False,
        "flip": False,
        "correction": False,
    }
    # Bug fix: define all four results so plotRes below can always be
    # called; the original raised NameError when show=True with mode
    # 'x' or 'y'.
    res1 = res2 = res3 = res4 = None
    if mode != "y":
        # integrate the left and the right half separately
        srx.config.extracrop = [1, srx.config.xdimension - xycenter[0], 1, 1]
        res1 = srx.integrate(image, **kwargs)
        chi1 = res1["chi"][1][qind[0] : qind[1]]

        srx.config.extracrop = [xycenter[0], 1, 1, 1]
        res2 = srx.integrate(image, **kwargs)
        chi2 = res2["chi"][1][qind[0] : qind[1]]

    if mode != "x":
        # integrate the top and the bottom half separately
        srx.config.extracrop = [1, 1, 1, srx.config.ydimension - xycenter[1]]
        res3 = srx.integrate(image, **kwargs)
        chi3 = res3["chi"][1][qind[0] : qind[1]]

        srx.config.extracrop = [1, 1, xycenter[1], 1]
        res4 = srx.integrate(image, **kwargs)
        chi4 = res4["chi"][1][qind[0] : qind[1]]

    # Normalized difference between the opposing halves.
    if mode == "x":
        rv = (chi1 - chi2) / (chi1 + chi2).mean()
    elif mode == "y":
        rv = (chi3 - chi4) / (chi3 + chi4).mean()
    else:
        rv = np.concatenate(
            [
                (chi1 - chi2) / (chi1 + chi2).mean(),
                (chi3 - chi4) / (chi3 + chi4).mean(),
            ]
        )

    rv0 = np.sum(rv**2)
    # Progress output kept from the original (used when refining).
    print(p)
    print(rv0)
    if output == 0:
        rv = rv0

    if show and mplenabled:
        print(p)
        print(rv)
        plotRes(mode, res1, res2, res3, res4)
    return rv
def selfCalibrateX(
    srx,
    image,
    xycenter=None,
    mode="all",
    output=0,
    showresults=False,
    qrange=None,
    **kwargs,
):
    """Do the self calibration for a single mode.

    The initial values are read from the srx object and the refined
    values are written back into it.

    :param srx: SrXplanar object used to do the integration
    :param image: str or 2d array, image to be calibrated
    :param xycenter: [int, int], cut position; if None it is derived
        from the current beam center
    :param mode: str, one of 'x', 'y', 'xy', 'tilt', 'rotation', 'all'
    :param output: int, 0 to use a scalar/Powell minimizer, 1 to use
        leastsq
    :param showresults: bool, plot the halfcut result when done
    :param qrange: [float, float] or None, q range used in computing
        the difference; None means [None, None]
    :return: list, refined parameters
    """
    if qrange is None:
        qrange = [None, None]
    # Back up the options that are temporarily overridden below.
    bak = {}
    for opt in [
        "uncertaintyenable",
        "integrationspace",
        "qmax",
        "qstep",
        "cropedges",
        "extracrop",
        "brightpixelmask",
        "darkpixelmask",
        "avgmask",
    ]:
        bak[opt] = getattr(srx.config, opt)

    # Bug fix: the original unconditionally overwrote a caller-supplied
    # xycenter; only derive it from the beam center when not provided.
    if xycenter is None:
        xycenter = [int(srx.config.xbeamcenter), int(srx.config.ybeamcenter)]

    qmax = srx.config.qmax
    qstep = qmax / srx.config.xdimension

    srx.updateConfig(
        uncertaintyenable=False,
        integrationspace="qspace",
        qstep=qstep,
        brightpixelmask=False,
        darkpixelmask=False,
        avgmask=False,
    )
    # Bug fix: use integer division for the defaults; on Python 3 the
    # original produced floats that broke array slicing in halfcut.
    qind = [None, None]
    qind[0] = (
        int(qrange[0] / qstep)
        if qrange[0] is not None
        else srx.config.xdimension // 20
    )
    qind[0] = 0 if qind[0] < 0 else qind[0]
    qind[1] = (
        int(qrange[1] / qstep)
        if qrange[1] is not None
        else srx.config.xdimension // 2
    )
    qind[1] = (
        srx.config.xdimension - 5
        if qind[1] > srx.config.xdimension - 5
        else qind[1]
    )

    srx.prepareCalculation()
    srxconfig = srx.config
    image = np.array(srx._getPic(image))

    # Objective: difference between opposing half-image integrations.
    func = partial(
        halfcut,
        srx=srx,
        image=image,
        qind=qind,
        mode=mode,
        output=output,
        xycenter=xycenter,
        show=False,
    )

    # Bug fix: dict.has_key was removed in Python 3.
    xywidth = kwargs.get("xywidth", 6)
    if mode == "x":
        p0 = [srxconfig.xbeamcenter]
        bounds = (p0[0] - xywidth, p0[0] + xywidth)
    elif mode == "y":
        p0 = [srxconfig.ybeamcenter]
        bounds = (p0[0] - xywidth, p0[0] + xywidth)
    elif mode == "tilt":
        p0 = [srxconfig.tiltd]
        bounds = (p0[0] - 5, p0[0] + 5)
    elif mode == "rotation":
        p0 = [srxconfig.rotationd]
        bounds = (0, 360)
    elif mode == "all":
        p0 = [
            srxconfig.xbeamcenter,
            srxconfig.ybeamcenter,
            srxconfig.rotationd,
            srxconfig.tiltd,
        ]
        bounds = [
            [p0[0] - xywidth, p0[0] + xywidth],
            [p0[1] - xywidth, p0[1] + xywidth],
            [0, 360],
            [srxconfig.tiltd - 10, srxconfig.tiltd + 10],
        ]
    elif mode == "xy":
        p0 = [srxconfig.xbeamcenter, srxconfig.ybeamcenter]
        bounds = [
            [p0[0] - xywidth, p0[0] + xywidth],
            [p0[1] - xywidth, p0[1] + xywidth],
        ]

    if output == 0:
        if mode in ["x", "y", "tilt", "rotation"]:
            # 1d parameter: use the grid-refine helper minimize1.
            rv = minimize1(func, bounds)
            p = [rv]
        else:
            rv = minimize(
                func,
                p0,
                method="Powell",
                bounds=bounds,
                options={"xtol": 0.001, "ftol": 0.001},
            )
            p = rv.x
    else:
        rv = leastsq(func, p0, epsfcn=0.001)
        p = rv[0]

    print(p)
    # Write the refined values back while restoring the backed-up
    # options.
    if mode == "x":
        srx.updateConfig(xbeamcenter=p[0], **bak)
    elif mode == "y":
        srx.updateConfig(ybeamcenter=p[0], **bak)
    elif mode == "tilt":
        srx.updateConfig(tiltd=p[0], **bak)
    elif mode == "rotation":
        # Bug fix: the original wrote back "rotation", inconsistent with
        # the option name "rotationd" used everywhere else.
        srx.updateConfig(rotationd=p[0], **bak)
    elif mode == "xy":
        srx.updateConfig(xbeamcenter=p[0], ybeamcenter=p[1], **bak)
    elif mode == "all":
        srx.updateConfig(
            xbeamcenter=p[0],
            ybeamcenter=p[1],
            rotationd=p[2],
            tiltd=p[3],
            **bak,
        )

    if showresults:
        halfcut(
            [],
            srx=srx,
            image=image,
            xycenter=xycenter,
            qind=qind,
            show=True,
            mode="show",
            output=output,
        )
    return p
def selfCalibrate(
    srx,
    image,
    mode="xy",
    cropedges="auto",
    showresults=False,
    qrange=None,
    **kwargs,
):
    """Do the self calibration.

    The initial values are read from the srx object and the refined
    values are written back into it.

    :param srx: SrXplanar object used to do the integration
    :param image: str or 2d array, image to be calibrated
    :param mode: str or list of str:
        'all' refines all parameters at once; 'xy' refines x and y;
        a list of str (e.g. ['x', 'y', 'xy']) refines each in turn
    :param cropedges: list of int or str; a list/tuple is used directly
        as the cropedges; 'auto' picks a slice or box based on mode;
        'x'/'y' use a slice along that axis; 'box' uses a box around
        the center; anything else uses a thin 10-pixel border
    :param showresults: bool, plot the halfcut result when done
    :param qrange: q range used in calculating the difference
    :return: list, refined parameters
    """
    p = []
    if isinstance(mode, str):
        xc = srx.config.xbeamcenter
        yc = srx.config.ybeamcenter
        xd = srx.config.xdimension
        yd = srx.config.ydimension

        if not isinstance(cropedges, (list, tuple)):
            # Bug fix: use integer division; cropedges are used as
            # array indices and must be ints on Python 3.
            if cropedges == "y" or (cropedges == "auto" and mode == "y"):
                ce = [int(xc - 50), int(xd - xc - 50), yd // 100, yd // 100]
            elif cropedges == "x" or (cropedges == "auto" and mode == "x"):
                ce = [xd // 100, xd // 100, int(yc - 50), int(yd - yc - 50)]
            elif cropedges == "box" or (
                cropedges == "auto" and (mode not in ["x", "y"])
            ):
                ce = [
                    int(xc - xd / 6),
                    int(xd - xc - xd / 6),
                    int(yc - yd / 6),
                    int(yd - yc - yd / 6),
                ]
            else:
                ce = [10, 10, 10, 10]
        else:
            # Bug fix: the original never assigned ce for an explicit
            # list/tuple and hit NameError below, despite the docstring
            # promising a list would be used directly.
            ce = list(cropedges)

        cebak = srx.config.cropedges
        srx.updateConfig(cropedges=ce)
        p = selfCalibrateX(
            srx,
            image,
            mode=mode,
            showresults=showresults,
            qrange=qrange,
            **kwargs,
        )
        srx.updateConfig(cropedges=cebak)

    elif isinstance(mode, (list, tuple)):
        for m in mode:
            # Bug fix: forward showresults and extra kwargs; the
            # original silently dropped them when looping over modes.
            p = selfCalibrate(
                srx,
                image,
                m,
                cropedges,
                showresults=showresults,
                qrange=qrange,
                **kwargs,
            )
    return p
Billinge -# (c) 2010 Trustees of the Columbia University +# (c) 2010-2025 Trustees of the Columbia University # in the City of New York. All rights reserved. # # File coded by: Xiaohao Yang @@ -12,43 +12,48 @@ # See LICENSE.txt for license information. # ############################################################################## +"""Srxplanar main modular.""" -''' -srxplanar main modular -''' +import os +import sys import numpy as np -import scipy.sparse as ssp -import os, sys -# import time -from diffpy.srxplanar.srxplanarconfig import SrXplanarConfig from diffpy.srxplanar.calculate import Calculate from diffpy.srxplanar.loadimage import LoadImage from diffpy.srxplanar.mask import Mask from diffpy.srxplanar.saveresults import SaveResults +from diffpy.srxplanar.srxplanarconfig import SrXplanarConfig + +# import time + class SrXplanar(object): - ''' - main modular for srxplanar - ''' - - def __init__(self, srxplanarconfig=None, configfile=None, args=None, **kwargs): - ''' - init srxplanar form a SrXplanarConfig instance, or config file, or args passed from cmd - or kwargs. If both SrXplanarConfig instance and other configfile/args/kwargs is specified, - it will first init from config instance then update using configfile/args/kwargs - - :param srxplanarconfig: SrXplanarConfig, init srxplanar from a config instance + """Main modular for srxplanar.""" + + def __init__( + self, srxplanarconfig=None, configfile=None, args=None, **kwargs + ): + """Init srxplanar form a SrXplanarConfig instance, or config + file, or args passed from cmd or kwargs. If both SrXplanarConfig + instance and other configfile/args/kwargs is specified, it will + first init from config instance then update using + configfile/args/kwargs. 
+ + :param srxplanarconfig: SrXplanarConfig, init srxplanar from a + config instance :param configfile: string, name of config file :param args: list of str, usually be sys.argv - :param kwargs: you can use like 'xbeamcenter=1024' or a dict to update the value of xbeamcenter - ''' - if srxplanarconfig != None: + :param kwargs: you can use like 'xbeamcenter=1024' or a dict to + update the value of xbeamcenter + """ + if srxplanarconfig is not None: self.config = srxplanarconfig self.config.updateConfig(filename=configfile, args=args, **kwargs) else: - self.config = SrXplanarConfig(filename=configfile, args=args, **kwargs) + self.config = SrXplanarConfig( + filename=configfile, args=args, **kwargs + ) # init modulars self.loadimage = LoadImage(self.config) self.calculate = Calculate(self.config) @@ -57,15 +62,15 @@ def __init__(self, srxplanarconfig=None, configfile=None, args=None, **kwargs): return def updateConfig(self, filename=None, args=None, **kwargs): - ''' - update config using configfile/args/kwargs, then rerun all prepareCalculation() - + """Update config using configfile/args/kwargs, then rerun all + prepareCalculation() + :param configfile: string, name of config file :param args: list of str, usually be sys.argv - :param kwargs: you can use like 'xbeamcenter=1024' or a dict to update the value of xbeamcenter - + :param kwargs: you can use like 'xbeamcenter=1024' or a dict to + update the value of xbeamcenter :return: None - ''' + """ self.config.updateConfig(filename=filename, args=args, **kwargs) # update instances self.calculate.prepareCalculation() @@ -73,238 +78,287 @@ def updateConfig(self, filename=None, args=None, **kwargs): return def prepareCalculation(self, pic=None): - ''' - prepare data used in calculation - - :param pic: str, list of str, or 2d array, if provided, and automask is True, then - generate a dynamic mask - + """Prepare data used in calculation. 
+ + :param pic: str, list of str, or 2d array, if provided, and + automask is True, then generate a dynamic mask :return: None - ''' + """ self.staticmask = self.mask.staticMask() self.correction = self.calculate.genCorrectionMatrix() self.staticmask = np.logical_or(self.mask.edgeMask(), self.staticmask) self.calculate.genIntegrationInds(self.staticmask) return - + def _picChanged(self, extramask=None): - ''' - update all pic related data (such as dynamic mask) when a new image is read - + """Update all pic related data (such as dynamic mask) when a new + image is read. + :param extramask: 2d array, extra mask applied in integration - :return: None - ''' + """ dynamicmask = self.mask.dynamicMask(self.pic, dymask=self.staticmask) - if dynamicmask != None: + if dynamicmask is not None: mask = np.logical_or(self.staticmask, dynamicmask) - if extramask != None: + if extramask is not None: mask = np.logical_or(mask, extramask) - elif extramask != None: + elif extramask is not None: mask = np.logical_or(self.staticmask, extramask) else: mask = self.staticmask - if (dynamicmask != None) or (extramask != None): + if (dynamicmask is not None) or (extramask is not None): self.calculate.genIntegrationInds(mask) return def _getSaveFileName(self, imagename=None, filename=None): - ''' - get the save file name, the priority order is self.output> filename> imagename > 'output'(default name) - - :param imagename: string, filename/path of image file (drop this term if it is an image array) - :param filename: string, - - :return: string, name of file to be saved - ''' - rv = 'output' - if self.config.output != None and self.config.output != '': + """Get the save file name, the priority order is self.output> + filename> imagename > 'output'(default name) + + :param imagename: string, filename/path of image file (drop this + term if it is an image array) + :param filename: string, + :return: string, name of file to be saved + """ + rv = "output" + if self.config.output is not None and 
self.config.output != "": rv = self.config.output - elif filename != None: + elif filename is not None: rv = filename - elif imagename != None and isinstance(imagename, (str, unicode)): + elif imagename is not None and isinstance(imagename, str): rv = imagename return rv def _getPic(self, image, flip=None, correction=None): - ''' - load picture to 2d array - - :param image: could be a string, a list of string or a 2d array, + """Load picture to 2d array. + + :param image: could be a string, a list of string or a 2d array, if string, load the image file using the string as the path. - if list of string, load the image files using the string as their path - and sum them togethor + if list of strings, load the image files using the string + as their path and sum them together if 2d array, use that array directly :param flip: flip the image/2d array, - if None: flip on the string/list of string, not flip on the 2d array + if None: flip on the string/list of strings, + not flip on the 2d array Flip behavior is controlled in self.config - :param correction: apply correction to the returned 2d array - if None: correct on the string/list of string, not correct on the 2d array - + :param correction: apply correction to + the returned 2d array + if None: correct on the string/list of string, + not correct on the 2d array + :return: 2d array of image - ''' + """ if isinstance(image, list): rv = np.zeros((self.config.ydimension, self.config.xdimension)) for imagefile in image: rv += self._getPic(imagefile) rv /= len(image) - elif isinstance(image, (str, unicode)): + elif isinstance(image, str): rv = self.loadimage.loadImage(image) - if correction == None or correction == True: + if correction is None or correction is True: ce = self.config.cropedges - rv[ce[2]:-ce[3], ce[0]:-ce[1]] = rv[ce[2]:-ce[3], ce[0]:-ce[1]] * self.correction + rv[ce[2] : -ce[3], ce[0] : -ce[1]] = ( + rv[ce[2] : -ce[3], ce[0] : -ce[1]] * self.correction + ) # rv *= self.correction else: rv = image - if 
flip == True: + if flip is True: rv = self.loadimage.flipImage(rv) - if correction == True: + if correction is True: # rv *= self.correction ce = self.config.cropedges - rv[ce[2]:-ce[3], ce[0]:-ce[1]] = rv[ce[2]:-ce[3], ce[0]:-ce[1]] * self.correction - if rv.dtype.kind != 'f': + rv[ce[2] : -ce[3], ce[0] : -ce[1]] = ( + rv[ce[2] : -ce[3], ce[0] : -ce[1]] * self.correction + ) + if rv.dtype.kind != "f": rv = rv.astype(float) return rv - def integrate(self, image, savename=None, savefile=True, flip=None, correction=None, extramask=None): - ''' - integrate 2d image to 1d diffraction pattern, then save to disk - - :param image: str or 2d array, + def integrate( + self, + image, + savename=None, + savefile=True, + flip=None, + correction=None, + extramask=None, + ): + """Integrate 2d image to 1d diffraction pattern, then save to + disk. + + :param image: str or 2d array, if str, then read image file using it as file name. if 2d array, integrate this 2d array. :param savename: str, name of file to save - :param savefile: boolean, if True, save file to disk, if False, do not save file to disk + :param savefile: boolean, if True, save file to disk, + if False, do not save file to disk :param flip: flip the image/2d array, - if None: flip on the string/list of string, not flip on the 2d array + if None: flip on the string/list of string, + not flip on the 2d array Flip behavior is controlled in self.config :param correction: apply correction to the returned 2d array - if None: correct on the string/list of string, not correct on the 2d array - :param extramask: 2d array, extra mask applied in integration - - :return: dict, rv['chi'] is a 2d array of integrated intensity, shape is (2, len of intensity) - or (3, len of intensity) in [tth or q, intensity, (uncertainty)]. 
rv['filename'] is the - name of file to save to disk - ''' + if None: correct on the string/list of string, + not correct on the 2d array + :param extramask: 2d array, extra mask applied in integration + + :return: dict, rv['chi'] is a 2d array of integrated intensity, + shape is (2, len of intensity) or (3, len of intensity) + in [tth or q, intensity, (uncertainty)]. + rv['filename'] is the name of file to save to disk + """ + rv = {} self.pic = self._getPic(image, flip, correction) - rv['filename'] = self._getSaveFileName(imagename=image, filename=savename) + rv["filename"] = self._getSaveFileName( + imagename=image, filename=savename + ) self._picChanged(extramask=extramask) # calculate - rv['chi'] = self.chi = self.calculate.intensity(self.pic) + rv["chi"] = self.chi = self.calculate.intensity(self.pic) # save if savefile: - rv['filename'] = self.saveresults.save(rv) + rv["filename"] = self.saveresults.save(rv) return rv - def integrateFilelist(self, filelist, summation=None, filename=None, flip=None, correction=None, extramask=None): - ''' - process all file in filelist, integrate them separately or together - + def integrateFilelist( + self, + filelist, + summation=None, + filename=None, + flip=None, + correction=None, + extramask=None, + ): + """Process all file in filelist, integrate them separately or + together. 
+ :param filelist: list of string, files to be integrated (full path) :param summation: bool or None, sum all files together or not, if None, use self.config.summation :param filename: file name of output file :param flip: flip the image/2d array, - if None: flip on the string/list of string, not flip on the 2d array + if None: flip on the string/list of string, + not flip on the 2d array Flip behavior is controlled in self.config :param correction: apply correction to the returned 2d array - if None: correct on the string/list of string, not correct on the 2d array - :param extramask: 2d array, extra mask applied in integration - - :return: list of dict, in each dict, rv['chi'] is a 2d array of integrated intensity, shape is (2, len of intensity) - or (3, len of intensity) as [tth or q, intensity, (uncertainty)]. rv['filename'] is the - name of file to save to disk - ''' - summation = self.config.summation if summation == None else summation - if (summation)and(len(filelist) > 1): + if None: correct on the string/list of string, + not correct on the 2d array + :param extramask: 2d array, extra mask applied in integration + + :return: list of dict, in each dict, rv['chi'] is a + 2d array of integrated intensity, shape is (2, len of intensity) + or (3, len of intensity) as [tth or q, intensity, (uncertainty)]. 
+ rv['filename'] is the name of file to save to disk + """ + summation = self.config.summation if summation is None else summation + if (summation) and (len(filelist) > 1): image = self._getPic(filelist, flip, correction) - if filename == None: + if filename is None: if isinstance(filelist[-1], str): - filename = os.path.splitext(filelist[-1])[0] + '_sum.chi' + filename = os.path.splitext(filelist[-1])[0] + "_sum.chi" else: - filename = 'Sum_xrd.chi' - rv = [self.integrate(image, savename=filename, extramask=extramask)] + filename = "Sum_xrd.chi" + rv = [ + self.integrate(image, savename=filename, extramask=extramask) + ] else: i = 0 rv = [] for imagefile in filelist: - if filename == None: - rvv = self.integrate(imagefile, flip=flip, correction=correction, extramask=extramask) + if filename is None: + rvv = self.integrate( + imagefile, + flip=flip, + correction=correction, + extramask=extramask, + ) else: - rvv = self.integrate(imagefile, savename=filename + '%03d' % i, - flip=flip, correction=correction, extramask=extramask) + rvv = self.integrate( + imagefile, + savename=filename + "%03d" % i, + flip=flip, + correction=correction, + extramask=extramask, + ) rv.append(rvv) return rv def process(self): - ''' - process the images according to filenames/includepattern/excludepattern/summation - by default, it will scan current/tifdirectory and integrate all files match + """Process the images according to + filenames/includepattern/excludepattern/summation by default, it + will scan current/tifdirectory and integrate all files match includepattern/excludepattern and/or filenames. - + Usually this one is called from cmd line rather then script. 
- + :return: None - ''' + """ if not self.config.nocalculation: filelist = self.loadimage.genFileList() if len(filelist) > 0: self.prepareCalculation(pic=filelist[0]) self.integrateFilelist(filelist) else: - print 'No input files or configurations' + print("No input files or configurations") self.config.args.print_help() # mask creating - elif self.config.createmask != '': + elif self.config.createmask != "": self.createMask() # if no config is passed to srxplanar else: - print 'No input files or configurations' + print("No input files or configurations") self.config.args.print_help() return def createMask(self, filename=None, pic=None, addmask=None): - ''' - create and save a mask according to addmask, pic, 1 stands for masked pixel in saved file - - :param filename: name of mask file to save, 'mask.npy' if it is None - :param pic: 2d image array, may used in generating dynamic mask, Be careful if this one is flipped or not - :param addmask: list of str, control how to generate mask, see Mask module for detail - + """Create and save a mask according to addmask, pic, 1 stands + for masked pixel in saved file. 
+ + :param filename: name of mask file to save, 'mask.npy' if it is + None + :param pic: 2d image array, may used in generating dynamic mask, + Be careful if this one is flipped or not + :param addmask: list of str, control how to generate mask, see + Mask module for detail :return: 2d array, 1 stands for masked pixel here - ''' - filename = self.config.createmask if filename == None else filename - filename = 'mask.npy' if filename == '' else filename - addmask = self.config.addmask if addmask == None else addmask - if not hasattr(self, 'mask'): + """ + filename = self.config.createmask if filename is None else filename + filename = "mask.npy" if filename == "" else filename + addmask = self.config.addmask if addmask is None else addmask + if not hasattr(self, "mask"): self.mask = Mask(self.config) - if not hasattr(self, 'loadimage'): + if not hasattr(self, "loadimage"): self.loadimage = LoadImage(self.config) - if pic == None: + if pic is None: filelist = self.loadimage.genFileList() - if hasattr(self, 'pic'): - if self.pic != None: + if hasattr(self, "pic"): + if self.pic is not None: pic = self.pic else: - pic = self.loadimage.loadImage(filelist[0]) if len(filelist) > 0 else None + pic = ( + self.loadimage.loadImage(filelist[0]) + if len(filelist) > 0 + else None + ) else: - pic = self.loadimage.loadImage(filelist[0]) if len(filelist) > 0 else None + pic = ( + self.loadimage.loadImage(filelist[0]) + if len(filelist) > 0 + else None + ) rv = self.mask.saveMask(filename, pic, addmask) return rv - def main(): - ''' - read config and integrate images - ''' + """Read config and integrate images.""" srxplanar = SrXplanar(args=sys.argv[1:]) srxplanar.process() return -if __name__ == '__main__': + +if __name__ == "__main__": sys.exit(main()) diff --git a/src/diffpy/srxplanar/srxplanarconfig.py b/src/diffpy/srxplanar/srxplanarconfig.py new file mode 100644 index 0000000..14dbcef --- /dev/null +++ b/src/diffpy/srxplanar/srxplanarconfig.py @@ -0,0 +1,636 @@ 
+#!/usr/bin/env python +############################################################################## +# +# diffpy.srxplanar by DANSE Diffraction group +# Simon J. L. Billinge +# (c) 2010-2025 Trustees of the Columbia University +# in the City of New York. All rights reserved. +# +# File coded by: Xiaohao Yang +# +# See AUTHORS.txt for a list of people who contributed. +# See LICENSE.txt for license information. +# +############################################################################## + +import numpy as np + +from diffpy.confutils.config import ConfigBase +from diffpy.confutils.tools import _configPropertyRad + +_description = """ +SrXplanar -- integrate 2D powder diffraction image to 1D +with unceratinty propagation +""" +# Text to display after the argument help +_epilog = """ +Examples: + +srxplanar KFe2As2-00838.tif -c test.cfg +--integration using config file test.cfg + +srxplanar *.tif -c test.cfg -s +--integration all .tif image and sum them into one + +srxplanar --createconfig config.cfg +--create default (short) config file using all default value + +""" + +_optdatalist = [ + # control group + [ + "filenames", + { + "sec": "Control", + "config": "n", + "header": "n", + "f": "filename", + "h": ( + "filename or list of filenames or filename pattern" + " or list of filename pattern" + ), + "n": "*", + "d": [], + }, + ], + [ + "output", + { + "sec": "Experiment", + "config": "n", + "header": "n", + "s": "o", + "h": "basename of output file", + "d": "", + }, + ], + [ + "summation", + { + "sec": "Control", + "config": "n", + "header": "n", + "s": "s", + "h": "sum all the image and then integrate", + "n": "?", + "co": True, + "d": False, + }, + ], + # Experiment group + [ + "opendirectory", + { + "sec": "Control", + "header": "n", + "s": "opendir", + "h": "directory of input 2D image files", + "d": "currentdir", + "tt": "directory", + }, + ], + [ + "savedirectory", + { + "sec": "Control", + "header": "n", + "s": "savedir", + "h": "directory of output 
files", + "d": "currentdir", + "tt": "directory", + }, + ], + [ + "maskfile", + { + "sec": "Experiment", + "s": "mask", + "h": ( + "the mask file (support numpy .npy array, and tiff image," + " >0 stands for masked pixel)" + ), + "d": "", + "tt": "file", + }, + ], + [ + "createmask", + { + "sec": "Control", + "config": "n", + "header": "n", + "h": ( + "create a mask file according to" + " current image file and value of addmask" + ), + "d": "", + }, + ], + [ + "integrationspace", + { + "sec": "Experiment", + "h": "the x-grid of integrated 1D diffraction data", + "d": "twotheta", + "c": ["qspace", "twotheta"], + }, + ], + [ + "wavelength", + { + "sec": "Experiment", + "h": "wavelength of x-ray, in Angstrom", + "d": 0.1000, + }, + ], + [ + "xbeamcenter", + { + "sec": "Experiment", + "s": "xc", + "h": "beamcenter in x axis, in pixel", + "d": 1024.0, + }, + ], + [ + "ybeamcenter", + { + "sec": "Experiment", + "s": "yc", + "h": "beamcenter in y axis, in pixel", + "d": 1024.0, + }, + ], + [ + "distance", + { + "sec": "Experiment", + "s": "dis", + "h": "distance between detector and sample, in mm", + "d": 200.0, + }, + ], + [ + "rotationd", + { + "sec": "Experiment", + "s": "rot", + "h": "rotation angle of tilt plane, in degree", + "d": 0.0, + }, + ], + [ + "tiltd", + { + "sec": "Experiment", + "s": "tilt", + "h": "tilt angle of tilt plane, in degree", + "d": 0.0, + }, + ], + [ + "tthstepd", + { + "sec": "Experiment", + "s": "ts", + "h": "integration step in twotheta space, in degree", + "d": 0.02, + }, + ], + [ + "qstep", + { + "sec": "Experiment", + "s": "qs", + "h": "integration step in q space, in Angstrom^-1", + "d": 0.02, + }, + ], + # Beamline group + [ + "includepattern", + { + "sec": "Beamline", + "header": "n", + "config": "f", + "s": "ipattern", + "h": "list of string, file name patterns for included files", + "n": "*", + "d": ["*.tif", "*.tif.bz2"], + }, + ], + [ + "excludepattern", + { + "sec": "Beamline", + "header": "n", + "config": "f", + "s": "epattern", 
+ "h": "list of string, file name patterns for excluded files", + "n": "*", + "d": ["*.dark.tif", "*.raw.tif"], + }, + ], + [ + "fliphorizontal", + { + "sec": "Beamline", + "h": "flip the image horizontally", + "n": "?", + "co": True, + "d": False, + }, + ], + [ + "flipvertical", + { + "sec": "Beamline", + "h": "flip the image vertically", + "n": "?", + "co": True, + "d": True, + }, + ], + [ + "xdimension", + { + "sec": "Beamline", + "s": "xd", + "h": "detector dimension in x axis, in pixel", + "d": 2048, + }, + ], + [ + "ydimension", + { + "sec": "Beamline", + "s": "yd", + "h": "detector dimension in y axis, in pixel", + "d": 2048, + }, + ], + [ + "xpixelsize", + { + "sec": "Beamline", + "s": "xp", + "h": "detector pixel size in x axis, in mm", + "d": 0.2, + }, + ], + [ + "ypixelsize", + { + "sec": "Beamline", + "s": "yp", + "h": "detector pixel size in y axis, in mm", + "d": 0.2, + }, + ], + # Others Group + [ + "uncertaintyenable", + { + "sec": "Others", + "s": "error", + "h": "enable uncertainty propagation", + "n": "?", + "co": True, + "d": True, + }, + ], + [ + "sacorrectionenable", + { + "sec": "Others", + "s": "sacorr", + "h": "enable solid angle correction", + "n": "?", + "co": True, + "d": True, + }, + ], + [ + "polcorrectionenable", + { + "sec": "Others", + "s": "polarcorr", + "h": "enable polarization correction", + "n": "?", + "co": True, + "d": True, + }, + ], + [ + "polcorrectf", + { + "sec": "Others", + "s": "polarf", + "h": "polarization correction factor", + "d": 0.99, + }, + ], + [ + "brightpixelmask", + { + "sec": "Others", + "h": "mask the bright pixel by comparing their local environments", + "n": "?", + "co": True, + "d": True, + }, + ], + [ + "darkpixelmask", + { + "sec": "Others", + "h": "mask the dark pixel by comparing their local environments", + "n": "?", + "co": True, + "d": True, + }, + ], + [ + "avgmask", + { + "sec": "Others", + "h": ( + "create a dynamic averaging mask that mask pixel with" + " too high or too low intensity compare 
to the pixels" + " which have similar twotheta value" + ), + "n": "?", + "co": True, + "d": True, + }, + ], + [ + "gsasoutput", + { + "sec": "Others", + "header": "n", + "h": "select if want to output gsas format file", + "c": ["None", "std", "esd", "fxye"], + "d": "None", + }, + ], + [ + "filenameplus", + { + "sec": "Others", + "header": "n", + "h": "string appended to the output filename", + "d": "", + }, + ], + [ + "cropedges", + { + "sec": "Others", + "h": ( + "crop the image, maske pixels around the image edge" + " (left, right, top, bottom), must larger than 0" + ), + "n": 4, + "tt": "array", + "t": "intlist", + "d": [10, 10, 10, 10], + }, + ], + [ + "extracrop", + { + "sec": "Others", + "args": "n", + "config": "n", + "header": "n", + "h": ( + "crop the edge pixels, first four means" + " the number of pixels masked in each edge \ +(left, right, top, bottom), this crop is after all prepare calculation, \ +so change this value does not require a config update," + " value must larger than 0" + ), + "n": 4, + "tt": "array", + "t": "intlist", + "d": [1, 1, 1, 1], + }, + ], + [ + "nocalculation", + { + "sec": "Others", + "config": "n", + "header": "n", + "h": ( + "set True to disable all calculation," + " will automatically set True if createconfig or createmask" + ), + "n": "?", + "co": True, + "d": False, + }, + ], + # masking + [ + "brightpixelr", + { + "sec": "Others", + "args": "n", + "config": "n", + "header": "n", + "h": "a threshold for masked pixels in bright pixel masking", + "d": 1.2, + }, + ], + [ + "brightpixelsize", + { + "sec": "Others", + "args": "n", + "config": "n", + "header": "n", + "h": "size of local testing area in bright pixel masking", + "d": 5, + }, + ], + [ + "darkpixelr", + { + "sec": "Others", + "args": "n", + "config": "n", + "header": "n", + "h": "a threshold for masked pixels in dark pixel masking", + "d": 0.1, + }, + ], + [ + "avgmaskhigh", + { + "sec": "Others", + "args": "n", + "config": "n", + "header": "n", + "h": ( + "a 
threshold for masked pixels in average masking," + " pixels with (self_int > avg_int * avgmaskhigh)" + " will be masked" + ), + "d": 2.0, + }, + ], + [ + "avgmasklow", + { + "sec": "Others", + "args": "n", + "config": "n", + "header": "n", + "h": ( + "a threshold for masked pixels in average masking," + " pixels with (self_int < avg_int * avgmasklow) will be masked" + ), + "d": 0.5, + }, + ], +] + +_defaultdata = { + "configfile": ["srxplanar.cfg", "SrXplanar.cfg"], + "headertitle": "SrXplanar configuration", +} + + +class SrXplanarConfig(ConfigBase): + """Config class, based on ConfigBase class in diffpy.confutils.""" + + # Text to display before the argument help + _description = _description + + # Text to display after the argument help + _epilog = _epilog + + _optdatalist = _optdatalist + + _defaultdata = _defaultdata + + def _preInit(self, **kwargs): + """Method called in init process, overload it! + + this method will be called before reading config from + file/args/kwargs + + add degree/rad delegation for rotation, tilt, tthstep, tthmax + """ + + for name in ["rotation", "tilt", "tthstep", "tthmax"]: + setattr(self.__class__, name, _configPropertyRad(name + "d")) + # cls._configlist['Experiment'].extend([ + # 'rotation', + # 'tilt', + # 'tthstep', + # 'tthmax', + # ]) + return + + def _preUpdateSelf(self, **kwargs): + """Additional process called in self._updateSelf, this method is + called before self._copySelftoConfig(), i.e. 
before copy options + value to self.config (config file) + + check the tthmaxd and qmax, and set tthorqmax, tthorqstep + according to integration space + + :param kwargs: optional kwargs + """ + self.tthmaxd, self.qmax = checkMax(self) + if self.integrationspace == "twotheta": + self.tthorqmax = self.tthmax + self.tthorqstep = self.tthstep + elif self.integrationspace == "qspace": + self.tthorqmax = self.qmax + self.tthorqstep = self.qstep + + self.cropedges = [a if a > 1 else 1 for a in self.cropedges] + self.extracrop = [a if a > 1 else 1 for a in self.extracrop] + return + + def _postUpdateConfig(self, **kwargs): + """Post processing after parse args or kwargs, this method is + called after in self._postPocessing and before creating config + file action. + + set nocalculatio flag when create config or create mask + + :param kwargs: optional kwargs + """ + + if (self.createconfig != "") and (self.createconfig is not None): + self.nocalculation = True + if (self.createconfigfull != "") and ( + self.createconfigfull is not None + ): + self.nocalculation = True + if self.createmask != "": + self.nocalculation = True + return + + +def checkMax(config): + """Calculate the max twotheta angle (and q) of a detector with + current geometry. + + :param config: SrXplanarConfig, config instance stores the geometry + parameters + :return: [tthmaxd, qmax], max twotheta angle(in degree) and max q + value of current detector. 
+ """ + xdimension = getattr(config, "xdimension") + ydimension = getattr(config, "ydimension") + xbeamcenter = getattr(config, "xbeamcenter") + ybeamcenter = getattr(config, "ybeamcenter") + xpixelsize = getattr(config, "xpixelsize") + ypixelsize = getattr(config, "ypixelsize") + rotation = getattr(config, "rotation") + tilt = getattr(config, "tilt") + distance = getattr(config, "distance") + wavelength = getattr(config, "wavelength") + + xr = (np.array([0, xdimension + 1]) - xbeamcenter) * xpixelsize + yr = (np.array([0, ydimension + 1]) - ybeamcenter) * ypixelsize + sinr = np.sin(rotation) + cosr = np.cos(rotation) + sint = np.sin(tilt) + cost = np.cos(tilt) + sourcexr = distance * sint * cosr + sourceyr = -distance * sint * sinr + sourcezr = distance * cost + + dmatrix = ( + ((xr - sourcexr) ** 2).reshape(1, 2) + + ((yr - sourceyr) ** 2).reshape(2, 1) + + sourcezr**2 + ) + dmatrix = np.sqrt(dmatrix) + tthmatrix1 = ( + ((-xr + sourcexr) * sourcexr).reshape(1, 2) + + ((-yr + sourceyr) * sourceyr).reshape(2, 1) + + sourcezr * sourcezr + ) + tthmatrix = np.arccos(tthmatrix1 / dmatrix / distance) + qmatrix = 4 * np.pi * np.sin(tthmatrix / 2.0) / wavelength + + tthmaxd = np.degrees(np.max(tthmatrix)) + 0.5 + qmax = np.max(qmatrix) + 0.1 + return tthmaxd, qmax + + +SrXplanarConfig.initConfigClass() + +if __name__ == "__main__": + a = SrXplanarConfig() + a.updateConfig() + a.writeConfig("test.cfg") diff --git a/diffpy/srxplanar/tifffile.py b/src/diffpy/srxplanar/tifffile.py similarity index 58% rename from diffpy/srxplanar/tifffile.py rename to src/diffpy/srxplanar/tifffile.py index 6a2e4c9..3a42f13 100644 --- a/diffpy/srxplanar/tifffile.py +++ b/src/diffpy/srxplanar/tifffile.py @@ -129,36 +129,45 @@ ... if page.is_lsm: ... 
t = page.cz_lsm_info >>> tif.close() - """ from __future__ import division, print_function -import sys -import os -import re +import collections +import datetime import glob -import math -import zlib -import time import json +import math +import os +import re import struct +import sys +import time import warnings -import datetime -import collections +import zlib from fractions import Fraction from xml.etree import cElementTree as ElementTree import numpy -__version__ = '2014.02.05' -__docformat__ = 'restructuredtext en' -__all__ = ['imsave', 'imread', 'imshow', 'TiffFile', 'TiffSequence'] - - -def imsave(filename, data, photometric=None, planarconfig=None, - resolution=None, description=None, software='tifffile.py', - byteorder=None, bigtiff=False, compress=0, extratags=()): +__version__ = "2014.02.05" +__docformat__ = "restructuredtext en" +__all__ = ["imsave", "imread", "imshow", "TiffFile", "TiffSequence"] + + +def imsave( + filename, + data, + photometric=None, + planarconfig=None, + resolution=None, + description=None, + software="tifffile.py", + byteorder=None, + bigtiff=False, + compress=0, + extratags=(), +): """Write image data to TIFF file. Image data are written in one stripe per plane. 
@@ -220,49 +229,48 @@ def imsave(filename, data, photometric=None, planarconfig=None, >>> data = numpy.ones((5, 301, 219, 3), 'uint8') + 127 >>> value = u'{"shape": %s}' % str(list(data.shape)) >>> imsave('temp.tif', data, extratags=[(270, 's', 0, value, True)]) - """ - assert(photometric in (None, 'minisblack', 'miniswhite', 'rgb')) - assert(planarconfig in (None, 'contig', 'planar')) - assert(byteorder in (None, '<', '>')) - assert(0 <= compress <= 9) + assert photometric in (None, "minisblack", "miniswhite", "rgb") + assert planarconfig in (None, "contig", "planar") + assert byteorder in (None, "<", ">") + assert 0 <= compress <= 9 if byteorder is None: - byteorder = '<' if sys.byteorder == 'little' else '>' + byteorder = "<" if sys.byteorder == "little" else ">" - data = numpy.asarray(data, dtype=byteorder+data.dtype.char, order='C') + data = numpy.asarray(data, dtype=byteorder + data.dtype.char, order="C") data_shape = shape = data.shape data = numpy.atleast_2d(data) - if not bigtiff and data.size * data.dtype.itemsize < 2000*2**20: + if not bigtiff and data.size * data.dtype.itemsize < 2000 * 2**20: bigtiff = False offset_size = 4 tag_size = 12 - numtag_format = 'H' - offset_format = 'I' - val_format = '4s' + numtag_format = "H" + offset_format = "I" + val_format = "4s" else: bigtiff = True offset_size = 8 tag_size = 20 - numtag_format = 'Q' - offset_format = 'Q' - val_format = '8s' + numtag_format = "Q" + offset_format = "Q" + val_format = "8s" # unify shape of data samplesperpixel = 1 extrasamples = 0 if photometric is None: if data.ndim > 2 and (shape[-3] in (3, 4) or shape[-1] in (3, 4)): - photometric = 'rgb' + photometric = "rgb" else: - photometric = 'minisblack' - if photometric == 'rgb': + photometric = "minisblack" + if photometric == "rgb": if len(shape) < 3: raise ValueError("not a RGB(A) image") if planarconfig is None: - planarconfig = 'planar' if shape[-3] in (3, 4) else 'contig' - if planarconfig == 'contig': + planarconfig = "planar" if 
shape[-3] in (3, 4) else "contig" + if planarconfig == "contig": if shape[-1] not in (3, 4): raise ValueError("not a contiguous RGB(A) image") data = data.reshape((-1, 1) + shape[-3:]) @@ -270,16 +278,16 @@ def imsave(filename, data, photometric=None, planarconfig=None, else: if shape[-3] not in (3, 4): raise ValueError("not a planar RGB(A) image") - data = data.reshape((-1, ) + shape[-3:] + (1, )) + data = data.reshape((-1,) + shape[-3:] + (1,)) samplesperpixel = shape[-3] if samplesperpixel == 4: extrasamples = 1 elif planarconfig and len(shape) > 2: - if planarconfig == 'contig': + if planarconfig == "contig": data = data.reshape((-1, 1) + shape[-3:]) samplesperpixel = shape[-1] else: - data = data.reshape((-1, ) + shape[-3:] + (1, )) + data = data.reshape((-1,) + shape[-3:] + (1,)) samplesperpixel = shape[-3] extrasamples = samplesperpixel - 1 else: @@ -287,28 +295,61 @@ def imsave(filename, data, photometric=None, planarconfig=None, # remove trailing 1s while len(shape) > 2 and shape[-1] == 1: shape = shape[:-1] - data = data.reshape((-1, 1) + shape[-2:] + (1, )) + data = data.reshape((-1, 1) + shape[-2:] + (1,)) shape = data.shape # (pages, planes, height, width, contig samples) - bytestr = bytes if sys.version[0] == '2' else ( - lambda x: bytes(x, 'utf-8') if isinstance(x, str) else x) - tifftypes = {'B': 1, 's': 2, 'H': 3, 'I': 4, '2I': 5, 'b': 6, - 'h': 8, 'i': 9, 'f': 11, 'd': 12, 'Q': 16, 'q': 17} + bytestr = ( + bytes + if sys.version[0] == "2" + else (lambda x: bytes(x, "utf-8") if isinstance(x, str) else x) + ) + tifftypes = { + "B": 1, + "s": 2, + "H": 3, + "I": 4, + "2I": 5, + "b": 6, + "h": 8, + "i": 9, + "f": 11, + "d": 12, + "Q": 16, + "q": 17, + } tifftags = { - 'new_subfile_type': 254, 'subfile_type': 255, - 'image_width': 256, 'image_length': 257, 'bits_per_sample': 258, - 'compression': 259, 'photometric': 262, 'fill_order': 266, - 'document_name': 269, 'image_description': 270, 'strip_offsets': 273, - 'orientation': 274, 'samples_per_pixel': 
277, 'rows_per_strip': 278, - 'strip_byte_counts': 279, 'x_resolution': 282, 'y_resolution': 283, - 'planar_configuration': 284, 'page_name': 285, 'resolution_unit': 296, - 'software': 305, 'datetime': 306, 'predictor': 317, 'color_map': 320, - 'extra_samples': 338, 'sample_format': 339} + "new_subfile_type": 254, + "subfile_type": 255, + "image_width": 256, + "image_length": 257, + "bits_per_sample": 258, + "compression": 259, + "photometric": 262, + "fill_order": 266, + "document_name": 269, + "image_description": 270, + "strip_offsets": 273, + "orientation": 274, + "samples_per_pixel": 277, + "rows_per_strip": 278, + "strip_byte_counts": 279, + "x_resolution": 282, + "y_resolution": 283, + "planar_configuration": 284, + "page_name": 285, + "resolution_unit": 296, + "software": 305, + "datetime": 306, + "predictor": 317, + "color_map": 320, + "extra_samples": 338, + "sample_format": 339, + } tags = [] # list of (code, ifdentry, ifdvalue, writeonce) def pack(fmt, *val): - return struct.pack(byteorder+fmt, *val) + return struct.pack(byteorder + fmt, *val) def addtag(code, dtype, count, value, writeonce=False): # compute ifdentry and ifdvalue bytes from code, dtype, count, value @@ -318,26 +359,25 @@ def addtag(code, dtype, count, value, writeonce=False): raise ValueError("unknown dtype %s" % dtype) tifftype = tifftypes[dtype] rawcount = count - if dtype == 's': - value = bytestr(value) + b'\0' + if dtype == "s": + value = bytestr(value) + b"\0" count = rawcount = len(value) - value = (value, ) + value = (value,) if len(dtype) > 1: count *= int(dtype[:-1]) dtype = dtype[-1] - ifdentry = [pack('HH', code, tifftype), - pack(offset_format, rawcount)] + ifdentry = [pack("HH", code, tifftype), pack(offset_format, rawcount)] ifdvalue = None if count == 1: if isinstance(value, (tuple, list)): value = value[0] ifdentry.append(pack(val_format, pack(dtype, value))) elif struct.calcsize(dtype) * count <= offset_size: - ifdentry.append(pack(val_format, pack(str(count)+dtype, 
*value))) + ifdentry.append(pack(val_format, pack(str(count) + dtype, *value))) else: ifdentry.append(pack(offset_format, 0)) - ifdvalue = pack(str(count)+dtype, *value) - tags.append((code, b''.join(ifdentry), ifdvalue, writeonce)) + ifdvalue = pack(str(count) + dtype, *value) + tags.append((code, b"".join(ifdentry), ifdvalue, writeonce)) def rational(arg, max_denominator=1000000): # return nominator and denominator from float or two integers @@ -349,48 +389,72 @@ def rational(arg, max_denominator=1000000): return f.numerator, f.denominator if software: - addtag('software', 's', 0, software, writeonce=True) + addtag("software", "s", 0, software, writeonce=True) if description: - addtag('image_description', 's', 0, description, writeonce=True) + addtag("image_description", "s", 0, description, writeonce=True) elif shape != data_shape: - addtag('image_description', 's', 0, - "shape=(%s)" % (",".join('%i' % i for i in data_shape)), - writeonce=True) - addtag('datetime', 's', 0, - datetime.datetime.now().strftime("%Y:%m:%d %H:%M:%S"), - writeonce=True) - addtag('compression', 'H', 1, 32946 if compress else 1) - addtag('orientation', 'H', 1, 1) - addtag('image_width', 'I', 1, shape[-2]) - addtag('image_length', 'I', 1, shape[-3]) - addtag('new_subfile_type', 'I', 1, 0 if shape[0] == 1 else 2) - addtag('sample_format', 'H', 1, - {'u': 1, 'i': 2, 'f': 3, 'c': 6}[data.dtype.kind]) - addtag('photometric', 'H', 1, - {'miniswhite': 0, 'minisblack': 1, 'rgb': 2}[photometric]) - addtag('samples_per_pixel', 'H', 1, samplesperpixel) + addtag( + "image_description", + "s", + 0, + "shape=(%s)" % (",".join("%i" % i for i in data_shape)), + writeonce=True, + ) + addtag( + "datetime", + "s", + 0, + datetime.datetime.now().strftime("%Y:%m:%d %H:%M:%S"), + writeonce=True, + ) + addtag("compression", "H", 1, 32946 if compress else 1) + addtag("orientation", "H", 1, 1) + addtag("image_width", "I", 1, shape[-2]) + addtag("image_length", "I", 1, shape[-3]) + addtag("new_subfile_type", "I", 
1, 0 if shape[0] == 1 else 2) + addtag( + "sample_format", + "H", + 1, + {"u": 1, "i": 2, "f": 3, "c": 6}[data.dtype.kind], + ) + addtag( + "photometric", + "H", + 1, + {"miniswhite": 0, "minisblack": 1, "rgb": 2}[photometric], + ) + addtag("samples_per_pixel", "H", 1, samplesperpixel) if planarconfig: - addtag('planar_configuration', 'H', 1, 1 if planarconfig=='contig' - else 2) - addtag('bits_per_sample', 'H', samplesperpixel, - (data.dtype.itemsize * 8, ) * samplesperpixel) + addtag( + "planar_configuration", + "H", + 1, + 1 if planarconfig == "contig" else 2, + ) + addtag( + "bits_per_sample", + "H", + samplesperpixel, + (data.dtype.itemsize * 8,) * samplesperpixel, + ) else: - addtag('bits_per_sample', 'H', 1, data.dtype.itemsize * 8) + addtag("bits_per_sample", "H", 1, data.dtype.itemsize * 8) if extrasamples: - if photometric == 'rgb': - addtag('extra_samples', 'H', 1, 1) # alpha channel + if photometric == "rgb": + addtag("extra_samples", "H", 1, 1) # alpha channel else: - addtag('extra_samples', 'H', extrasamples, (0, ) * extrasamples) + addtag("extra_samples", "H", extrasamples, (0,) * extrasamples) if resolution: - addtag('x_resolution', '2I', 1, rational(resolution[0])) - addtag('y_resolution', '2I', 1, rational(resolution[1])) - addtag('resolution_unit', 'H', 1, 2) - addtag('rows_per_strip', 'I', 1, shape[-3]) + addtag("x_resolution", "2I", 1, rational(resolution[0])) + addtag("y_resolution", "2I", 1, rational(resolution[1])) + addtag("resolution_unit", "H", 1, 2) + addtag("rows_per_strip", "I", 1, shape[-3]) # use one strip per plane - strip_byte_counts = (data[0, 0].size * data.dtype.itemsize, ) * shape[1] - addtag('strip_byte_counts', offset_format, shape[1], strip_byte_counts) - addtag('strip_offsets', offset_format, shape[1], (0, ) * shape[1]) + strip_byte_counts = (data[0, 0].size * data.dtype.itemsize,) * shape[1] + addtag("strip_byte_counts", offset_format, shape[1], strip_byte_counts) + addtag("strip_offsets", offset_format, shape[1], (0,) * 
shape[1]) # add extra tags from users for t in extratags: @@ -399,18 +463,18 @@ def rational(arg, max_denominator=1000000): # the entries in an IFD must be sorted in ascending order by tag code tags = sorted(tags, key=lambda x: x[0]) - with open(filename, 'wb') as fh: + with open(filename, "wb") as fh: seek = fh.seek tell = fh.tell def write(arg, *args): fh.write(pack(arg, *args) if args else arg) - write({'<': b'II', '>': b'MM'}[byteorder]) + write({"<": b"II", ">": b"MM"}[byteorder]) if bigtiff: - write('HHH', 43, 8, 0) + write("HHH", 43, 8, 0) else: - write('H', 42) + write("H", 42) ifd_offset = tell() write(offset_format, 0) # first IFD @@ -424,7 +488,7 @@ def write(arg, *args): # write ifdentries write(numtag_format, len(tags)) tag_offset = tell() - write(b''.join(t[1] for t in tags)) + write(b"".join(t[1] for t in tags)) ifd_offset = tell() write(offset_format, 0) # offset to next IFD @@ -432,7 +496,7 @@ def write(arg, *args): for tagindex, tag in enumerate(tags): if tag[2]: pos = tell() - seek(tag_offset + tagindex*tag_size + offset_size + 4) + seek(tag_offset + tagindex * tag_size + offset_size + 4) write(offset_format, pos) seek(pos) if tag[0] == 273: @@ -465,7 +529,9 @@ def write(arg, *args): write(offset_format, strip_offset) strip_offset += size else: - seek(tag_offset + tagindex*tag_size + offset_size + 4) + seek( + tag_offset + tagindex * tag_size + offset_size + 4 + ) write(offset_format, data_offset) elif tag[0] == 279: # strip_byte_counts if compress: @@ -474,8 +540,12 @@ def write(arg, *args): for size in strip_byte_counts: write(offset_format, size) else: - seek(tag_offset + tagindex*tag_size + - offset_size + 4) + seek( + tag_offset + + tagindex * tag_size + + offset_size + + 4 + ) write(offset_format, strip_byte_counts[0]) break seek(pos) @@ -512,23 +582,22 @@ def imread(files, *args, **kwargs): >>> ims = imread(['test.tif', 'test.tif']) >>> ims.shape (2, 256, 256, 4) - """ kwargs_file = {} - if 'multifile' in kwargs: - kwargs_file['multifile'] 
= kwargs['multifile'] - del kwargs['multifile'] + if "multifile" in kwargs: + kwargs_file["multifile"] = kwargs["multifile"] + del kwargs["multifile"] else: - kwargs_file['multifile'] = True + kwargs_file["multifile"] = True kwargs_seq = {} - if 'pattern' in kwargs: - kwargs_seq['pattern'] = kwargs['pattern'] - del kwargs['pattern'] + if "pattern" in kwargs: + kwargs_seq["pattern"] = kwargs["pattern"] + del kwargs["pattern"] - if isinstance(files, basestring) and any(i in files for i in '?*'): + if isinstance(files, basestring) and any(i in files for i in "?*"): files = glob.glob(files) if not files: - raise ValueError('no files found') + raise ValueError("no files found") if len(files) == 1: files = files[0] @@ -542,7 +611,8 @@ def imread(files, *args, **kwargs): class lazyattr(object): """Lazy object attribute whose value is computed on first access.""" - __slots__ = ('func', ) + + __slots__ = ("func",) def __init__(self, func): self.func = func @@ -583,8 +653,8 @@ class TiffFile(object): ... print(e) ... finally: ... tif.close() - """ + def __init__(self, arg, name=None, multifile=False): """Initialize instance from file. @@ -597,11 +667,10 @@ def __init__(self, arg, name=None, multifile=False): Human readable label of open file. multifile : bool If True, series may include pages from multiple files. 
- """ if isinstance(arg, basestring): filename = os.path.abspath(arg) - self._fh = open(filename, 'rb') + self._fh = open(filename, "rb") else: filename = str(name) self._fh = arg @@ -633,13 +702,14 @@ def _fromfile(self): """Read TIFF header and all page records from file.""" self._fh.seek(0) try: - self.byteorder = {b'II': '<', b'MM': '>'}[self._fh.read(2)] + self.byteorder = {b"II": "<", b"MM": ">"}[self._fh.read(2)] except KeyError: raise ValueError("not a valid TIFF file") - version = struct.unpack(self.byteorder+'H', self._fh.read(2))[0] + version = struct.unpack(self.byteorder + "H", self._fh.read(2))[0] if version == 43: # BigTiff - self.offset_size, zero = struct.unpack(self.byteorder+'HH', - self._fh.read(4)) + self.offset_size, zero = struct.unpack( + self.byteorder + "HH", self._fh.read(4) + ) if zero or self.offset_size != 8: raise ValueError("not a valid BigTIFF file") elif version == 42: @@ -662,74 +732,117 @@ def _fromfile(self): @lazyattr def series(self): - """Return series of TiffPage with compatible shape and properties.""" + """Return series of TiffPage with compatible shape and + properties.""" series = [] if self.is_ome: series = self._omeseries() elif self.is_fluoview: - dims = {b'X': 'X', b'Y': 'Y', b'Z': 'Z', b'T': 'T', - b'WAVELENGTH': 'C', b'TIME': 'T', b'XY': 'R', - b'EVENT': 'V', b'EXPOSURE': 'L'} + dims = { + b"X": "X", + b"Y": "Y", + b"Z": "Z", + b"T": "T", + b"WAVELENGTH": "C", + b"TIME": "T", + b"XY": "R", + b"EVENT": "V", + b"EXPOSURE": "L", + } mmhd = list(reversed(self.pages[0].mm_header.dimensions)) - series = [Record( - axes=''.join(dims.get(i[0].strip().upper(), 'Q') - for i in mmhd if i[1] > 1), - shape=tuple(int(i[1]) for i in mmhd if i[1] > 1), - pages=self.pages, dtype=numpy.dtype(self.pages[0].dtype))] + series = [ + Record( + axes="".join( + dims.get(i[0].strip().upper(), "Q") + for i in mmhd + if i[1] > 1 + ), + shape=tuple(int(i[1]) for i in mmhd if i[1] > 1), + pages=self.pages, + 
dtype=numpy.dtype(self.pages[0].dtype), + ) + ] elif self.is_lsm: lsmi = self.pages[0].cz_lsm_info axes = CZ_SCAN_TYPES[lsmi.scan_type] if self.pages[0].is_rgb: - axes = axes.replace('C', '').replace('XY', 'XYC') + axes = axes.replace("C", "").replace("XY", "XYC") axes = axes[::-1] shape = [getattr(lsmi, CZ_DIMENSIONS[i]) for i in axes] pages = [p for p in self.pages if not p.is_reduced] - series = [Record(axes=axes, shape=shape, pages=pages, - dtype=numpy.dtype(pages[0].dtype))] + series = [ + Record( + axes=axes, + shape=shape, + pages=pages, + dtype=numpy.dtype(pages[0].dtype), + ) + ] if len(pages) != len(self.pages): # reduced RGB pages pages = [p for p in self.pages if p.is_reduced] cp = 1 i = 0 - while cp < len(pages) and i < len(shape)-2: + while cp < len(pages) and i < len(shape) - 2: cp *= shape[i] i += 1 shape = shape[:i] + list(pages[0].shape) - axes = axes[:i] + 'CYX' - series.append(Record(axes=axes, shape=shape, pages=pages, - dtype=numpy.dtype(pages[0].dtype))) + axes = axes[:i] + "CYX" + series.append( + Record( + axes=axes, + shape=shape, + pages=pages, + dtype=numpy.dtype(pages[0].dtype), + ) + ) elif self.is_imagej: shape = [] axes = [] ij = self.pages[0].imagej_tags - if 'frames' in ij: - shape.append(ij['frames']) - axes.append('T') - if 'slices' in ij: - shape.append(ij['slices']) - axes.append('Z') - if 'channels' in ij and not self.is_rgb: - shape.append(ij['channels']) - axes.append('C') + if "frames" in ij: + shape.append(ij["frames"]) + axes.append("T") + if "slices" in ij: + shape.append(ij["slices"]) + axes.append("Z") + if "channels" in ij and not self.is_rgb: + shape.append(ij["channels"]) + axes.append("C") remain = len(self.pages) // (numpy.prod(shape) if shape else 1) if remain > 1: shape.append(remain) - axes.append('I') + axes.append("I") shape.extend(self.pages[0].shape) axes.extend(self.pages[0].axes) - axes = ''.join(axes) - series = [Record(pages=self.pages, shape=shape, axes=axes, - dtype=numpy.dtype(self.pages[0].dtype))] 
+ axes = "".join(axes) + series = [ + Record( + pages=self.pages, + shape=shape, + axes=axes, + dtype=numpy.dtype(self.pages[0].dtype), + ) + ] elif self.is_nih: - series = [Record(pages=self.pages, - shape=(len(self.pages),) + self.pages[0].shape, - axes='I' + self.pages[0].axes, - dtype=numpy.dtype(self.pages[0].dtype))] + series = [ + Record( + pages=self.pages, + shape=(len(self.pages),) + self.pages[0].shape, + axes="I" + self.pages[0].axes, + dtype=numpy.dtype(self.pages[0].dtype), + ) + ] elif self.pages[0].is_shaped: - shape = self.pages[0].tags['image_description'].value[7:-1] - shape = tuple(int(i) for i in shape.split(b',')) - series = [Record(pages=self.pages, shape=shape, - axes='Q' * len(shape), - dtype=numpy.dtype(self.pages[0].dtype))] + shape = self.pages[0].tags["image_description"].value[7:-1] + shape = tuple(int(i) for i in shape.split(b",")) + series = [ + Record( + pages=self.pages, + shape=shape, + axes="Q" * len(shape), + dtype=numpy.dtype(self.pages[0].dtype), + ) + ] if not series: shapes = [] @@ -737,20 +850,28 @@ def series(self): for page in self.pages: if not page.shape: continue - shape = page.shape + (page.axes, - page.compression in TIFF_DECOMPESSORS) - if not shape in pages: + shape = page.shape + ( + page.axes, + page.compression in TIFF_DECOMPESSORS, + ) + if shape not in pages: shapes.append(shape) pages[shape] = [page] else: pages[shape].append(page) - series = [Record(pages=pages[s], - axes=(('I' + s[-2]) - if len(pages[s]) > 1 else s[-2]), - dtype=numpy.dtype(pages[s][0].dtype), - shape=((len(pages[s]), ) + s[:-2] - if len(pages[s]) > 1 else s[:-2])) - for s in shapes] + series = [ + Record( + pages=pages[s], + axes=(("I" + s[-2]) if len(pages[s]) > 1 else s[-2]), + dtype=numpy.dtype(pages[s][0].dtype), + shape=( + (len(pages[s]),) + s[:-2] + if len(pages[s]) > 1 + else s[:-2] + ), + ) + for s in shapes + ] return series def asarray(self, key=None, series=None, memmap=False): @@ -766,7 +887,6 @@ def asarray(self, key=None, 
series=None, memmap=False): Defines which series of pages to return as array. memmap : bool If True, use numpy.memmap to read arrays from file if possible. - """ if key is None and series is None: series = 0 @@ -791,7 +911,8 @@ def asarray(self, key=None, series=None, memmap=False): elif self.is_nih: result = numpy.vstack( p.asarray(colormapped=False, squeeze=False, memmap=memmap) - for p in pages) + for p in pages + ) if pages[0].is_palette: result = numpy.take(pages[0].color_map, result, axis=1) result = numpy.swapaxes(result, 0, 1) @@ -799,14 +920,17 @@ def asarray(self, key=None, series=None, memmap=False): if self.is_ome and any(p is None for p in pages): firstpage = next(p for p in pages if p) nopage = numpy.zeros_like(firstpage.asarray(memmap=memmap)) - result = numpy.vstack((p.asarray(memmap=memmap) if p else nopage) - for p in pages) + result = numpy.vstack( + (p.asarray(memmap=memmap) if p else nopage) for p in pages + ) if key is None: try: result.shape = self.series[series].shape except ValueError: - warnings.warn("failed to reshape %s to %s" % ( - result.shape, self.series[series].shape)) + warnings.warn( + "failed to reshape %s to %s" + % (result.shape, self.series[series].shape) + ) result.shape = (-1,) + pages[0].shape else: result.shape = (-1,) + pages[0].shape @@ -814,63 +938,68 @@ def asarray(self, key=None, series=None, memmap=False): def _omeseries(self): """Return image series in OME-TIFF file(s).""" - root = ElementTree.XML(self.pages[0].tags['image_description'].value) - uuid = root.attrib.get('UUID', None) + root = ElementTree.XML(self.pages[0].tags["image_description"].value) + uuid = root.attrib.get("UUID", None) self._tiffs = {uuid: self} modulo = {} result = [] for element in root: - if element.tag.endswith('BinaryOnly'): + if element.tag.endswith("BinaryOnly"): warnings.warn("not an OME-TIFF master file") break - if element.tag.endswith('StructuredAnnotations'): + if element.tag.endswith("StructuredAnnotations"): for annot in element: 
- if not annot.attrib.get('Namespace', - '').endswith('modulo'): + if not annot.attrib.get("Namespace", "").endswith( + "modulo" + ): continue for value in annot: - for modul in value: - for along in modul: - if not along.tag[:-1].endswith('Along'): + for modul in value: + for along in modul: + if not along.tag[:-1].endswith("Along"): continue axis = along.tag[-1] - newaxis = along.attrib.get('Type', 'other') + newaxis = along.attrib.get("Type", "other") newaxis = AXES_LABELS[newaxis] - if 'Start' in along.attrib: + if "Start" in along.attrib: labels = range( - int(along.attrib['Start']), - int(along.attrib['End']) + 1, - int(along.attrib.get('Step', 1))) + int(along.attrib["Start"]), + int(along.attrib["End"]) + 1, + int(along.attrib.get("Step", 1)), + ) else: - labels = [label.text for label in along - if label.tag.endswith('Label')] + labels = [ + label.text + for label in along + if label.tag.endswith("Label") + ] modulo[axis] = (newaxis, labels) - if not element.tag.endswith('Image'): + if not element.tag.endswith("Image"): continue for pixels in element: - if not pixels.tag.endswith('Pixels'): + if not pixels.tag.endswith("Pixels"): continue atr = pixels.attrib - axes = "".join(reversed(atr['DimensionOrder'])) - shape = list(int(atr['Size'+ax]) for ax in axes) + axes = "".join(reversed(atr["DimensionOrder"])) + shape = list(int(atr["Size" + ax]) for ax in axes) size = numpy.prod(shape[:-2]) ifds = [None] * size for data in pixels: - if not data.tag.endswith('TiffData'): + if not data.tag.endswith("TiffData"): continue atr = data.attrib - ifd = int(atr.get('IFD', 0)) - num = int(atr.get('NumPlanes', 1 if 'IFD' in atr else 0)) - num = int(atr.get('PlaneCount', num)) - idx = [int(atr.get('First'+ax, 0)) for ax in axes[:-2]] + ifd = int(atr.get("IFD", 0)) + num = int(atr.get("NumPlanes", 1 if "IFD" in atr else 0)) + num = int(atr.get("PlaneCount", num)) + idx = [int(atr.get("First" + ax, 0)) for ax in axes[:-2]] idx = numpy.ravel_multi_index(idx, shape[:-2]) 
for uuid in data: - if uuid.tag.endswith('UUID'): + if uuid.tag.endswith("UUID"): if uuid.text not in self._tiffs: if not self._multifile: # abort reading multi file OME series return [] - fn = uuid.attrib['FileName'] + fn = uuid.attrib["FileName"] try: tf = TiffFile(os.path.join(self.fpath, fn)) except (IOError, ValueError): @@ -891,8 +1020,14 @@ def _omeseries(self): ifds[idx + i] = pages[ifd + i] except IndexError: warnings.warn("ome-xml: index out of range") - result.append(Record(axes=axes, shape=shape, pages=ifds, - dtype=numpy.dtype(ifds[0].dtype))) + result.append( + Record( + axes=axes, + shape=shape, + pages=ifds, + dtype=numpy.dtype(ifds[0].dtype), + ) + ) for record in result: for axis, (newaxis, labels) in modulo.items(): @@ -902,8 +1037,8 @@ def _omeseries(self): record.axes = record.axes.replace(axis, newaxis, 1) else: record.shape[i] //= size - record.shape.insert(i+1, size) - record.axes = record.axes.replace(axis, axis+newaxis, 1) + record.shape.insert(i + 1, size) + record.axes = record.axes.replace(axis, axis + newaxis, 1) return result @@ -924,7 +1059,8 @@ def __str__(self): result = [ self.fname.capitalize(), format_size(self._fsize), - {'<': 'little endian', '>': 'big endian'}[self.byteorder]] + {"<": "little endian", ">": "big endian"}[self.byteorder], + ] if self.is_bigtiff: result.append("bigtiff") if len(self.pages) > 1: @@ -1028,8 +1164,8 @@ class TiffPage(object): Consolidated ImageJ description and metadata tags, if exists. All attributes are read-only. - """ + def __init__(self, parent): """Initialize instance from file.""" self.parent = parent @@ -1045,17 +1181,16 @@ def __init__(self, parent): def _fromfile(self): """Read TIFF IFD structure and its tags from file. - File cursor must be at storage position of IFD offset and is left at - offset to next IFD. + File cursor must be at storage position of IFD offset and is + left at offset to next IFD. Raises StopIteration if offset (first bytes read) is 0. 
- """ fh = self.parent._fh byteorder = self.parent.byteorder offset_size = self.parent.offset_size - fmt = {4: 'I', 8: 'Q'}[offset_size] + fmt = {4: "I", 8: "Q"}[offset_size] offset = struct.unpack(byteorder + fmt, fh.read(offset_size))[0] if not offset: raise StopIteration() @@ -1063,7 +1198,7 @@ def _fromfile(self): # read standard tags tags = self.tags fh.seek(offset) - fmt, size = {4: ('H', 2), 8: ('Q', 8)}[offset_size] + fmt, size = {4: ("H", 2), 8: ("Q", 8)}[offset_size] try: numtags = struct.unpack(byteorder + fmt, fh.read(size))[0] except Exception: @@ -1080,14 +1215,14 @@ def _fromfile(self): if tagcode > tag.code: warnings.warn("tags are not ordered by code") tagcode = tag.code - if not tag.name in tags: + if tag.name not in tags: tags[tag.name] = tag else: # some files contain multiple IFD with same code # e.g. MicroManager files contain two image_description - for ext in ('_1', '_2', '_3'): + for ext in ("_1", "_2", "_3"): name = tag.name + ext - if not name in tags: + if name not in tags: tags[name] = tag break @@ -1096,14 +1231,14 @@ def _fromfile(self): pos = fh.tell() for name, reader in CZ_LSM_INFO_READERS.items(): try: - offset = self.cz_lsm_info['offset_'+name] + offset = self.cz_lsm_info["offset_" + name] except KeyError: continue if not offset: continue fh.seek(offset) try: - setattr(self, 'cz_lsm_'+name, reader(fh, byteorder)) + setattr(self, "cz_lsm_" + name, reader(fh, byteorder)) except ValueError: pass fh.seek(pos) @@ -1112,51 +1247,61 @@ def _process_tags(self): """Validate standard tags and initialize attributes. Raise ValueError if tag values are not supported. 
- """ tags = self.tags for code, (name, default, dtype, count, validate) in TIFF_TAGS.items(): if not (name in tags or default is None): - tags[name] = TiffTag(code, dtype=dtype, count=count, - value=default, name=name) + tags[name] = TiffTag( + code, dtype=dtype, count=count, value=default, name=name + ) if name in tags and validate: try: if tags[name].count == 1: setattr(self, name, validate[tags[name].value]) else: - setattr(self, name, tuple( - validate[value] for value in tags[name].value)) + setattr( + self, + name, + tuple( + validate[value] for value in tags[name].value + ), + ) except KeyError: - raise ValueError("%s.value (%s) not supported" % - (name, tags[name].value)) + raise ValueError( + "%s.value (%s) not supported" + % (name, tags[name].value) + ) - tag = tags['bits_per_sample'] + tag = tags["bits_per_sample"] if tag.count == 1: self.bits_per_sample = tag.value else: - value = tag.value[:self.samples_per_pixel] - if any((v-value[0] for v in value)): + value = tag.value[: self.samples_per_pixel] + if any((v - value[0] for v in value)): self.bits_per_sample = value else: self.bits_per_sample = value[0] - tag = tags['sample_format'] + tag = tags["sample_format"] if tag.count == 1: self.sample_format = TIFF_SAMPLE_FORMATS[tag.value] else: - value = tag.value[:self.samples_per_pixel] - if any((v-value[0] for v in value)): + value = tag.value[: self.samples_per_pixel] + if any((v - value[0] for v in value)): self.sample_format = [TIFF_SAMPLE_FORMATS[v] for v in value] else: self.sample_format = TIFF_SAMPLE_FORMATS[value[0]] - if not 'photometric' in tags: + if "photometric" not in tags: self.photometric = None - if 'image_length' in tags: - self.strips_per_image = int(math.floor( - float(self.image_length + self.rows_per_strip - 1) / - self.rows_per_strip)) + if "image_length" in tags: + self.strips_per_image = int( + math.floor( + float(self.image_length + self.rows_per_strip - 1) + / self.rows_per_strip + ) + ) else: self.strips_per_image = 0 @@ 
-1165,37 +1310,40 @@ def _process_tags(self): if self.is_imagej: # consolidate imagej meta data - if 'image_description_1' in self.tags: # MicroManager - adict = imagej_description(tags['image_description_1'].value) + if "image_description_1" in self.tags: # MicroManager + adict = imagej_description(tags["image_description_1"].value) else: - adict = imagej_description(tags['image_description'].value) - if 'imagej_metadata' in tags: + adict = imagej_description(tags["image_description"].value) + if "imagej_metadata" in tags: try: - adict.update(imagej_metadata( - tags['imagej_metadata'].value, - tags['imagej_byte_counts'].value, - self.parent.byteorder)) + adict.update( + imagej_metadata( + tags["imagej_metadata"].value, + tags["imagej_byte_counts"].value, + self.parent.byteorder, + ) + ) except Exception as e: warnings.warn(str(e)) self.imagej_tags = Record(adict) - if not 'image_length' in self.tags or not 'image_width' in self.tags: + if "image_length" not in self.tags or "image_width" not in self.tags: # some GEL file pages are missing image data self.image_length = 0 self.image_width = 0 self.strip_offsets = 0 self._shape = () self.shape = () - self.axes = '' + self.axes = "" if self.is_palette: - self.dtype = self.tags['color_map'].dtype[1] + self.dtype = self.tags["color_map"].dtype[1] self.color_map = numpy.array(self.color_map, self.dtype) dmax = self.color_map.max() if dmax < 256: self.dtype = numpy.uint8 self.color_map = self.color_map.astype(self.dtype) - #else: + # else: # self.dtype = numpy.uint8 # self.color_map >>= 8 # self.color_map = self.color_map.astype(self.dtype) @@ -1203,65 +1351,99 @@ def _process_tags(self): if self.is_stk: # consolidate mm_uci tags - planes = tags['mm_uic2'].count - self.mm_uic_tags = Record(tags['mm_uic2'].value) - for key in ('mm_uic3', 'mm_uic4', 'mm_uic1'): + planes = tags["mm_uic2"].count + self.mm_uic_tags = Record(tags["mm_uic2"].value) + for key in ("mm_uic3", "mm_uic4", "mm_uic1"): if key in tags: 
self.mm_uic_tags.update(tags[key].value) - if self.planar_configuration == 'contig': - self._shape = (planes, 1, self.image_length, self.image_width, - self.samples_per_pixel) + if self.planar_configuration == "contig": + self._shape = ( + planes, + 1, + self.image_length, + self.image_width, + self.samples_per_pixel, + ) self.shape = tuple(self._shape[i] for i in (0, 2, 3, 4)) - self.axes = 'PYXS' + self.axes = "PYXS" else: - self._shape = (planes, self.samples_per_pixel, - self.image_length, self.image_width, 1) + self._shape = ( + planes, + self.samples_per_pixel, + self.image_length, + self.image_width, + 1, + ) self.shape = self._shape[:4] - self.axes = 'PSYX' - if self.is_palette and (self.color_map.shape[1] - >= 2**self.bits_per_sample): + self.axes = "PSYX" + if self.is_palette and ( + self.color_map.shape[1] >= 2**self.bits_per_sample + ): self.shape = (3, planes, self.image_length, self.image_width) - self.axes = 'CPYX' + self.axes = "CPYX" else: warnings.warn("palette cannot be applied") self.is_palette = False elif self.is_palette: samples = 1 - if 'extra_samples' in self.tags: + if "extra_samples" in self.tags: samples += len(self.extra_samples) - if self.planar_configuration == 'contig': + if self.planar_configuration == "contig": self._shape = ( - 1, 1, self.image_length, self.image_width, samples) + 1, + 1, + self.image_length, + self.image_width, + samples, + ) else: self._shape = ( - 1, samples, self.image_length, self.image_width, 1) + 1, + samples, + self.image_length, + self.image_width, + 1, + ) if self.color_map.shape[1] >= 2**self.bits_per_sample: self.shape = (3, self.image_length, self.image_width) - self.axes = 'CYX' + self.axes = "CYX" else: warnings.warn("palette cannot be applied") self.is_palette = False self.shape = (self.image_length, self.image_width) - self.axes = 'YX' + self.axes = "YX" elif self.is_rgb or self.samples_per_pixel > 1: - if self.planar_configuration == 'contig': - self._shape = (1, 1, self.image_length, 
self.image_width, - self.samples_per_pixel) - self.shape = (self.image_length, self.image_width, - self.samples_per_pixel) - self.axes = 'YXS' + if self.planar_configuration == "contig": + self._shape = ( + 1, + 1, + self.image_length, + self.image_width, + self.samples_per_pixel, + ) + self.shape = ( + self.image_length, + self.image_width, + self.samples_per_pixel, + ) + self.axes = "YXS" else: - self._shape = (1, self.samples_per_pixel, self.image_length, - self.image_width, 1) + self._shape = ( + 1, + self.samples_per_pixel, + self.image_length, + self.image_width, + 1, + ) self.shape = self._shape[1:-1] - self.axes = 'SYX' - if self.is_rgb and 'extra_samples' in self.tags: + self.axes = "SYX" + if self.is_rgb and "extra_samples" in self.tags: extra_samples = self.extra_samples - if self.tags['extra_samples'].count == 1: - extra_samples = (extra_samples, ) + if self.tags["extra_samples"].count == 1: + extra_samples = (extra_samples,) for exs in extra_samples: - if exs in ('unassalpha', 'assocalpha', 'unspecified'): - if self.planar_configuration == 'contig': + if exs in ("unassalpha", "assocalpha", "unspecified"): + if self.planar_configuration == "contig": self.shape = self.shape[:2] + (4,) else: self.shape = (4,) + self.shape[1:] @@ -1269,14 +1451,16 @@ def _process_tags(self): else: self._shape = (1, 1, self.image_length, self.image_width, 1) self.shape = self._shape[2:4] - self.axes = 'YX' + self.axes = "YX" - if not self.compression and not 'strip_byte_counts' in tags: + if not self.compression and "strip_byte_counts" not in tags: self.strip_byte_counts = numpy.prod(self.shape) * ( - self.bits_per_sample // 8) + self.bits_per_sample // 8 + ) - def asarray(self, squeeze=True, colormapped=True, rgbonly=True, - memmap=False): + def asarray( + self, squeeze=True, colormapped=True, rgbonly=True, memmap=False + ): """Read image data from file and return as numpy array. Raise ValueError if format is unsupported. 
@@ -1294,21 +1478,23 @@ def asarray(self, squeeze=True, colormapped=True, rgbonly=True, If True, return RGB(A) image without additional extra samples. memmap : bool If True, use numpy.memmap to read array if possible. - """ fh = self.parent._fh if not fh: raise IOError("TIFF file is not open") if self.dtype is None: - raise ValueError("data type not supported: %s%i" % ( - self.sample_format, self.bits_per_sample)) + raise ValueError( + "data type not supported: %s%i" + % (self.sample_format, self.bits_per_sample) + ) if self.compression not in TIFF_DECOMPESSORS: raise ValueError("cannot decompress %s" % self.compression) - if ('ycbcr_subsampling' in self.tags - and self.tags['ycbcr_subsampling'].value not in (1, (1, 1))): + if "ycbcr_subsampling" in self.tags and self.tags[ + "ycbcr_subsampling" + ].value not in (1, (1, 1)): raise ValueError("YCbCr subsampling not supported") - tag = self.tags['sample_format'] - if tag.count != 1 and any((i-tag.value[0] for i in tag.value)): + tag = self.tags["sample_format"] + if tag.count != 1 and any((i - tag.value[0] for i in tag.value)): raise ValueError("sample formats don't match %s" % str(tag.value)) dtype = self._dtype @@ -1321,11 +1507,12 @@ def asarray(self, squeeze=True, colormapped=True, rgbonly=True, image_length = self.image_length typecode = self.parent.byteorder + dtype bits_per_sample = self.bits_per_sample - byteorder_is_native = ({'big': '>', 'little': '<'}[sys.byteorder] == - self.parent.byteorder) + byteorder_is_native = {"big": ">", "little": "<"}[ + sys.byteorder + ] == self.parent.byteorder if self.is_tiled: - if 'tile_offsets' in self.tags: + if "tile_offsets" in self.tags: byte_counts = self.tile_byte_counts offsets = self.tile_offsets else: @@ -1335,7 +1522,7 @@ def asarray(self, squeeze=True, colormapped=True, rgbonly=True, tile_length = self.tile_length tw = (image_width + tile_width - 1) // tile_width tl = (image_length + tile_length - 1) // tile_length - shape = shape[:-3] + (tl*tile_length, 
tw*tile_width, shape[-1]) + shape = shape[:-3] + (tl * tile_length, tw * tile_width, shape[-1]) tile_shape = (tile_length, tile_width, shape[-1]) runlen = tile_width else: @@ -1346,27 +1533,37 @@ def asarray(self, squeeze=True, colormapped=True, rgbonly=True, try: offsets[0] except TypeError: - offsets = (offsets, ) - byte_counts = (byte_counts, ) + offsets = (offsets,) + byte_counts = (byte_counts,) if any(o < 2 for o in offsets): raise ValueError("corrupted page") - if (not self.is_tiled and (self.is_stk or (not self.compression - and bits_per_sample in (8, 16, 32, 64) - and all(offsets[i] == offsets[i+1] - byte_counts[i] - for i in range(len(offsets)-1))))): + if not self.is_tiled and ( + self.is_stk + or ( + not self.compression + and bits_per_sample in (8, 16, 32, 64) + and all( + offsets[i] == offsets[i + 1] - byte_counts[i] + for i in range(len(offsets) - 1) + ) + ) + ): # contiguous data - if (memmap and not (self.is_tiled or self.predictor or - ('extra_samples' in self.tags) or - (colormapped and self.is_palette) or - (not byteorder_is_native))): - result = numpy.memmap(fh, typecode, 'r', offsets[0], shape) + if memmap and not ( + self.is_tiled + or self.predictor + or ("extra_samples" in self.tags) + or (colormapped and self.is_palette) + or (not byteorder_is_native) + ): + result = numpy.memmap(fh, typecode, "r", offsets[0], shape) else: fh.seek(offsets[0]) result = numpy_fromfile(fh, typecode, numpy.prod(shape)) - result = result.astype('=' + dtype) + result = result.astype("=" + dtype) else: - if self.planar_configuration == 'contig': + if self.planar_configuration == "contig": runlen *= self.samples_per_pixel if bits_per_sample in (8, 16, 32, 64, 128): if (bits_per_sample * runlen) % 8: @@ -1374,12 +1571,17 @@ def asarray(self, squeeze=True, colormapped=True, rgbonly=True, def unpack(x): return numpy.fromstring(x, typecode) + elif isinstance(bits_per_sample, tuple): + def unpack(x): return unpackrgb(x, typecode, bits_per_sample) + else: + def 
unpack(x): return unpackints(x, typecode, bits_per_sample, runlen) + decompress = TIFF_DECOMPESSORS[self.compression] if self.is_tiled: result = numpy.empty(shape, dtype) @@ -1388,10 +1590,11 @@ def unpack(x): fh.seek(offset) tile = unpack(decompress(fh.read(bytecount))) tile.shape = tile_shape - if self.predictor == 'horizontal': + if self.predictor == "horizontal": numpy.cumsum(tile, axis=-2, dtype=dtype, out=tile) - result[0, pl, tl:tl+tile_length, - tw:tw+tile_width, :] = tile + result[ + 0, pl, tl : tl + tile_length, tw : tw + tile_width, : + ] = tile del tile tw += tile_width if tw >= shape[-2]: @@ -1400,23 +1603,30 @@ def unpack(x): tl, pl = 0, pl + 1 result = result[..., :image_length, :image_width, :] else: - strip_size = (self.rows_per_strip * self.image_width * - self.samples_per_pixel) + strip_size = ( + self.rows_per_strip + * self.image_width + * self.samples_per_pixel + ) result = numpy.empty(shape, dtype).reshape(-1) index = 0 for offset, bytecount in zip(offsets, byte_counts): fh.seek(offset) strip = fh.read(bytecount) strip = unpack(decompress(strip)) - size = min(result.size, strip.size, strip_size, - result.size - index) - result[index:index+size] = strip[:size] + size = min( + result.size, + strip.size, + strip_size, + result.size - index, + ) + result[index : index + size] = strip[:size] del strip index += size result.shape = self._shape - if self.predictor == 'horizontal' and not self.is_tiled: + if self.predictor == "horizontal" and not self.is_tiled: # work around bug in LSM510 software if not (self.parent.is_lsm and not self.compression): numpy.cumsum(result, axis=-2, dtype=dtype, out=result) @@ -1424,22 +1634,23 @@ def unpack(x): if colormapped and self.is_palette: if self.color_map.shape[1] >= 2**bits_per_sample: # FluoView and LSM might fail here - result = numpy.take(self.color_map, - result[:, 0, :, :, 0], axis=1) - elif rgbonly and self.is_rgb and 'extra_samples' in self.tags: + result = numpy.take( + self.color_map, result[:, 0, :, 
:, 0], axis=1 + ) + elif rgbonly and self.is_rgb and "extra_samples" in self.tags: # return only RGB and first alpha channel if exists extra_samples = self.extra_samples - if self.tags['extra_samples'].count == 1: - extra_samples = (extra_samples, ) + if self.tags["extra_samples"].count == 1: + extra_samples = (extra_samples,) for i, exs in enumerate(extra_samples): - if exs in ('unassalpha', 'assocalpha', 'unspecified'): - if self.planar_configuration == 'contig': - result = result[..., [0, 1, 2, 3+i]] + if exs in ("unassalpha", "assocalpha", "unspecified"): + if self.planar_configuration == "contig": + result = result[..., [0, 1, 2, 3 + i]] else: - result = result[:, [0, 1, 2, 3+i]] + result = result[:, [0, 1, 2, 3 + i]] break else: - if self.planar_configuration == 'contig': + if self.planar_configuration == "contig": result = result[..., :3] else: result = result[:, :3] @@ -1448,23 +1659,43 @@ def unpack(x): try: result.shape = self.shape except ValueError: - warnings.warn("failed to reshape from %s to %s" % ( - str(result.shape), str(self.shape))) + warnings.warn( + "failed to reshape from %s to %s" + % (str(result.shape), str(self.shape)) + ) return result def __str__(self): """Return string containing information about page.""" - s = ', '.join(s for s in ( - ' x '.join(str(i) for i in self.shape), - str(numpy.dtype(self.dtype)), - '%s bit' % str(self.bits_per_sample), - self.photometric if 'photometric' in self.tags else '', - self.compression if self.compression else 'raw', - '|'.join(t[3:] for t in ( - 'is_stk', 'is_lsm', 'is_nih', 'is_ome', 'is_imagej', - 'is_micromanager', 'is_fluoview', 'is_mdgel', 'is_mediacy', - 'is_reduced', 'is_tiled') if getattr(self, t))) if s) + s = ", ".join( + s + for s in ( + " x ".join(str(i) for i in self.shape), + str(numpy.dtype(self.dtype)), + "%s bit" % str(self.bits_per_sample), + self.photometric if "photometric" in self.tags else "", + self.compression if self.compression else "raw", + "|".join( + t[3:] + for t in ( + 
"is_stk", + "is_lsm", + "is_nih", + "is_ome", + "is_imagej", + "is_micromanager", + "is_fluoview", + "is_mdgel", + "is_mediacy", + "is_reduced", + "is_tiled", + ) + if getattr(self, t) + ), + ) + if s + ) return "Page %i: %s" % (self.index, s) def __getattr__(self, name): @@ -1478,81 +1709,88 @@ def __getattr__(self, name): @lazyattr def is_rgb(self): """True if page contains a RGB image.""" - return ('photometric' in self.tags and - self.tags['photometric'].value == 2) + return ( + "photometric" in self.tags and self.tags["photometric"].value == 2 + ) @lazyattr def is_palette(self): """True if page contains a palette-colored image.""" - return ('photometric' in self.tags and - self.tags['photometric'].value == 3) + return ( + "photometric" in self.tags and self.tags["photometric"].value == 3 + ) @lazyattr def is_tiled(self): """True if page contains tiled image.""" - return 'tile_width' in self.tags + return "tile_width" in self.tags @lazyattr def is_reduced(self): """True if page is a reduced image of another image.""" - return bool(self.tags['new_subfile_type'].value & 1) + return bool(self.tags["new_subfile_type"].value & 1) @lazyattr def is_mdgel(self): """True if page contains md_file_tag tag.""" - return 'md_file_tag' in self.tags + return "md_file_tag" in self.tags @lazyattr def is_mediacy(self): """True if page contains Media Cybernetics Id tag.""" - return ('mc_id' in self.tags and - self.tags['mc_id'].value.startswith(b'MC TIFF')) + return "mc_id" in self.tags and self.tags["mc_id"].value.startswith( + b"MC TIFF" + ) @lazyattr def is_stk(self): """True if page contains MM_UIC2 tag.""" - return 'mm_uic2' in self.tags + return "mm_uic2" in self.tags @lazyattr def is_lsm(self): """True if page contains LSM CZ_LSM_INFO tag.""" - return 'cz_lsm_info' in self.tags + return "cz_lsm_info" in self.tags @lazyattr def is_fluoview(self): """True if page contains FluoView MM_STAMP tag.""" - return 'mm_stamp' in self.tags + return "mm_stamp" in self.tags @lazyattr def 
is_nih(self): """True if page contains NIH image header.""" - return 'nih_image_header' in self.tags + return "nih_image_header" in self.tags @lazyattr def is_ome(self): """True if page contains OME-XML in image_description tag.""" - return ('image_description' in self.tags and self.tags[ - 'image_description'].value.startswith(b' parent.offset_size or code in CUSTOM_TAGS: pos = fh.tell() - tof = {4: 'I', 8: 'Q'}[parent.offset_size] - self.value_offset = offset = struct.unpack(byteorder+tof, value)[0] + tof = {4: "I", 8: "Q"}[parent.offset_size] + self.value_offset = offset = struct.unpack(byteorder + tof, value)[ + 0 + ] if offset < 0 or offset > parent._fsize: raise TiffTag.Error("corrupt file - invalid tag value offset") elif offset < 4: @@ -1640,7 +1891,7 @@ def _fromfile(self, parent): fh.seek(0, 2) # bug in numpy/Python 3.x ? if isinstance(value, dict): # numpy.core.records.record value = Record(value) - elif code in TIFF_TAGS or dtype[-1] == 's': + elif code in TIFF_TAGS or dtype[-1] == "s": value = struct.unpack(fmt, fh.read(size)) else: value = read_numpy(fh, byteorder, dtype, count) @@ -1649,11 +1900,11 @@ def _fromfile(self, parent): else: value = struct.unpack(fmt, value[:size]) - if not code in CUSTOM_TAGS: + if code not in CUSTOM_TAGS: if len(value) == 1: value = value[0] - if dtype.endswith('s') and isinstance(value, bytes): + if dtype.endswith("s") and isinstance(value, bytes): value = stripnull(value) self.code = code @@ -1664,7 +1915,7 @@ def _fromfile(self, parent): def __str__(self): """Return string containing information about tag.""" - return ' '.join(str(getattr(self, s)) for s in self.__slots__) + return " ".join(str(getattr(self, s)) for s in self.__slots__) class TiffSequence(object): @@ -1685,9 +1936,9 @@ class TiffSequence(object): >>> ims = ims.asarray() >>> ims.shape (2, 100, 256, 256) - """ - _axes_pattern = """ + + _axes_pattern = r""" # matches Olympus OIF and Leica TIFF series _?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4})) 
_?(?:(q|l|p|a|c|t|x|y|z|ch|tp)(\d{1,4}))? @@ -1701,7 +1952,7 @@ class TiffSequence(object): class _ParseError(Exception): pass - def __init__(self, files, imread=TiffFile, pattern='axes'): + def __init__(self, files, imread=TiffFile, pattern="axes"): """Initialize instance from multiple files. Parameters @@ -1714,18 +1965,17 @@ def __init__(self, files, imread=TiffFile, pattern='axes'): pattern : str Regular expression pattern that matches axes names and sequence indices in file names. - """ if isinstance(files, basestring): files = natural_sorted(glob.glob(files)) files = list(files) if not files: raise ValueError("no files found") - #if not os.path.isfile(files[0]): + # if not os.path.isfile(files[0]): # raise ValueError("file not found") self.files = files - if hasattr(imread, 'asarray'): + if hasattr(imread, "asarray"): _imread = imread def imread(fname, *args, **kwargs): @@ -1734,24 +1984,27 @@ def imread(fname, *args, **kwargs): self.imread = imread - self.pattern = self._axes_pattern if pattern == 'axes' else pattern + self.pattern = self._axes_pattern if pattern == "axes" else pattern try: self._parse() if not self.axes: - self.axes = 'I' + self.axes = "I" except self._ParseError: - self.axes = 'I' + self.axes = "I" self.shape = (len(files),) self._start_index = (0,) self._indices = ((i,) for i in range(len(files))) def __str__(self): """Return string with information about image sequence.""" - return "\n".join([ - self.files[0], - '* files: %i' % len(self.files), - '* axes: %s' % self.axes, - '* shape: %s' % str(self.shape)]) + return "\n".join( + [ + self.files[0], + "* files: %i" % len(self.files), + "* axes: %s" % self.axes, + "* shape: %s" % str(self.shape), + ] + ) def __len__(self): return len(self.files) @@ -1766,17 +2019,17 @@ def close(self): pass def asarray(self, *args, **kwargs): - """Read image data from all files and return as single numpy array. + """Read image data from all files and return as single numpy + array. 
Raise IndexError if image shapes don't match. - """ im = self.imread(self.files[0]) result_shape = self.shape + im.shape result = numpy.zeros(result_shape, dtype=im.dtype) result = result.reshape(-1, *im.shape) for index, fname in zip(self._indices, self.files): - index = [i-j for i, j in zip(index, self._start_index)] + index = [i - j for i, j in zip(index, self._start_index)] index = numpy.ravel_multi_index(index, self.shape) im = self.imread(fname, *args, **kwargs) result[index] = im @@ -1794,19 +2047,19 @@ def _parse(self): matches = matches[-1] if len(matches) % 2: raise self._ParseError("pattern doesn't match axis name and index") - axes = ''.join(m for m in matches[::2] if m) + axes = "".join(m for m in matches[::2] if m) if not axes: raise self._ParseError("pattern doesn't match file names") indices = [] for fname in self.files: matches = pattern.findall(fname)[-1] - if axes != ''.join(m for m in matches[::2] if m): + if axes != "".join(m for m in matches[::2] if m): raise ValueError("axes don't match within the image sequence") indices.append([int(m) for m in matches[1::2] if m]) shape = tuple(numpy.max(indices, axis=0)) start_index = tuple(numpy.min(indices, axis=0)) - shape = tuple(i-j+1 for i, j in zip(shape, start_index)) + shape = tuple(i - j + 1 for i, j in zip(shape, start_index)) if numpy.prod(shape) != len(self.files): warnings.warn("files are missing. Missing data are zeroed") @@ -1820,8 +2073,8 @@ class Record(dict): """Dictionary with attribute access. Can also be initialized with numpy.core.records.record. 
- """ + __slots__ = () def __init__(self, arg=None, **kwargs): @@ -1834,7 +2087,7 @@ def __init__(self, arg=None, **kwargs): except (TypeError, ValueError): for i, name in enumerate(arg.dtype.names): v = arg[i] - self[name] = v if v.dtype.char != 'S' else stripnull(v) + self[name] = v if v.dtype.char != "S" else stripnull(v) def __getattr__(self, name): return self[name] @@ -1847,7 +2100,7 @@ def __str__(self): s = [] lists = [] for k in sorted(self): - if k.startswith('_'): # does not work with byte + if k.startswith("_"): # does not work with byte continue v = self[k] if isinstance(v, (list, tuple)) and len(v): @@ -1857,43 +2110,51 @@ def __str__(self): elif isinstance(v[0], TiffPage): v = [i.index for i in v if i] s.append( - ("* %s: %s" % (k, str(v))).split("\n", 1)[0] - [:PRINT_LINE_LEN].rstrip()) + ("* %s: %s" % (k, str(v))) + .split("\n", 1)[0][:PRINT_LINE_LEN] + .rstrip() + ) for k, v in lists: - l = [] + parse_list = [] for i, w in enumerate(v): - l.append("* %s[%i]\n %s" % (k, i, - str(w).replace("\n", "\n "))) - s.append('\n'.join(l)) - return '\n'.join(s) + parse_list.append( + "* %s[%i]\n %s" % (k, i, str(w).replace("\n", "\n ")) + ) + s.append("\n".join(parse_list)) + return "\n".join(s) class TiffTags(Record): """Dictionary of TiffTags with attribute access.""" + def __str__(self): """Return string with information about all tags.""" s = [] for tag in sorted(self.values(), key=lambda x: x.code): typecode = "%i%s" % (tag.count * int(tag.dtype[0]), tag.dtype[1]) - line = "* %i %s (%s) %s" % (tag.code, tag.name, typecode, - str(tag.value).split('\n', 1)[0]) + line = "* %i %s (%s) %s" % ( + tag.code, + tag.name, + typecode, + str(tag.value).split("\n", 1)[0], + ) s.append(line[:PRINT_LINE_LEN].lstrip()) - return '\n'.join(s) + return "\n".join(s) def read_bytes(fh, byteorder, dtype, count): """Read tag data from file and return as byte string.""" - return numpy_fromfile(fh, byteorder+dtype[-1], count).tostring() + return numpy_fromfile(fh, byteorder + 
dtype[-1], count).tostring() def read_numpy(fh, byteorder, dtype, count): """Read tag data from file and return as numpy array.""" - return numpy_fromfile(fh, byteorder+dtype[-1], count) + return numpy_fromfile(fh, byteorder + dtype[-1], count) def read_json(fh, byteorder, dtype, count): """Read tag data from file and return as object.""" - return json.loads(unicode(stripnull(fh.read(count)), 'utf-8')) + return json.loads(unicode(stripnull(fh.read(count)), "utf-8")) def read_mm_header(fh, byteorder, dtype, count): @@ -1903,65 +2164,66 @@ def read_mm_header(fh, byteorder, dtype, count): def read_mm_stamp(fh, byteorder, dtype, count): """Read MM_STAMP tag from file and return as numpy.array.""" - return numpy_fromfile(fh, byteorder+'8f8', 1)[0] + return numpy_fromfile(fh, byteorder + "8f8", 1)[0] def read_mm_uic1(fh, byteorder, dtype, count): """Read MM_UIC1 tag from file and return as dictionary.""" - t = fh.read(8*count) - t = struct.unpack('%s%iI' % (byteorder, 2*count), t) - return dict((MM_TAG_IDS[k], v) for k, v in zip(t[::2], t[1::2]) - if k in MM_TAG_IDS) + t = fh.read(8 * count) + t = struct.unpack("%s%iI" % (byteorder, 2 * count), t) + return dict( + (MM_TAG_IDS[k], v) for k, v in zip(t[::2], t[1::2]) if k in MM_TAG_IDS + ) def read_mm_uic2(fh, byteorder, dtype, count): """Read MM_UIC2 tag from file and return as dictionary.""" - result = {'number_planes': count} - values = numpy_fromfile(fh, byteorder+'I', 6*count) - result['z_distance'] = values[0::6] // values[1::6] - #result['date_created'] = tuple(values[2::6]) - #result['time_created'] = tuple(values[3::6]) - #result['date_modified'] = tuple(values[4::6]) - #result['time_modified'] = tuple(values[5::6]) + result = {"number_planes": count} + values = numpy_fromfile(fh, byteorder + "I", 6 * count) + result["z_distance"] = values[0::6] // values[1::6] + # result['date_created'] = tuple(values[2::6]) + # result['time_created'] = tuple(values[3::6]) + # result['date_modified'] = tuple(values[4::6]) + # 
result['time_modified'] = tuple(values[5::6]) return result def read_mm_uic3(fh, byteorder, dtype, count): """Read MM_UIC3 tag from file and return as dictionary.""" - t = numpy_fromfile(fh, byteorder+'I', 2*count) - return {'wavelengths': t[0::2] // t[1::2]} + t = numpy_fromfile(fh, byteorder + "I", 2 * count) + return {"wavelengths": t[0::2] // t[1::2]} def read_mm_uic4(fh, byteorder, dtype, count): """Read MM_UIC4 tag from file and return as dictionary.""" - t = struct.unpack(byteorder + 'hI'*count, fh.read(6*count)) - return dict((MM_TAG_IDS[k], v) for k, v in zip(t[::2], t[1::2]) - if k in MM_TAG_IDS) + t = struct.unpack(byteorder + "hI" * count, fh.read(6 * count)) + return dict( + (MM_TAG_IDS[k], v) for k, v in zip(t[::2], t[1::2]) if k in MM_TAG_IDS + ) def read_cz_lsm_info(fh, byteorder, dtype, count): """Read CS_LSM_INFO tag from file and return as numpy.rec.array.""" - result = numpy.rec.fromfile(fh, CZ_LSM_INFO, 1, - byteorder=byteorder)[0] - {50350412: '1.3', 67127628: '2.0'}[result.magic_number] # validation + result = numpy.rec.fromfile(fh, CZ_LSM_INFO, 1, byteorder=byteorder)[0] + {50350412: "1.3", 67127628: "2.0"}[result.magic_number] # validation return result def read_cz_lsm_time_stamps(fh, byteorder): """Read LSM time stamps from file and return as list.""" - size, count = struct.unpack(byteorder+'II', fh.read(8)) + size, count = struct.unpack(byteorder + "II", fh.read(8)) if size != (8 + 8 * count): raise ValueError("lsm_time_stamps block is too short") - return struct.unpack(('%s%dd' % (byteorder, count)), - fh.read(8*count)) + return struct.unpack(("%s%dd" % (byteorder, count)), fh.read(8 * count)) def read_cz_lsm_event_list(fh, byteorder): - """Read LSM events from file and return as list of (time, type, text).""" - count = struct.unpack(byteorder+'II', fh.read(8))[1] + """Read LSM events from file and return as list of (time, type, + text).""" + count = struct.unpack(byteorder + "II", fh.read(8))[1] events = [] while count > 0: - esize, 
etime, etype = struct.unpack(byteorder+'IdI', fh.read(16)) + esize, etime, etype = struct.unpack(byteorder + "IdI", fh.read(16)) etext = stripnull(fh.read(esize - 16)) events.append((etime, etype, etext)) count -= 1 @@ -1973,17 +2235,17 @@ def read_cz_lsm_scan_info(fh, byteorder): block = Record() blocks = [block] unpack = struct.unpack - if 0x10000000 != struct.unpack(byteorder+"I", fh.read(4))[0]: + if 0x10000000 != struct.unpack(byteorder + "I", fh.read(4))[0]: raise ValueError("not a lsm_scan_info structure") fh.read(8) while True: - entry, dtype, size = unpack(byteorder+"III", fh.read(12)) + entry, dtype, size = unpack(byteorder + "III", fh.read(12)) if dtype == 2: value = stripnull(fh.read(size)) elif dtype == 4: - value = unpack(byteorder+"i", fh.read(4))[0] + value = unpack(byteorder + "i", fh.read(4))[0] elif dtype == 5: - value = unpack(byteorder+"d", fh.read(8))[0] + value = unpack(byteorder + "d", fh.read(8))[0] else: value = 0 if entry in CZ_LSM_SCAN_INFO_ARRAYS: @@ -2000,7 +2262,7 @@ def read_cz_lsm_scan_info(fh, byteorder): elif entry in CZ_LSM_SCAN_INFO_ATTRIBUTES: name = CZ_LSM_SCAN_INFO_ATTRIBUTES[entry] setattr(block, name, value) - elif entry == 0xffffffff: + elif entry == 0xFFFFFFFF: block = blocks.pop() else: setattr(block, "unknown_%x" % entry, value) @@ -2010,43 +2272,46 @@ def read_cz_lsm_scan_info(fh, byteorder): def read_nih_image_header(fh, byteorder, dtype, count): - """Read NIH_IMAGE_HEADER tag from file and return as numpy.rec.array.""" + """Read NIH_IMAGE_HEADER tag from file and return as + numpy.rec.array.""" a = numpy.rec.fromfile(fh, NIH_IMAGE_HEADER, 1, byteorder=byteorder)[0] a = a.newbyteorder(byteorder) - a.xunit = a.xunit[:a._xunit_len] - a.um = a.um[:a._um_len] + a.xunit = a.xunit[: a._xunit_len] + a.um = a.um[: a._um_len] return a def imagej_metadata(data, bytecounts, byteorder): """Return dict from ImageJ meta data tag value.""" - _str = str if sys.version_info[0] < 3 else lambda x: str(x, 'cp1252') + _str = str if 
sys.version_info[0] < 3 else lambda x: str(x, "cp1252") def read_string(data, byteorder): - return _str(stripnull(data[0 if byteorder == '<' else 1::2])) + return _str(stripnull(data[0 if byteorder == "<" else 1 :: 2])) def read_double(data, byteorder): - return struct.unpack(byteorder+('d' * (len(data) // 8)), data) + return struct.unpack(byteorder + ("d" * (len(data) // 8)), data) def read_bytes(data, byteorder): - #return struct.unpack('b' * len(data), data) - return numpy.fromstring(data, 'uint8') + # return struct.unpack('b' * len(data), data) + return numpy.fromstring(data, "uint8") metadata_types = { # big endian - b'info': ('info', read_string), - b'labl': ('labels', read_string), - b'rang': ('ranges', read_double), - b'luts': ('luts', read_bytes), - b'roi ': ('roi', read_bytes), - b'over': ('overlays', read_bytes)} + b"info": ("info", read_string), + b"labl": ("labels", read_string), + b"rang": ("ranges", read_double), + b"luts": ("luts", read_bytes), + b"roi ": ("roi", read_bytes), + b"over": ("overlays", read_bytes), + } metadata_types.update( # little endian - dict((k[::-1], v) for k, v in metadata_types.items())) + dict((k[::-1], v) for k, v in metadata_types.items()) + ) if not bytecounts: raise ValueError("no ImageJ meta data") - if not data[:4] in (b'IJIJ', b'JIJI'): + if not data[:4] in (b"IJIJ", b"JIJI"): raise ValueError("invalid ImageJ meta data") header_size = bytecounts[0] @@ -2054,7 +2319,9 @@ def read_bytes(data, byteorder): raise ValueError("invalid ImageJ meta data header size") ntypes = (header_size - 4) // 8 - header = struct.unpack(byteorder+'4sI'*ntypes, data[4:4+ntypes*8]) + header = struct.unpack( + byteorder + "4sI" * ntypes, data[4 : 4 + ntypes * 8] + ) pos = 4 + ntypes * 8 counter = 0 result = {} @@ -2072,14 +2339,15 @@ def read_bytes(data, byteorder): def imagej_description(description): """Return dict from ImageJ image_description tag.""" + def _bool(val): - return {b'true': True, b'false': False}[val.lower()] + return {b"true": 
True, b"false": False}[val.lower()] - _str = str if sys.version_info[0] < 3 else lambda x: str(x, 'cp1252') + _str = str if sys.version_info[0] < 3 else lambda x: str(x, "cp1252") result = {} for line in description.splitlines(): try: - key, val = line.split(b'=') + key, val = line.split(b"=") except Exception: continue key = key.strip() @@ -2095,28 +2363,37 @@ def _bool(val): def read_micromanager_metadata(fh): - """Read MicroManager non-TIFF settings from open file and return as dict. + """Read MicroManager non-TIFF settings from open file and return as + dict. - The settings can be used to read image data without parsing the TIFF file. - - Raise ValueError if file does not contain valid MicroManager metadata. + The settings can be used to read image data without parsing the TIFF + file. + Raise ValueError if file does not contain valid MicroManager + metadata. """ fh.seek(0) try: - byteorder = {b'II': '<', b'MM': '>'}[fh.read(2)] + byteorder = {b"II": "<", b"MM": ">"}[fh.read(2)] except IndexError: raise ValueError("not a MicroManager TIFF file") results = {} fh.seek(8) - (index_header, index_offset, display_header, display_offset, - comments_header, comments_offset, summary_header, summary_length - ) = struct.unpack(byteorder + "IIIIIIII", fh.read(32)) + ( + index_header, + index_offset, + display_header, + display_offset, + comments_header, + comments_offset, + summary_header, + summary_length, + ) = struct.unpack(byteorder + "IIIIIIII", fh.read(32)) if summary_header != 2355492: raise ValueError("invalid MicroManager summary_header") - results['summary'] = read_json(fh, byteorder, None, summary_length) + results["summary"] = read_json(fh, byteorder, None, summary_length) if index_header != 54773648: raise ValueError("invalid MicroManager index_header") @@ -2124,10 +2401,14 @@ def read_micromanager_metadata(fh): header, count = struct.unpack(byteorder + "II", fh.read(8)) if header != 3453623: raise ValueError("invalid MicroManager index_header") - data = 
struct.unpack(byteorder + "IIIII"*count, fh.read(20*count)) - results['index_map'] = { - 'channel': data[::5], 'slice': data[1::5], 'frame': data[2::5], - 'position': data[3::5], 'offset': data[4::5]} + data = struct.unpack(byteorder + "IIIII" * count, fh.read(20 * count)) + results["index_map"] = { + "channel": data[::5], + "slice": data[1::5], + "frame": data[2::5], + "position": data[3::5], + "offset": data[4::5], + } if display_header != 483765892: raise ValueError("invalid MicroManager display_header") @@ -2135,7 +2416,7 @@ def read_micromanager_metadata(fh): header, count = struct.unpack(byteorder + "II", fh.read(8)) if header != 347834724: raise ValueError("invalid MicroManager display_header") - results['display_settings'] = read_json(fh, byteorder, None, count) + results["display_settings"] = read_json(fh, byteorder, None, count) if comments_header != 99384722: raise ValueError("invalid MicroManager comments_header") @@ -2143,7 +2424,7 @@ def read_micromanager_metadata(fh): header, count = struct.unpack(byteorder + "II", fh.read(8)) if header != 84720485: raise ValueError("invalid MicroManager comments_header") - results['comments'] = read_json(fh, byteorder, None, count) + results["comments"] = read_json(fh, byteorder, None, count) return results @@ -2153,18 +2434,18 @@ def _replace_by(module_function, package=None, warn=True): try: from importlib import import_module except ImportError: - warnings.warn('Could not import module importlib') + warnings.warn("Could not import module importlib") return lambda func: func def decorate(func, module_function=module_function, warn=warn): try: - module, function = module_function.split('.') + module, function = module_function.split(".") if not package: module = import_module(module) else: - module = import_module('.' + module, package=package) + module = import_module("." 
+ module, package=package) func, oldfunc = getattr(module, function), func - globals()['__old_' + func.__name__] = oldfunc + globals()["__old_" + func.__name__] = oldfunc except Exception: if warn: warnings.warn("failed to import %s" % module_function) @@ -2173,14 +2454,13 @@ def decorate(func, module_function=module_function, warn=warn): return decorate -@_replace_by('_tifffile.decodepackbits') +@_replace_by("_tifffile.decodepackbits") def decodepackbits(encoded): """Decompress PackBits encoded byte string. PackBits is a simple byte-oriented run-length compression scheme. - """ - func = ord if sys.version[0] == '2' else lambda x: x + func = ord if sys.version[0] == "2" else lambda x: x result = [] result_extend = result.extend i = 0 @@ -2189,53 +2469,56 @@ def decodepackbits(encoded): n = func(encoded[i]) + 1 i += 1 if n < 129: - result_extend(encoded[i:i+n]) + result_extend(encoded[i : i + n]) i += n elif n > 129: - result_extend(encoded[i:i+1] * (258-n)) + result_extend(encoded[i : i + 1] * (258 - n)) i += 1 except IndexError: pass - return b''.join(result) if sys.version[0] == '2' else bytes(result) + return b"".join(result) if sys.version[0] == "2" else bytes(result) -@_replace_by('_tifffile.decodelzw') +@_replace_by("_tifffile.decodelzw") def decodelzw(encoded): - """Decompress LZW (Lempel-Ziv-Welch) encoded TIFF strip (byte string). + """Decompress LZW (Lempel-Ziv-Welch) encoded TIFF strip (byte + string). The strip must begin with a CLEAR code and end with an EOI code. - This is an implementation of the LZW decoding algorithm described in (1). - It is not compatible with old style LZW compressed files like quad-lzw.tif. - + This is an implementation of the LZW decoding algorithm described in + (1). It is not compatible with old style LZW compressed files like + quad-lzw.tif. 
""" len_encoded = len(encoded) bitcount_max = len_encoded * 8 unpack = struct.unpack - if sys.version[0] == '2': + if sys.version[0] == "2": newtable = [chr(i) for i in range(256)] else: newtable = [bytes([i]) for i in range(256)] newtable.extend((0, 0)) def next_code(): - """Return integer of `bitw` bits at `bitcount` position in encoded.""" + """Return integer of `bitw` bits at `bitcount` position in + encoded.""" start = bitcount // 8 - s = encoded[start:start+4] + s = encoded[start : start + 4] try: - code = unpack('>I', s)[0] + code = unpack(">I", s)[0] except Exception: - code = unpack('>I', s + b'\x00'*(4-len(s)))[0] + code = unpack(">I", s + b"\x00" * (4 - len(s)))[0] code <<= bitcount % 8 code &= mask return code >> shr switchbitch = { # code: bit-width, shr-bits, bit-mask - 255: (9, 23, int(9*'1'+'0'*23, 2)), - 511: (10, 22, int(10*'1'+'0'*22, 2)), - 1023: (11, 21, int(11*'1'+'0'*21, 2)), - 2047: (12, 20, int(12*'1'+'0'*20, 2)), } + 255: (9, 23, int(9 * "1" + "0" * 23, 2)), + 511: (10, 22, int(10 * "1" + "0" * 22, 2)), + 1023: (11, 21, int(11 * "1" + "0" * 21, 2)), + 2047: (12, 20, int(12 * "1" + "0" * 20, 2)), + } bitw, shr, mask = switchbitch[255] bitcount = 0 @@ -2281,14 +2564,16 @@ def next_code(): if code != 257: warnings.warn( - "decodelzw encountered unexpected end of stream (code %i)" % code) + "decodelzw encountered unexpected end of stream (code %i)" % code + ) - return b''.join(result) + return b"".join(result) -@_replace_by('_tifffile.unpackints') +@_replace_by("_tifffile.unpackints") def unpackints(data, dtype, itemsize, runlen=0): - """Decompress byte string to array of integers of any bit size <= 32. + """Decompress byte string to array of integers of any bit size <= + 32. Parameters ---------- @@ -2300,10 +2585,9 @@ def unpackints(data, dtype, itemsize, runlen=0): Number of bits per integer. runlen : int Number of consecutive integers, after which to start at next byte. 
- """ if itemsize == 1: # bitarray - data = numpy.fromstring(data, '|B') + data = numpy.fromstring(data, "|B") data = numpy.unpackbits(data) if runlen % 8: data = data.reshape(-1, runlen + (8 - runlen % 8)) @@ -2323,34 +2607,34 @@ def unpackints(data, dtype, itemsize, runlen=0): raise ValueError("dtype.itemsize too small") if runlen == 0: runlen = len(data) // itembytes - skipbits = runlen*itemsize % 8 + skipbits = runlen * itemsize % 8 if skipbits: skipbits = 8 - skipbits - shrbits = itembytes*8 - itemsize - bitmask = int(itemsize*'1'+'0'*shrbits, 2) - dtypestr = '>' + dtype.char # dtype always big endian? + shrbits = itembytes * 8 - itemsize + bitmask = int(itemsize * "1" + "0" * shrbits, 2) + dtypestr = ">" + dtype.char # dtype always big endian? unpack = struct.unpack - l = runlen * (len(data)*8 // (runlen*itemsize + skipbits)) - result = numpy.empty((l, ), dtype) + length = runlen * (len(data) * 8 // (runlen * itemsize + skipbits)) + result = numpy.empty((length,), dtype) bitcount = 0 for i in range(len(result)): start = bitcount // 8 - s = data[start:start+itembytes] + s = data[start : start + itembytes] try: code = unpack(dtypestr, s)[0] except Exception: - code = unpack(dtypestr, s + b'\x00'*(itembytes-len(s)))[0] + code = unpack(dtypestr, s + b"\x00" * (itembytes - len(s)))[0] code <<= bitcount % 8 code &= bitmask result[i] = code >> shrbits bitcount += itemsize - if (i+1) % runlen == 0: + if (i + 1) % runlen == 0: bitcount += skipbits return result -def unpackrgb(data, dtype='>> print(unpackrgb(data, '= bits) - data = numpy.fromstring(data, dtype.byteorder+dt) + dt = next(i for i in "BHI" if numpy.dtype(i).itemsize * 8 >= bits) + data = numpy.fromstring(data, dtype.byteorder + dt) result = numpy.empty((data.size, len(bitspersample)), dtype.char) for i, bps in enumerate(bitspersample): - t = data >> int(numpy.sum(bitspersample[i+1:])) - t &= int('0b'+'1'*bps, 2) + t = data >> int(numpy.sum(bitspersample[i + 1 :])) + t &= int("0b" + "1" * bps, 2) if 
rescale: o = ((dtype.itemsize * 8) // bps + 1) * bps if o > data.dtype.itemsize * 8: - t = t.astype('I') + t = t.astype("I") t *= (2**o - 1) // (2**bps - 1) - t //= 2**(o - (dtype.itemsize * 8)) + t //= 2 ** (o - (dtype.itemsize * 8)) result[:, i] = t return result.reshape(-1) @@ -2413,33 +2698,32 @@ def reorient(image, orientation): Axes -3 and -2 must be image length and width respectively. orientation : int or str One of TIFF_ORIENTATIONS keys or values. - """ o = TIFF_ORIENTATIONS.get(orientation, orientation) - if o == 'top_left': + if o == "top_left": return image - elif o == 'top_right': + elif o == "top_right": return image[..., ::-1, :] - elif o == 'bottom_left': + elif o == "bottom_left": return image[..., ::-1, :, :] - elif o == 'bottom_right': + elif o == "bottom_right": return image[..., ::-1, ::-1, :] - elif o == 'left_top': + elif o == "left_top": return numpy.swapaxes(image, -3, -2) - elif o == 'right_top': + elif o == "right_top": return numpy.swapaxes(image, -3, -2)[..., ::-1, :] - elif o == 'left_bottom': + elif o == "left_bottom": return numpy.swapaxes(image, -3, -2)[..., ::-1, :, :] - elif o == 'right_bottom': + elif o == "right_bottom": return numpy.swapaxes(image, -3, -2)[..., ::-1, ::-1, :] -def numpy_fromfile(arg, dtype=float, count=-1, sep=''): +def numpy_fromfile(arg, dtype=float, count=-1, sep=""): """Return array from data in binary file. - Work around numpy issue #2230, "numpy.fromfile does not accept StringIO - object" https://github.com/numpy/numpy/issues/2230. - + Work around numpy issue #2230, "numpy.fromfile does not accept + StringIO object" + https://github.com/numpy/numpy/issues/2230. 
""" try: return numpy.fromfile(arg, dtype, count, sep) @@ -2454,13 +2738,13 @@ def numpy_fromfile(arg, dtype=float, count=-1, sep=''): def stripnull(string): """Return string truncated at first null character.""" - i = string.find(b'\x00') + i = string.find(b"\x00") return string if (i < 0) else string[:i] def format_size(size): """Return file size as string from byte size.""" - for unit in ('B', 'KB', 'MB', 'GB', 'TB'): + for unit in ("B", "KB", "MB", "GB", "TB"): if size < 2048: return "%.f %s" % (size, unit) size /= 1024.0 @@ -2471,11 +2755,12 @@ def natural_sorted(iterable): >>> natural_sorted(['f1', 'f2', 'f10']) ['f1', 'f2', 'f10'] - """ + def sortkey(x): return [(int(c) if c.isdigit() else c) for c in re.split(numbers, x)] - numbers = re.compile('(\d+)') + + numbers = re.compile(r"(\d+)") return sorted(iterable, key=sortkey) @@ -2486,31 +2771,29 @@ def datetime_from_timestamp(n, epoch=datetime.datetime.fromordinal(693594)): -------- >>> datetime_from_timestamp(40237.029999999795) datetime.datetime(2010, 2, 28, 0, 43, 11, 999982) - """ return epoch + datetime.timedelta(n) -def test_tifffile(directory='testimages', verbose=True): +def test_tifffile(directory="testimages", verbose=True): """Read all images in directory. Print error message on failure. 
Examples -------- >>> test_tifffile(verbose=False) - """ successful = 0 failed = 0 start = time.time() - for f in glob.glob(os.path.join(directory, '*.*')): + for f in glob.glob(os.path.join(directory, "*.*")): if verbose: - print("\n%s>\n" % f.lower(), end='') + print("\n%s>\n" % f.lower(), end="") t0 = time.time() try: tif = TiffFile(f, multifile=True) except Exception as e: if not verbose: - print(f, end=' ') + print(f, end=" ") print("ERROR:", e) failed += 1 continue @@ -2521,7 +2804,7 @@ def test_tifffile(directory='testimages', verbose=True): img = tif[0].asarray() except Exception as e: if not verbose: - print(f, end=' ') + print(f, end=" ") print("ERROR:", e) failed += 1 continue @@ -2529,441 +2812,450 @@ def test_tifffile(directory='testimages', verbose=True): tif.close() successful += 1 if verbose: - print("%s, %s %s, %s, %.0f ms" % ( - str(tif), str(img.shape), img.dtype, tif[0].compression, - (time.time()-t0) * 1e3)) + print( + "%s, %s %s, %s, %.0f ms" + % ( + str(tif), + str(img.shape), + img.dtype, + tif[0].compression, + (time.time() - t0) * 1e3, + ) + ) if verbose: - print("\nSuccessfully read %i of %i files in %.3f s\n" % ( - successful, successful+failed, time.time()-start)) + print( + "\nSuccessfully read %i of %i files in %.3f s\n" + % (successful, successful + failed, time.time() - start) + ) class TIFF_SUBFILE_TYPES(object): def __getitem__(self, key): result = [] if key & 1: - result.append('reduced_image') + result.append("reduced_image") if key & 2: - result.append('page') + result.append("page") if key & 4: - result.append('mask') + result.append("mask") return tuple(result) TIFF_PHOTOMETRICS = { - 0: 'miniswhite', - 1: 'minisblack', - 2: 'rgb', - 3: 'palette', - 4: 'mask', - 5: 'separated', - 6: 'cielab', - 7: 'icclab', - 8: 'itulab', - 32844: 'logl', - 32845: 'logluv', + 0: "miniswhite", + 1: "minisblack", + 2: "rgb", + 3: "palette", + 4: "mask", + 5: "separated", + 6: "cielab", + 7: "icclab", + 8: "itulab", + 32844: "logl", + 32845: 
"logluv", } TIFF_COMPESSIONS = { 1: None, - 2: 'ccittrle', - 3: 'ccittfax3', - 4: 'ccittfax4', - 5: 'lzw', - 6: 'ojpeg', - 7: 'jpeg', - 8: 'adobe_deflate', - 9: 't85', - 10: 't43', - 32766: 'next', - 32771: 'ccittrlew', - 32773: 'packbits', - 32809: 'thunderscan', - 32895: 'it8ctpad', - 32896: 'it8lw', - 32897: 'it8mp', - 32898: 'it8bl', - 32908: 'pixarfilm', - 32909: 'pixarlog', - 32946: 'deflate', - 32947: 'dcs', - 34661: 'jbig', - 34676: 'sgilog', - 34677: 'sgilog24', - 34712: 'jp2000', - 34713: 'nef', + 2: "ccittrle", + 3: "ccittfax3", + 4: "ccittfax4", + 5: "lzw", + 6: "ojpeg", + 7: "jpeg", + 8: "adobe_deflate", + 9: "t85", + 10: "t43", + 32766: "next", + 32771: "ccittrlew", + 32773: "packbits", + 32809: "thunderscan", + 32895: "it8ctpad", + 32896: "it8lw", + 32897: "it8mp", + 32898: "it8bl", + 32908: "pixarfilm", + 32909: "pixarlog", + 32946: "deflate", + 32947: "dcs", + 34661: "jbig", + 34676: "sgilog", + 34677: "sgilog24", + 34712: "jp2000", + 34713: "nef", } TIFF_DECOMPESSORS = { None: lambda x: x, - 'adobe_deflate': zlib.decompress, - 'deflate': zlib.decompress, - 'packbits': decodepackbits, - 'lzw': decodelzw, + "adobe_deflate": zlib.decompress, + "deflate": zlib.decompress, + "packbits": decodepackbits, + "lzw": decodelzw, } TIFF_DATA_TYPES = { - 1: '1B', # BYTE 8-bit unsigned integer. - 2: '1s', # ASCII 8-bit byte that contains a 7-bit ASCII code; - # the last byte must be NULL (binary zero). - 3: '1H', # SHORT 16-bit (2-byte) unsigned integer - 4: '1I', # LONG 32-bit (4-byte) unsigned integer. - 5: '2I', # RATIONAL Two LONGs: the first represents the numerator of - # a fraction; the second, the denominator. - 6: '1b', # SBYTE An 8-bit signed (twos-complement) integer. - 7: '1B', # UNDEFINED An 8-bit byte that may contain anything, - # depending on the definition of the field. - 8: '1h', # SSHORT A 16-bit (2-byte) signed (twos-complement) integer. - 9: '1i', # SLONG A 32-bit (4-byte) signed (twos-complement) integer. 
- 10: '2i', # SRATIONAL Two SLONGs: the first represents the numerator - # of a fraction, the second the denominator. - 11: '1f', # FLOAT Single precision (4-byte) IEEE format. - 12: '1d', # DOUBLE Double precision (8-byte) IEEE format. - 13: '1I', # IFD unsigned 4 byte IFD offset. - #14: '', # UNICODE - #15: '', # COMPLEX - 16: '1Q', # LONG8 unsigned 8 byte integer (BigTiff) - 17: '1q', # SLONG8 signed 8 byte integer (BigTiff) - 18: '1Q', # IFD8 unsigned 8 byte IFD offset (BigTiff) + 1: "1B", # BYTE 8-bit unsigned integer. + 2: "1s", # ASCII 8-bit byte that contains a 7-bit ASCII code; + # the last byte must be NULL (binary zero). + 3: "1H", # SHORT 16-bit (2-byte) unsigned integer + 4: "1I", # LONG 32-bit (4-byte) unsigned integer. + 5: "2I", # RATIONAL Two LONGs: the first represents the numerator of + # a fraction; the second, the denominator. + 6: "1b", # SBYTE An 8-bit signed (twos-complement) integer. + 7: "1B", # UNDEFINED An 8-bit byte that may contain anything, + # depending on the definition of the field. + 8: "1h", # SSHORT A 16-bit (2-byte) signed (twos-complement) integer. + 9: "1i", # SLONG A 32-bit (4-byte) signed (twos-complement) integer. + 10: "2i", # SRATIONAL Two SLONGs: the first represents the numerator + # of a fraction, the second the denominator. + 11: "1f", # FLOAT Single precision (4-byte) IEEE format. + 12: "1d", # DOUBLE Double precision (8-byte) IEEE format. + 13: "1I", # IFD unsigned 4 byte IFD offset. 
+ # 14: '', # UNICODE + # 15: '', # COMPLEX + 16: "1Q", # LONG8 unsigned 8 byte integer (BigTiff) + 17: "1q", # SLONG8 signed 8 byte integer (BigTiff) + 18: "1Q", # IFD8 unsigned 8 byte IFD offset (BigTiff) } TIFF_SAMPLE_FORMATS = { - 1: 'uint', - 2: 'int', - 3: 'float', - #4: 'void', - #5: 'complex_int', - 6: 'complex', + 1: "uint", + 2: "int", + 3: "float", + # 4: 'void', + # 5: 'complex_int', + 6: "complex", } TIFF_SAMPLE_DTYPES = { - ('uint', 1): '?', # bitmap - ('uint', 2): 'B', - ('uint', 3): 'B', - ('uint', 4): 'B', - ('uint', 5): 'B', - ('uint', 6): 'B', - ('uint', 7): 'B', - ('uint', 8): 'B', - ('uint', 9): 'H', - ('uint', 10): 'H', - ('uint', 11): 'H', - ('uint', 12): 'H', - ('uint', 13): 'H', - ('uint', 14): 'H', - ('uint', 15): 'H', - ('uint', 16): 'H', - ('uint', 17): 'I', - ('uint', 18): 'I', - ('uint', 19): 'I', - ('uint', 20): 'I', - ('uint', 21): 'I', - ('uint', 22): 'I', - ('uint', 23): 'I', - ('uint', 24): 'I', - ('uint', 25): 'I', - ('uint', 26): 'I', - ('uint', 27): 'I', - ('uint', 28): 'I', - ('uint', 29): 'I', - ('uint', 30): 'I', - ('uint', 31): 'I', - ('uint', 32): 'I', - ('uint', 64): 'Q', - ('int', 8): 'b', - ('int', 16): 'h', - ('int', 32): 'i', - ('int', 64): 'q', - ('float', 16): 'e', - ('float', 32): 'f', - ('float', 64): 'd', - ('complex', 64): 'F', - ('complex', 128): 'D', - ('uint', (5, 6, 5)): 'B', + ("uint", 1): "?", # bitmap + ("uint", 2): "B", + ("uint", 3): "B", + ("uint", 4): "B", + ("uint", 5): "B", + ("uint", 6): "B", + ("uint", 7): "B", + ("uint", 8): "B", + ("uint", 9): "H", + ("uint", 10): "H", + ("uint", 11): "H", + ("uint", 12): "H", + ("uint", 13): "H", + ("uint", 14): "H", + ("uint", 15): "H", + ("uint", 16): "H", + ("uint", 17): "I", + ("uint", 18): "I", + ("uint", 19): "I", + ("uint", 20): "I", + ("uint", 21): "I", + ("uint", 22): "I", + ("uint", 23): "I", + ("uint", 24): "I", + ("uint", 25): "I", + ("uint", 26): "I", + ("uint", 27): "I", + ("uint", 28): "I", + ("uint", 29): "I", + ("uint", 30): "I", + ("uint", 
31): "I", + ("uint", 32): "I", + ("uint", 64): "Q", + ("int", 8): "b", + ("int", 16): "h", + ("int", 32): "i", + ("int", 64): "q", + ("float", 16): "e", + ("float", 32): "f", + ("float", 64): "d", + ("complex", 64): "F", + ("complex", 128): "D", + ("uint", (5, 6, 5)): "B", } TIFF_ORIENTATIONS = { - 1: 'top_left', - 2: 'top_right', - 3: 'bottom_right', - 4: 'bottom_left', - 5: 'left_top', - 6: 'right_top', - 7: 'right_bottom', - 8: 'left_bottom', + 1: "top_left", + 2: "top_right", + 3: "bottom_right", + 4: "bottom_left", + 5: "left_top", + 6: "right_top", + 7: "right_bottom", + 8: "left_bottom", } AXES_LABELS = { - 'X': 'width', - 'Y': 'height', - 'Z': 'depth', - 'S': 'sample', # rgb(a) - 'P': 'plane', # page - 'T': 'time', - 'C': 'channel', # color, emission wavelength - 'A': 'angle', - 'F': 'phase', - 'R': 'tile', # region, point - 'H': 'lifetime', # histogram - 'E': 'lambda', # excitation wavelength - 'L': 'exposure', # lux - 'V': 'event', - 'Q': 'other', + "X": "width", + "Y": "height", + "Z": "depth", + "S": "sample", # rgb(a) + "P": "plane", # page + "T": "time", + "C": "channel", # color, emission wavelength + "A": "angle", + "F": "phase", + "R": "tile", # region, point + "H": "lifetime", # histogram + "E": "lambda", # excitation wavelength + "L": "exposure", # lux + "V": "event", + "Q": "other", } AXES_LABELS.update(dict((v, k) for k, v in AXES_LABELS.items())) # NIH Image PicHeader v1.63 NIH_IMAGE_HEADER = [ - ('fileid', 'a8'), - ('nlines', 'i2'), - ('pixelsperline', 'i2'), - ('version', 'i2'), - ('oldlutmode', 'i2'), - ('oldncolors', 'i2'), - ('colors', 'u1', (3, 32)), - ('oldcolorstart', 'i2'), - ('colorwidth', 'i2'), - ('extracolors', 'u2', (6, 3)), - ('nextracolors', 'i2'), - ('foregroundindex', 'i2'), - ('backgroundindex', 'i2'), - ('xscale', 'f8'), - ('_x0', 'i2'), - ('_x1', 'i2'), - ('units_t', 'i2'), - ('p1', [('x', 'i2'), ('y', 'i2')]), - ('p2', [('x', 'i2'), ('y', 'i2')]), - ('curvefit_t', 'i2'), - ('ncoefficients', 'i2'), - ('coeff', 'f8', 6), - 
('_um_len', 'u1'), - ('um', 'a15'), - ('_x2', 'u1'), - ('binarypic', 'b1'), - ('slicestart', 'i2'), - ('sliceend', 'i2'), - ('scalemagnification', 'f4'), - ('nslices', 'i2'), - ('slicespacing', 'f4'), - ('currentslice', 'i2'), - ('frameinterval', 'f4'), - ('pixelaspectratio', 'f4'), - ('colorstart', 'i2'), - ('colorend', 'i2'), - ('ncolors', 'i2'), - ('fill1', '3u2'), - ('fill2', '3u2'), - ('colortable_t', 'u1'), - ('lutmode_t', 'u1'), - ('invertedtable', 'b1'), - ('zeroclip', 'b1'), - ('_xunit_len', 'u1'), - ('xunit', 'a11'), - ('stacktype_t', 'i2'), + ("fileid", "a8"), + ("nlines", "i2"), + ("pixelsperline", "i2"), + ("version", "i2"), + ("oldlutmode", "i2"), + ("oldncolors", "i2"), + ("colors", "u1", (3, 32)), + ("oldcolorstart", "i2"), + ("colorwidth", "i2"), + ("extracolors", "u2", (6, 3)), + ("nextracolors", "i2"), + ("foregroundindex", "i2"), + ("backgroundindex", "i2"), + ("xscale", "f8"), + ("_x0", "i2"), + ("_x1", "i2"), + ("units_t", "i2"), + ("p1", [("x", "i2"), ("y", "i2")]), + ("p2", [("x", "i2"), ("y", "i2")]), + ("curvefit_t", "i2"), + ("ncoefficients", "i2"), + ("coeff", "f8", 6), + ("_um_len", "u1"), + ("um", "a15"), + ("_x2", "u1"), + ("binarypic", "b1"), + ("slicestart", "i2"), + ("sliceend", "i2"), + ("scalemagnification", "f4"), + ("nslices", "i2"), + ("slicespacing", "f4"), + ("currentslice", "i2"), + ("frameinterval", "f4"), + ("pixelaspectratio", "f4"), + ("colorstart", "i2"), + ("colorend", "i2"), + ("ncolors", "i2"), + ("fill1", "3u2"), + ("fill2", "3u2"), + ("colortable_t", "u1"), + ("lutmode_t", "u1"), + ("invertedtable", "b1"), + ("zeroclip", "b1"), + ("_xunit_len", "u1"), + ("xunit", "a11"), + ("stacktype_t", "i2"), ] -#NIH_COLORTABLE_TYPE = ( +# NIH_COLORTABLE_TYPE = ( # 'CustomTable', 'AppleDefault', 'Pseudo20', 'Pseudo32', 'Rainbow', # 'Fire1', 'Fire2', 'Ice', 'Grays', 'Spectrum') -#NIH_LUTMODE_TYPE = ( +# NIH_LUTMODE_TYPE = ( # 'PseudoColor', 'OldAppleDefault', 'OldSpectrum', 'GrayScale', # 'ColorLut', 'CustomGrayscale') 
-#NIH_CURVEFIT_TYPE = ( +# NIH_CURVEFIT_TYPE = ( # 'StraightLine', 'Poly2', 'Poly3', 'Poly4', 'Poly5', 'ExpoFit', # 'PowerFit', 'LogFit', 'RodbardFit', 'SpareFit1', 'Uncalibrated', # 'UncalibratedOD') -#NIH_UNITS_TYPE = ( +# NIH_UNITS_TYPE = ( # 'Nanometers', 'Micrometers', 'Millimeters', 'Centimeters', 'Meters', # 'Kilometers', 'Inches', 'Feet', 'Miles', 'Pixels', 'OtherUnits') -#NIH_STACKTYPE_TYPE = ( +# NIH_STACKTYPE_TYPE = ( # 'VolumeStack', 'RGBStack', 'MovieStack', 'HSVStack') # MetaMorph STK tags MM_TAG_IDS = { - 0: 'auto_scale', - 1: 'min_scale', - 2: 'max_scale', - 3: 'spatial_calibration', - #4: 'x_calibration', - #5: 'y_calibration', - #6: 'calibration_units', - #7: 'name', - 8: 'thresh_state', - 9: 'thresh_state_red', - 11: 'thresh_state_green', - 12: 'thresh_state_blue', - 13: 'thresh_state_lo', - 14: 'thresh_state_hi', - 15: 'zoom', - #16: 'create_time', - #17: 'last_saved_time', - 18: 'current_buffer', - 19: 'gray_fit', - 20: 'gray_point_count', - #21: 'gray_x', - #22: 'gray_y', - #23: 'gray_min', - #24: 'gray_max', - #25: 'gray_unit_name', - 26: 'standard_lut', - 27: 'wavelength', - #28: 'stage_position', - #29: 'camera_chip_offset', - #30: 'overlay_mask', - #31: 'overlay_compress', - #32: 'overlay', - #33: 'special_overlay_mask', - #34: 'special_overlay_compress', - #35: 'special_overlay', - 36: 'image_property', - #37: 'stage_label', - #38: 'autoscale_lo_info', - #39: 'autoscale_hi_info', - #40: 'absolute_z', - #41: 'absolute_z_valid', - #42: 'gamma', - #43: 'gamma_red', - #44: 'gamma_green', - #45: 'gamma_blue', - #46: 'camera_bin', - 47: 'new_lut', - #48: 'image_property_ex', - 49: 'plane_property', - #50: 'user_lut_table', - 51: 'red_autoscale_info', - #52: 'red_autoscale_lo_info', - #53: 'red_autoscale_hi_info', - 54: 'red_minscale_info', - 55: 'red_maxscale_info', - 56: 'green_autoscale_info', - #57: 'green_autoscale_lo_info', - #58: 'green_autoscale_hi_info', - 59: 'green_minscale_info', - 60: 'green_maxscale_info', - 61: 
'blue_autoscale_info', - #62: 'blue_autoscale_lo_info', - #63: 'blue_autoscale_hi_info', - 64: 'blue_min_scale_info', - 65: 'blue_max_scale_info', - #66: 'overlay_plane_color' + 0: "auto_scale", + 1: "min_scale", + 2: "max_scale", + 3: "spatial_calibration", + # 4: 'x_calibration', + # 5: 'y_calibration', + # 6: 'calibration_units', + # 7: 'name', + 8: "thresh_state", + 9: "thresh_state_red", + 11: "thresh_state_green", + 12: "thresh_state_blue", + 13: "thresh_state_lo", + 14: "thresh_state_hi", + 15: "zoom", + # 16: 'create_time', + # 17: 'last_saved_time', + 18: "current_buffer", + 19: "gray_fit", + 20: "gray_point_count", + # 21: 'gray_x', + # 22: 'gray_y', + # 23: 'gray_min', + # 24: 'gray_max', + # 25: 'gray_unit_name', + 26: "standard_lut", + 27: "wavelength", + # 28: 'stage_position', + # 29: 'camera_chip_offset', + # 30: 'overlay_mask', + # 31: 'overlay_compress', + # 32: 'overlay', + # 33: 'special_overlay_mask', + # 34: 'special_overlay_compress', + # 35: 'special_overlay', + 36: "image_property", + # 37: 'stage_label', + # 38: 'autoscale_lo_info', + # 39: 'autoscale_hi_info', + # 40: 'absolute_z', + # 41: 'absolute_z_valid', + # 42: 'gamma', + # 43: 'gamma_red', + # 44: 'gamma_green', + # 45: 'gamma_blue', + # 46: 'camera_bin', + 47: "new_lut", + # 48: 'image_property_ex', + 49: "plane_property", + # 50: 'user_lut_table', + 51: "red_autoscale_info", + # 52: 'red_autoscale_lo_info', + # 53: 'red_autoscale_hi_info', + 54: "red_minscale_info", + 55: "red_maxscale_info", + 56: "green_autoscale_info", + # 57: 'green_autoscale_lo_info', + # 58: 'green_autoscale_hi_info', + 59: "green_minscale_info", + 60: "green_maxscale_info", + 61: "blue_autoscale_info", + # 62: 'blue_autoscale_lo_info', + # 63: 'blue_autoscale_hi_info', + 64: "blue_min_scale_info", + 65: "blue_max_scale_info", + # 66: 'overlay_plane_color' } # Olympus FluoView MM_DIMENSION = [ - ('name', 'a16'), - ('size', 'i4'), - ('origin', 'f8'), - ('resolution', 'f8'), - ('unit', 'a64'), + ("name", 
"a16"), + ("size", "i4"), + ("origin", "f8"), + ("resolution", "f8"), + ("unit", "a64"), ] MM_HEADER = [ - ('header_flag', 'i2'), - ('image_type', 'u1'), - ('image_name', 'a257'), - ('offset_data', 'u4'), - ('palette_size', 'i4'), - ('offset_palette0', 'u4'), - ('offset_palette1', 'u4'), - ('comment_size', 'i4'), - ('offset_comment', 'u4'), - ('dimensions', MM_DIMENSION, 10), - ('offset_position', 'u4'), - ('map_type', 'i2'), - ('map_min', 'f8'), - ('map_max', 'f8'), - ('min_value', 'f8'), - ('max_value', 'f8'), - ('offset_map', 'u4'), - ('gamma', 'f8'), - ('offset', 'f8'), - ('gray_channel', MM_DIMENSION), - ('offset_thumbnail', 'u4'), - ('voice_field', 'i4'), - ('offset_voice_field', 'u4'), + ("header_flag", "i2"), + ("image_type", "u1"), + ("image_name", "a257"), + ("offset_data", "u4"), + ("palette_size", "i4"), + ("offset_palette0", "u4"), + ("offset_palette1", "u4"), + ("comment_size", "i4"), + ("offset_comment", "u4"), + ("dimensions", MM_DIMENSION, 10), + ("offset_position", "u4"), + ("map_type", "i2"), + ("map_min", "f8"), + ("map_max", "f8"), + ("min_value", "f8"), + ("max_value", "f8"), + ("offset_map", "u4"), + ("gamma", "f8"), + ("offset", "f8"), + ("gray_channel", MM_DIMENSION), + ("offset_thumbnail", "u4"), + ("voice_field", "i4"), + ("offset_voice_field", "u4"), ] # Carl Zeiss LSM CZ_LSM_INFO = [ - ('magic_number', 'i4'), - ('structure_size', 'i4'), - ('dimension_x', 'i4'), - ('dimension_y', 'i4'), - ('dimension_z', 'i4'), - ('dimension_channels', 'i4'), - ('dimension_time', 'i4'), - ('dimension_data_type', 'i4'), - ('thumbnail_x', 'i4'), - ('thumbnail_y', 'i4'), - ('voxel_size_x', 'f8'), - ('voxel_size_y', 'f8'), - ('voxel_size_z', 'f8'), - ('origin_x', 'f8'), - ('origin_y', 'f8'), - ('origin_z', 'f8'), - ('scan_type', 'u2'), - ('spectral_scan', 'u2'), - ('data_type', 'u4'), - ('offset_vector_overlay', 'u4'), - ('offset_input_lut', 'u4'), - ('offset_output_lut', 'u4'), - ('offset_channel_colors', 'u4'), - ('time_interval', 'f8'), - 
('offset_channel_data_types', 'u4'), - ('offset_scan_information', 'u4'), - ('offset_ks_data', 'u4'), - ('offset_time_stamps', 'u4'), - ('offset_event_list', 'u4'), - ('offset_roi', 'u4'), - ('offset_bleach_roi', 'u4'), - ('offset_next_recording', 'u4'), - ('display_aspect_x', 'f8'), - ('display_aspect_y', 'f8'), - ('display_aspect_z', 'f8'), - ('display_aspect_time', 'f8'), - ('offset_mean_of_roi_overlay', 'u4'), - ('offset_topo_isoline_overlay', 'u4'), - ('offset_topo_profile_overlay', 'u4'), - ('offset_linescan_overlay', 'u4'), - ('offset_toolbar_flags', 'u4'), + ("magic_number", "i4"), + ("structure_size", "i4"), + ("dimension_x", "i4"), + ("dimension_y", "i4"), + ("dimension_z", "i4"), + ("dimension_channels", "i4"), + ("dimension_time", "i4"), + ("dimension_data_type", "i4"), + ("thumbnail_x", "i4"), + ("thumbnail_y", "i4"), + ("voxel_size_x", "f8"), + ("voxel_size_y", "f8"), + ("voxel_size_z", "f8"), + ("origin_x", "f8"), + ("origin_y", "f8"), + ("origin_z", "f8"), + ("scan_type", "u2"), + ("spectral_scan", "u2"), + ("data_type", "u4"), + ("offset_vector_overlay", "u4"), + ("offset_input_lut", "u4"), + ("offset_output_lut", "u4"), + ("offset_channel_colors", "u4"), + ("time_interval", "f8"), + ("offset_channel_data_types", "u4"), + ("offset_scan_information", "u4"), + ("offset_ks_data", "u4"), + ("offset_time_stamps", "u4"), + ("offset_event_list", "u4"), + ("offset_roi", "u4"), + ("offset_bleach_roi", "u4"), + ("offset_next_recording", "u4"), + ("display_aspect_x", "f8"), + ("display_aspect_y", "f8"), + ("display_aspect_z", "f8"), + ("display_aspect_time", "f8"), + ("offset_mean_of_roi_overlay", "u4"), + ("offset_topo_isoline_overlay", "u4"), + ("offset_topo_profile_overlay", "u4"), + ("offset_linescan_overlay", "u4"), + ("offset_toolbar_flags", "u4"), ] # Import functions for LSM_INFO sub-records CZ_LSM_INFO_READERS = { - 'scan_information': read_cz_lsm_scan_info, - 'time_stamps': read_cz_lsm_time_stamps, - 'event_list': read_cz_lsm_event_list, + 
"scan_information": read_cz_lsm_scan_info, + "time_stamps": read_cz_lsm_time_stamps, + "event_list": read_cz_lsm_event_list, } # Map cz_lsm_info.scan_type to dimension order CZ_SCAN_TYPES = { - 0: 'XYZCT', # x-y-z scan - 1: 'XYZCT', # z scan (x-z plane) - 2: 'XYZCT', # line scan - 3: 'XYTCZ', # time series x-y - 4: 'XYZTC', # time series x-z - 5: 'XYTCZ', # time series 'Mean of ROIs' - 6: 'XYZTC', # time series x-y-z - 7: 'XYCTZ', # spline scan - 8: 'XYCZT', # spline scan x-z - 9: 'XYTCZ', # time series spline plane x-z - 10: 'XYZCT', # point mode + 0: "XYZCT", # x-y-z scan + 1: "XYZCT", # z scan (x-z plane) + 2: "XYZCT", # line scan + 3: "XYTCZ", # time series x-y + 4: "XYZTC", # time series x-z + 5: "XYTCZ", # time series 'Mean of ROIs' + 6: "XYZTC", # time series x-y-z + 7: "XYCTZ", # spline scan + 8: "XYCZT", # spline scan x-z + 9: "XYTCZ", # time series spline plane x-z + 10: "XYZCT", # point mode } # Map dimension codes to cz_lsm_info attribute CZ_DIMENSIONS = { - 'X': 'dimension_x', - 'Y': 'dimension_y', - 'Z': 'dimension_z', - 'C': 'dimension_channels', - 'T': 'dimension_time', + "X": "dimension_x", + "Y": "dimension_y", + "Z": "dimension_z", + "C": "dimension_channels", + "T": "dimension_time", } # Descriptions of cz_lsm_info.data_type CZ_DATA_TYPES = { - 0: 'varying data types', - 2: '12 bit unsigned integer', - 5: '32 bit float', + 0: "varying data types", + 2: "12 bit unsigned integer", + 5: "32 bit float", } CZ_LSM_SCAN_INFO_ARRAYS = { @@ -2971,8 +3263,8 @@ def __getitem__(self, key): 0x30000000: "lasers", 0x60000000: "detectionchannels", 0x80000000: "illuminationchannels", - 0xa0000000: "beamsplitters", - 0xc0000000: "datachannels", + 0xA0000000: "beamsplitters", + 0xC0000000: "datachannels", 0x13000000: "markers", 0x11000000: "timers", } @@ -2982,8 +3274,8 @@ def __getitem__(self, key): 0x50000000: "lasers", 0x70000000: "detectionchannels", 0x90000000: "illuminationchannels", - 0xb0000000: "beamsplitters", - 0xd0000000: "datachannels", + 0xB0000000: 
"beamsplitters", + 0xD0000000: "datachannels", 0x14000000: "markers", 0x12000000: "timers", } @@ -2998,12 +3290,12 @@ def __getitem__(self, key): 0x10000007: "oledb_recording_scan_type", 0x10000008: "oledb_recording_scan_mode", 0x10000009: "number_of_stacks", - 0x1000000a: "lines_per_plane", - 0x1000000b: "samples_per_line", - 0x1000000c: "planes_per_volume", - 0x1000000d: "images_width", - 0x1000000e: "images_height", - 0x1000000f: "images_number_planes", + 0x1000000A: "lines_per_plane", + 0x1000000B: "samples_per_line", + 0x1000000C: "planes_per_volume", + 0x1000000D: "images_width", + 0x1000000E: "images_height", + 0x1000000F: "images_number_planes", 0x10000010: "images_number_stacks", 0x10000011: "images_number_channels", 0x10000012: "linscan_xy_size", @@ -3014,12 +3306,12 @@ def __getitem__(self, key): 0x10000017: "zoom_y", 0x10000018: "zoom_z", 0x10000019: "sample_0x", - 0x1000001a: "sample_0y", - 0x1000001b: "sample_0z", - 0x1000001c: "sample_spacing", - 0x1000001d: "line_spacing", - 0x1000001e: "plane_spacing", - 0x1000001f: "plane_width", + 0x1000001A: "sample_0y", + 0x1000001B: "sample_0z", + 0x1000001C: "sample_spacing", + 0x1000001D: "line_spacing", + 0x1000001E: "plane_spacing", + 0x1000001F: "plane_width", 0x10000020: "plane_height", 0x10000021: "volume_depth", 0x10000023: "nutation", @@ -3059,11 +3351,11 @@ def __getitem__(self, key): 0x40000005: "sampling_number", 0x40000006: "acquire", 0x40000007: "sample_observation_time", - 0x4000000b: "time_between_stacks", - 0x4000000c: "name", - 0x4000000d: "collimator1_name", - 0x4000000e: "collimator1_position", - 0x4000000f: "collimator2_name", + 0x4000000B: "time_between_stacks", + 0x4000000C: "name", + 0x4000000D: "collimator1_name", + 0x4000000E: "collimator1_position", + 0x4000000F: "collimator2_name", 0x40000010: "collimator2_position", 0x40000011: "is_bleach_track", 0x40000012: "is_bleach_after_scan_number", @@ -3092,9 +3384,9 @@ def __getitem__(self, key): 0x40000037: "id_tubelens", 0x40000038: 
"id_tubelens_position", 0x40000039: "transmitted_light", - 0x4000003a: "reflected_light", - 0x4000003b: "simultan_grab_and_bleach", - 0x4000003c: "bleach_pixel_time", + 0x4000003A: "reflected_light", + 0x4000003B: "simultan_grab_and_bleach", + 0x4000003C: "bleach_pixel_time", # detection_channels 0x70000001: "integration_mode", 0x70000002: "special_mode", @@ -3105,12 +3397,12 @@ def __getitem__(self, key): 0x70000007: "amplifier_offs_first", 0x70000008: "amplifier_offs_last", 0x70000009: "pinhole_diameter", - 0x7000000a: "counting_trigger", - 0x7000000b: "acquire", - 0x7000000c: "point_detector_name", - 0x7000000d: "amplifier_name", - 0x7000000e: "pinhole_name", - 0x7000000f: "filter_set_name", + 0x7000000A: "counting_trigger", + 0x7000000B: "acquire", + 0x7000000C: "point_detector_name", + 0x7000000D: "amplifier_name", + 0x7000000E: "pinhole_name", + 0x7000000F: "filter_set_name", 0x70000010: "filter_name", 0x70000013: "integrator_name", 0x70000014: "detection_channel_name", @@ -3129,37 +3421,37 @@ def __getitem__(self, key): 0x90000001: "name", 0x90000002: "power", 0x90000003: "wavelength", - 0x90000004: "aquire", + 0x90000004: "acquire", 0x90000005: "detchannel_name", 0x90000006: "power_bc1", 0x90000007: "power_bc2", # beam_splitters - 0xb0000001: "filter_set", - 0xb0000002: "filter", - 0xb0000003: "name", + 0xB0000001: "filter_set", + 0xB0000002: "filter", + 0xB0000003: "name", # data_channels - 0xd0000001: "name", - 0xd0000003: "acquire", - 0xd0000004: "color", - 0xd0000005: "sample_type", - 0xd0000006: "bits_per_sample", - 0xd0000007: "ratio_type", - 0xd0000008: "ratio_track1", - 0xd0000009: "ratio_track2", - 0xd000000a: "ratio_channel1", - 0xd000000b: "ratio_channel2", - 0xd000000c: "ratio_const1", - 0xd000000d: "ratio_const2", - 0xd000000e: "ratio_const3", - 0xd000000f: "ratio_const4", - 0xd0000010: "ratio_const5", - 0xd0000011: "ratio_const6", - 0xd0000012: "ratio_first_images1", - 0xd0000013: "ratio_first_images2", - 0xd0000014: "dye_name", - 0xd0000015: 
"dye_folder", - 0xd0000016: "spectrum", - 0xd0000017: "acquire", + 0xD0000001: "name", + 0xD0000003: "acquire", + 0xD0000004: "color", + 0xD0000005: "sample_type", + 0xD0000006: "bits_per_sample", + 0xD0000007: "ratio_type", + 0xD0000008: "ratio_track1", + 0xD0000009: "ratio_track2", + 0xD000000A: "ratio_channel1", + 0xD000000B: "ratio_channel2", + 0xD000000C: "ratio_const1", + 0xD000000D: "ratio_const2", + 0xD000000E: "ratio_const3", + 0xD000000F: "ratio_const4", + 0xD0000010: "ratio_const5", + 0xD0000011: "ratio_const6", + 0xD0000012: "ratio_first_images1", + 0xD0000013: "ratio_first_images2", + 0xD0000014: "dye_name", + 0xD0000015: "dye_folder", + 0xD0000016: "spectrum", + 0xD0000017: "acquire", # markers 0x14000001: "name", 0x14000002: "description", @@ -3177,113 +3469,135 @@ def __getitem__(self, key): # Map TIFF tag code to attribute name, default value, type, count, validator TIFF_TAGS = { - 254: ('new_subfile_type', 0, 4, 1, TIFF_SUBFILE_TYPES()), - 255: ('subfile_type', None, 3, 1, - {0: 'undefined', 1: 'image', 2: 'reduced_image', 3: 'page'}), - 256: ('image_width', None, 4, 1, None), - 257: ('image_length', None, 4, 1, None), - 258: ('bits_per_sample', 1, 3, 1, None), - 259: ('compression', 1, 3, 1, TIFF_COMPESSIONS), - 262: ('photometric', None, 3, 1, TIFF_PHOTOMETRICS), - 266: ('fill_order', 1, 3, 1, {1: 'msb2lsb', 2: 'lsb2msb'}), - 269: ('document_name', None, 2, None, None), - 270: ('image_description', None, 2, None, None), - 271: ('make', None, 2, None, None), - 272: ('model', None, 2, None, None), - 273: ('strip_offsets', None, 4, None, None), - 274: ('orientation', 1, 3, 1, TIFF_ORIENTATIONS), - 277: ('samples_per_pixel', 1, 3, 1, None), - 278: ('rows_per_strip', 2**32-1, 4, 1, None), - 279: ('strip_byte_counts', None, 4, None, None), - 280: ('min_sample_value', None, 3, None, None), - 281: ('max_sample_value', None, 3, None, None), # 2**bits_per_sample - 282: ('x_resolution', None, 5, 1, None), - 283: ('y_resolution', None, 5, 1, None), - 284: 
('planar_configuration', 1, 3, 1, {1: 'contig', 2: 'separate'}), - 285: ('page_name', None, 2, None, None), - 286: ('x_position', None, 5, 1, None), - 287: ('y_position', None, 5, 1, None), - 296: ('resolution_unit', 2, 4, 1, {1: 'none', 2: 'inch', 3: 'centimeter'}), - 297: ('page_number', None, 3, 2, None), - 305: ('software', None, 2, None, None), - 306: ('datetime', None, 2, None, None), - 315: ('artist', None, 2, None, None), - 316: ('host_computer', None, 2, None, None), - 317: ('predictor', 1, 3, 1, {1: None, 2: 'horizontal'}), - 320: ('color_map', None, 3, None, None), - 322: ('tile_width', None, 4, 1, None), - 323: ('tile_length', None, 4, 1, None), - 324: ('tile_offsets', None, 4, None, None), - 325: ('tile_byte_counts', None, 4, None, None), - 338: ('extra_samples', None, 3, None, - {0: 'unspecified', 1: 'assocalpha', 2: 'unassalpha'}), - 339: ('sample_format', 1, 3, 1, TIFF_SAMPLE_FORMATS), - 347: ('jpeg_tables', None, None, None, None), - 530: ('ycbcr_subsampling', 1, 3, 2, None), - 531: ('ycbcr_positioning', 1, 3, 1, None), - 32997: ('image_depth', None, 4, 1, None), - 32998: ('tile_depth', None, 4, 1, None), - 33432: ('copyright', None, 1, None, None), - 33445: ('md_file_tag', None, 4, 1, None), - 33446: ('md_scale_pixel', None, 5, 1, None), - 33447: ('md_color_table', None, 3, None, None), - 33448: ('md_lab_name', None, 2, None, None), - 33449: ('md_sample_info', None, 2, None, None), - 33450: ('md_prep_date', None, 2, None, None), - 33451: ('md_prep_time', None, 2, None, None), - 33452: ('md_file_units', None, 2, None, None), - 33550: ('model_pixel_scale', None, 12, 3, None), - 33922: ('model_tie_point', None, 12, None, None), - 37510: ('user_comment', None, None, None, None), - 34665: ('exif_ifd', None, None, 1, None), - 34735: ('geo_key_directory', None, 3, None, None), - 34736: ('geo_double_params', None, 12, None, None), - 34737: ('geo_ascii_params', None, 2, None, None), - 34853: ('gps_ifd', None, None, 1, None), - 42112: ('gdal_metadata', 
None, 2, None, None), - 42113: ('gdal_nodata', None, 2, None, None), - 50838: ('imagej_byte_counts', None, None, None, None), - 50289: ('mc_xy_position', None, 12, 2, None), - 50290: ('mc_z_position', None, 12, 1, None), - 50291: ('mc_xy_calibration', None, 12, 3, None), - 50292: ('mc_lens_lem_na_n', None, 12, 3, None), - 50293: ('mc_channel_name', None, 1, None, None), - 50294: ('mc_ex_wavelength', None, 12, 1, None), - 50295: ('mc_time_stamp', None, 12, 1, None), - 65200: ('flex_xml', None, 2, None, None), + 254: ("new_subfile_type", 0, 4, 1, TIFF_SUBFILE_TYPES()), + 255: ( + "subfile_type", + None, + 3, + 1, + {0: "undefined", 1: "image", 2: "reduced_image", 3: "page"}, + ), + 256: ("image_width", None, 4, 1, None), + 257: ("image_length", None, 4, 1, None), + 258: ("bits_per_sample", 1, 3, 1, None), + 259: ("compression", 1, 3, 1, TIFF_COMPESSIONS), + 262: ("photometric", None, 3, 1, TIFF_PHOTOMETRICS), + 266: ("fill_order", 1, 3, 1, {1: "msb2lsb", 2: "lsb2msb"}), + 269: ("document_name", None, 2, None, None), + 270: ("image_description", None, 2, None, None), + 271: ("make", None, 2, None, None), + 272: ("model", None, 2, None, None), + 273: ("strip_offsets", None, 4, None, None), + 274: ("orientation", 1, 3, 1, TIFF_ORIENTATIONS), + 277: ("samples_per_pixel", 1, 3, 1, None), + 278: ("rows_per_strip", 2**32 - 1, 4, 1, None), + 279: ("strip_byte_counts", None, 4, None, None), + 280: ("min_sample_value", None, 3, None, None), + 281: ("max_sample_value", None, 3, None, None), # 2**bits_per_sample + 282: ("x_resolution", None, 5, 1, None), + 283: ("y_resolution", None, 5, 1, None), + 284: ("planar_configuration", 1, 3, 1, {1: "contig", 2: "separate"}), + 285: ("page_name", None, 2, None, None), + 286: ("x_position", None, 5, 1, None), + 287: ("y_position", None, 5, 1, None), + 296: ("resolution_unit", 2, 4, 1, {1: "none", 2: "inch", 3: "centimeter"}), + 297: ("page_number", None, 3, 2, None), + 305: ("software", None, 2, None, None), + 306: ("datetime", None, 2, 
None, None), + 315: ("artist", None, 2, None, None), + 316: ("host_computer", None, 2, None, None), + 317: ("predictor", 1, 3, 1, {1: None, 2: "horizontal"}), + 320: ("color_map", None, 3, None, None), + 322: ("tile_width", None, 4, 1, None), + 323: ("tile_length", None, 4, 1, None), + 324: ("tile_offsets", None, 4, None, None), + 325: ("tile_byte_counts", None, 4, None, None), + 338: ( + "extra_samples", + None, + 3, + None, + {0: "unspecified", 1: "assocalpha", 2: "unassalpha"}, + ), + 339: ("sample_format", 1, 3, 1, TIFF_SAMPLE_FORMATS), + 347: ("jpeg_tables", None, None, None, None), + 530: ("ycbcr_subsampling", 1, 3, 2, None), + 531: ("ycbcr_positioning", 1, 3, 1, None), + 32997: ("image_depth", None, 4, 1, None), + 32998: ("tile_depth", None, 4, 1, None), + 33432: ("copyright", None, 1, None, None), + 33445: ("md_file_tag", None, 4, 1, None), + 33446: ("md_scale_pixel", None, 5, 1, None), + 33447: ("md_color_table", None, 3, None, None), + 33448: ("md_lab_name", None, 2, None, None), + 33449: ("md_sample_info", None, 2, None, None), + 33450: ("md_prep_date", None, 2, None, None), + 33451: ("md_prep_time", None, 2, None, None), + 33452: ("md_file_units", None, 2, None, None), + 33550: ("model_pixel_scale", None, 12, 3, None), + 33922: ("model_tie_point", None, 12, None, None), + 37510: ("user_comment", None, None, None, None), + 34665: ("exif_ifd", None, None, 1, None), + 34735: ("geo_key_directory", None, 3, None, None), + 34736: ("geo_double_params", None, 12, None, None), + 34737: ("geo_ascii_params", None, 2, None, None), + 34853: ("gps_ifd", None, None, 1, None), + 42112: ("gdal_metadata", None, 2, None, None), + 42113: ("gdal_nodata", None, 2, None, None), + 50838: ("imagej_byte_counts", None, None, None, None), + 50289: ("mc_xy_position", None, 12, 2, None), + 50290: ("mc_z_position", None, 12, 1, None), + 50291: ("mc_xy_calibration", None, 12, 3, None), + 50292: ("mc_lens_lem_na_n", None, 12, 3, None), + 50293: ("mc_channel_name", None, 1, None, None), 
+ 50294: ("mc_ex_wavelength", None, 12, 1, None), + 50295: ("mc_time_stamp", None, 12, 1, None), + 65200: ("flex_xml", None, 2, None, None), # code: (attribute name, default value, type, count, validator) } # Map custom TIFF tag codes to attribute names and import functions CUSTOM_TAGS = { - 700: ('xmp', read_bytes), - 34377: ('photoshop', read_numpy), - 33723: ('iptc', read_bytes), - 34675: ('icc_profile', read_numpy), - 33628: ('mm_uic1', read_mm_uic1), - 33629: ('mm_uic2', read_mm_uic2), - 33630: ('mm_uic3', read_mm_uic3), - 33631: ('mm_uic4', read_mm_uic4), - 34361: ('mm_header', read_mm_header), - 34362: ('mm_stamp', read_mm_stamp), - 34386: ('mm_user_block', read_bytes), - 34412: ('cz_lsm_info', read_cz_lsm_info), - 43314: ('nih_image_header', read_nih_image_header), + 700: ("xmp", read_bytes), + 34377: ("photoshop", read_numpy), + 33723: ("iptc", read_bytes), + 34675: ("icc_profile", read_numpy), + 33628: ("mm_uic1", read_mm_uic1), + 33629: ("mm_uic2", read_mm_uic2), + 33630: ("mm_uic3", read_mm_uic3), + 33631: ("mm_uic4", read_mm_uic4), + 34361: ("mm_header", read_mm_header), + 34362: ("mm_stamp", read_mm_stamp), + 34386: ("mm_user_block", read_bytes), + 34412: ("cz_lsm_info", read_cz_lsm_info), + 43314: ("nih_image_header", read_nih_image_header), # 40001: ('mc_ipwinscal', read_bytes), - 40100: ('mc_id_old', read_bytes), - 50288: ('mc_id', read_bytes), - 50296: ('mc_frame_properties', read_bytes), - 50839: ('imagej_metadata', read_bytes), - 51123: ('micromanager_metadata', read_json), + 40100: ("mc_id_old", read_bytes), + 50288: ("mc_id", read_bytes), + 50296: ("mc_frame_properties", read_bytes), + 50839: ("imagej_metadata", read_bytes), + 51123: ("micromanager_metadata", read_json), } # Max line length of printed output PRINT_LINE_LEN = 79 -def imshow(data, title=None, vmin=0, vmax=None, cmap=None, - bitspersample=None, photometric='rgb', interpolation='nearest', - dpi=96, figure=None, subplot=111, maxdim=8192, **kwargs): +def imshow( + data, + 
title=None, + vmin=0, + vmax=None, + cmap=None, + bitspersample=None, + photometric="rgb", + interpolation="nearest", + dpi=96, + figure=None, + subplot=111, + maxdim=8192, + **kwargs, +): """Plot n-dimensional images using matplotlib.pyplot. Return figure, subplot and plot axis. @@ -3305,14 +3619,13 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, maximum image size in any dimension. kwargs : optional Arguments for matplotlib.pyplot.imshow. - """ - #if photometric not in ('miniswhite', 'minisblack', 'rgb', 'palette'): + # if photometric not in ('miniswhite', 'minisblack', 'rgb', 'palette'): # raise ValueError("Can't handle %s photometrics" % photometric) # TODO: handle photometric == 'separated' (CMYK) - isrgb = photometric in ('rgb', 'palette') + isrgb = photometric in ("rgb", "palette") data = numpy.atleast_2d(data.squeeze()) - data = data[(slice(0, maxdim), ) * len(data.shape)] + data = data[(slice(0, maxdim),) * len(data.shape)] dims = data.ndim if dims < 2: @@ -3330,12 +3643,12 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, isrgb = isrgb and data.shape[-1] in (3, 4) dims -= 3 if isrgb else 2 - if photometric == 'palette' and isrgb: + if photometric == "palette" and isrgb: datamax = data.max() if datamax > 255: data >>= 8 # possible precision loss - data = data.astype('B') - elif data.dtype.kind in 'ui': + data = data.astype("B") + elif data.dtype.kind in "ui": if not (isrgb and data.dtype.itemsize <= 1) or bitspersample is None: try: bitspersample = int(math.ceil(math.log(data.max(), 2))) @@ -3350,28 +3663,28 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, data <<= 8 - bitspersample elif bitspersample > 8: data >>= bitspersample - 8 # precision loss - data = data.astype('B') - elif data.dtype.kind == 'f': + data = data.astype("B") + elif data.dtype.kind == "f": datamax = data.max() if isrgb and datamax > 1.0: - if data.dtype.char == 'd': - data = data.astype('f') + if data.dtype.char == "d": + data = data.astype("f") 
data /= datamax - elif data.dtype.kind == 'b': + elif data.dtype.kind == "b": datamax = 1 - elif data.dtype.kind == 'c': + elif data.dtype.kind == "c": raise NotImplementedError("complex type") # TODO: handle complex types if not isrgb: if vmax is None: vmax = datamax if vmin is None: - if data.dtype.kind == 'i': + if data.dtype.kind == "i": dtmin = numpy.iinfo(data.dtype).min vmin = numpy.min(data) if vmin == dtmin: vmin = numpy.min(data > dtmin) - if data.dtype.kind == 'f': + if data.dtype.kind == "f": dtmin = numpy.finfo(data.dtype).min vmin = numpy.min(data) if vmin == dtmin: @@ -3379,37 +3692,54 @@ def imshow(data, title=None, vmin=0, vmax=None, cmap=None, else: vmin = 0 - pyplot = sys.modules['matplotlib.pyplot'] + pyplot = sys.modules["matplotlib.pyplot"] if figure is None: - pyplot.rc('font', family='sans-serif', weight='normal', size=8) - figure = pyplot.figure(dpi=dpi, figsize=(10.3, 6.3), frameon=True, - facecolor='1.0', edgecolor='w') + pyplot.rc("font", family="sans-serif", weight="normal", size=8) + figure = pyplot.figure( + dpi=dpi, + figsize=(10.3, 6.3), + frameon=True, + facecolor="1.0", + edgecolor="w", + ) try: figure.canvas.manager.window.title(title) except Exception: pass - pyplot.subplots_adjust(bottom=0.03*(dims+2), top=0.9, - left=0.1, right=0.95, hspace=0.05, wspace=0.0) + pyplot.subplots_adjust( + bottom=0.03 * (dims + 2), + top=0.9, + left=0.1, + right=0.95, + hspace=0.05, + wspace=0.0, + ) subplot = pyplot.subplot(subplot) if title: try: - title = unicode(title, 'Windows-1252') + title = unicode(title, "Windows-1252") except TypeError: pass pyplot.title(title, size=11) if cmap is None: - if data.dtype.kind in 'ub' and vmin == 0: - cmap = 'gray' + if data.dtype.kind in "ub" and vmin == 0: + cmap = "gray" else: - cmap = 'coolwarm' - if photometric == 'miniswhite': - cmap += '_r' - - image = pyplot.imshow(data[(0, ) * dims].squeeze(), vmin=vmin, vmax=vmax, - cmap=cmap, interpolation=interpolation, **kwargs) + cmap = "coolwarm" + if 
photometric == "miniswhite": + cmap += "_r" + + image = pyplot.imshow( + data[(0,) * dims].squeeze(), + vmin=vmin, + vmax=vmax, + cmap=cmap, + interpolation=interpolation, + **kwargs, + ) if not isrgb: pyplot.colorbar() # panchor=(0.55, 0.5), fraction=0.05 @@ -3420,8 +3750,12 @@ def format_coord(x, y): y = int(y + 0.5) try: if dims: - return "%s @ %s [%4i, %4i]" % (cur_ax_dat[1][y, x], - current, x, y) + return "%s @ %s [%4i, %4i]" % ( + cur_ax_dat[1][y, x], + current, + x, + y, + ) else: return "%s @ [%4i, %4i]" % (data[y, x], x, y) except IndexError: @@ -3430,12 +3764,20 @@ def format_coord(x, y): pyplot.gca().format_coord = format_coord if dims: - current = list((0, ) * dims) + current = list((0,) * dims) cur_ax_dat = [0, data[tuple(current)].squeeze()] - sliders = [pyplot.Slider( - pyplot.axes([0.125, 0.03*(axis+1), 0.725, 0.025]), - 'Dimension %i' % axis, 0, data.shape[axis]-1, 0, facecolor='0.5', - valfmt='%%.0f [%i]' % data.shape[axis]) for axis in range(dims)] + sliders = [ + pyplot.Slider( + pyplot.axes([0.125, 0.03 * (axis + 1), 0.725, 0.025]), + "Dimension %i" % axis, + 0, + data.shape[axis] - 1, + 0, + facecolor="0.5", + valfmt="%%.0f [%i]" % data.shape[axis], + ) + for axis in range(dims) + ] for slider in sliders: slider.drawon = False @@ -3466,22 +3808,22 @@ def on_keypressed(event, data=data, current=current): # callback function for key press event key = event.key axis = cur_ax_dat[0] - if str(key) in '0123456789': + if str(key) in "0123456789": on_changed(key, axis) - elif key == 'right': + elif key == "right": on_changed(current[axis] + 1, axis) - elif key == 'left': + elif key == "left": on_changed(current[axis] - 1, axis) - elif key == 'up': - cur_ax_dat[0] = 0 if axis == len(data.shape)-1 else axis + 1 - elif key == 'down': - cur_ax_dat[0] = len(data.shape)-1 if axis == 0 else axis - 1 - elif key == 'end': + elif key == "up": + cur_ax_dat[0] = 0 if axis == len(data.shape) - 1 else axis + 1 + elif key == "down": + cur_ax_dat[0] = 
len(data.shape) - 1 if axis == 0 else axis - 1 + elif key == "end": on_changed(data.shape[axis] - 1, axis) - elif key == 'home': + elif key == "home": on_changed(0, axis) - figure.canvas.mpl_connect('key_press_event', on_keypressed) + figure.canvas.mpl_connect("key_press_event", on_keypressed) for axis, ctrl in enumerate(sliders): ctrl.on_changed(lambda k, a=axis: on_changed(k, a)) @@ -3489,8 +3831,11 @@ def on_keypressed(event, data=data, current=current): def _app_show(): - """Block the GUI. For use as skimage plugin.""" - pyplot = sys.modules['matplotlib.pyplot'] + """Block the GUI. + + For use as skimage plugin. + """ + pyplot = sys.modules["matplotlib.pyplot"] pyplot.show() @@ -3505,38 +3850,85 @@ def main(argv=None): import optparse - search_doc = lambda r, d: re.search(r, __doc__).group(1) if __doc__ else d + def search_doc(r, d): + return re.search(r, __doc__).group(1) if __doc__ else d + parser = optparse.OptionParser( usage="usage: %prog [options] path", - description=search_doc("\n\n([^|]*?)\n\n", ''), - version="%%prog %s" % search_doc(":Version: (.*)", "Unknown")) + description=search_doc("\n\n([^|]*?)\n\n", ""), + version="%%prog %s" % search_doc(":Version: (.*)", "Unknown"), + ) opt = parser.add_option - opt('-p', '--page', dest='page', type='int', default=-1, - help="display single page") - opt('-s', '--series', dest='series', type='int', default=-1, - help="display series of pages of same shape") - opt('--nomultifile', dest='nomultifile', action='store_true', - default=False, help="don't read OME series from multiple files") - opt('--noplot', dest='noplot', action='store_true', default=False, - help="don't display images") - opt('--interpol', dest='interpol', metavar='INTERPOL', default='bilinear', - help="image interpolation method") - opt('--dpi', dest='dpi', type='int', default=96, - help="set plot resolution") - opt('--debug', dest='debug', action='store_true', default=False, - help="raise exception on failures") - opt('--test', dest='test', 
action='store_true', default=False, - help="try read all images in path") - opt('--doctest', dest='doctest', action='store_true', default=False, - help="runs the internal tests") - opt('-v', '--verbose', dest='verbose', action='store_true', default=True) - opt('-q', '--quiet', dest='verbose', action='store_false') + opt( + "-p", + "--page", + dest="page", + type="int", + default=-1, + help="display single page", + ) + opt( + "-s", + "--series", + dest="series", + type="int", + default=-1, + help="display series of pages of same shape", + ) + opt( + "--nomultifile", + dest="nomultifile", + action="store_true", + default=False, + help="don't read OME series from multiple files", + ) + opt( + "--noplot", + dest="noplot", + action="store_true", + default=False, + help="don't display images", + ) + opt( + "--interpol", + dest="interpol", + metavar="INTERPOL", + default="bilinear", + help="image interpolation method", + ) + opt( + "--dpi", dest="dpi", type="int", default=96, help="set plot resolution" + ) + opt( + "--debug", + dest="debug", + action="store_true", + default=False, + help="raise exception on failures", + ) + opt( + "--test", + dest="test", + action="store_true", + default=False, + help="try read all images in path", + ) + opt( + "--doctest", + dest="doctest", + action="store_true", + default=False, + help="runs the internal tests", + ) + opt("-v", "--verbose", dest="verbose", action="store_true", default=True) + opt("-q", "--quiet", dest="verbose", action="store_false") settings, path = parser.parse_args() - path = ' '.join(path) + path = " ".join(path) if settings.doctest: import doctest + doctest.testmod() return 0 if not path: @@ -3545,16 +3937,16 @@ def main(argv=None): test_tifffile(path, settings.verbose) return 0 - if any(i in path for i in '?*'): + if any(i in path for i in "?*"): path = glob.glob(path) if not path: - print('no files match the pattern') + print("no files match the pattern") return 0 # TODO: handle image sequences - #if len(path) == 
1: + # if len(path) == 1: path = path[0] - print("Reading file structure...", end=' ') + print("Reading file structure...", end=" ") start = time.time() try: tif = TiffFile(path, multifile=not settings.nomultifile) @@ -3564,39 +3956,45 @@ def main(argv=None): else: print("\n", e) sys.exit(0) - print("%.3f ms" % ((time.time()-start) * 1e3)) + print("%.3f ms" % ((time.time() - start) * 1e3)) if tif.is_ome: settings.norgb = True images = [(None, tif[0 if settings.page < 0 else settings.page])] if not settings.noplot: - print("Reading image data... ", end=' ') + print("Reading image data... ", end=" ") def notnone(x): return next(i for i in x if i is not None) + start = time.time() try: if settings.page >= 0: - images = [(tif.asarray(key=settings.page), - tif[settings.page])] + images = [(tif.asarray(key=settings.page), tif[settings.page])] elif settings.series >= 0: - images = [(tif.asarray(series=settings.series), - notnone(tif.series[settings.series].pages))] + images = [ + ( + tif.asarray(series=settings.series), + notnone(tif.series[settings.series].pages), + ) + ] else: images = [] for i, s in enumerate(tif.series): try: images.append( - (tif.asarray(series=i), notnone(s.pages))) + (tif.asarray(series=i), notnone(s.pages)) + ) except ValueError as e: images.append((None, notnone(s.pages))) if settings.debug: raise else: - print("\n* series %i failed: %s... " % (i, e), - end='') - print("%.3f ms" % ((time.time()-start) * 1e3)) + print( + "\n* series %i failed: %s... 
" % (i, e), end="" + ) + print("%.3f ms" % ((time.time() - start) * 1e3)) except Exception as e: if settings.debug: raise @@ -3608,7 +4006,7 @@ def notnone(x): print("\nTIFF file:", tif) print() for i, s in enumerate(tif.series): - print ("Series %i" % i) + print("Series %i" % i) print(s) print() for i, page in images: @@ -3616,20 +4014,27 @@ def notnone(x): print(page.tags) if page.is_palette: print("\nColor Map:", page.color_map.shape, page.color_map.dtype) - for attr in ('cz_lsm_info', 'cz_lsm_scan_information', 'mm_uic_tags', - 'mm_header', 'imagej_tags', 'micromanager_metadata', - 'nih_image_header'): + for attr in ( + "cz_lsm_info", + "cz_lsm_scan_information", + "mm_uic_tags", + "mm_header", + "imagej_tags", + "micromanager_metadata", + "nih_image_header", + ): if hasattr(page, attr): print("", attr.upper(), Record(getattr(page, attr)), sep="\n") print() if page.is_micromanager: - print('MICROMANAGER_FILE_METADATA') + print("MICROMANAGER_FILE_METADATA") print(Record(tif.micromanager_metadata)) if images and not settings.noplot: try: import matplotlib - matplotlib.use('TkAgg') + + matplotlib.use("TkAgg") from matplotlib import pyplot except ImportError as e: warnings.warn("failed to import matplotlib.\n%s" % e) @@ -3638,23 +4043,28 @@ def notnone(x): if img is None: continue vmin, vmax = None, None - if 'gdal_nodata' in page.tags: + if "gdal_nodata" in page.tags: vmin = numpy.min(img[img > float(page.gdal_nodata)]) if page.is_stk: try: - vmin = page.mm_uic_tags['min_scale'] - vmax = page.mm_uic_tags['max_scale'] + vmin = page.mm_uic_tags["min_scale"] + vmax = page.mm_uic_tags["max_scale"] except KeyError: pass else: if vmax <= vmin: vmin, vmax = None, None title = "%s\n %s" % (str(tif), str(page)) - imshow(img, title=title, vmin=vmin, vmax=vmax, - bitspersample=page.bits_per_sample, - photometric=page.photometric, - interpolation=settings.interpol, - dpi=settings.dpi) + imshow( + img, + title=title, + vmin=vmin, + vmax=vmax, + 
bitspersample=page.bits_per_sample, + photometric=page.photometric, + interpolation=settings.interpol, + dpi=settings.dpi, + ) pyplot.show() @@ -3665,4 +4075,4 @@ def notnone(x): unicode = str if __name__ == "__main__": - sys.exit(main()) \ No newline at end of file + sys.exit(main()) diff --git a/src/diffpy/srxplanar/version.py b/src/diffpy/srxplanar/version.py new file mode 100644 index 0000000..3523646 --- /dev/null +++ b/src/diffpy/srxplanar/version.py @@ -0,0 +1,26 @@ +#!/usr/bin/env python +############################################################################## +# +# (c) 2010-2025 The Trustees of Columbia University in the City of New York. +# All rights reserved. +# +# File coded by: Xiaohao Yang, Simon Billinge, Billinge Group members. +# +# See GitHub contributions for a more detailed list of contributors. +# https://github.com/diffpy/diffpy.srxplanar/graphs/contributors # noqa: E501 +# +# See LICENSE.rst for license information. +# +############################################################################## +"""Definition of __version__.""" + +# We do not use the other three variables, but can be added back if needed. 
+# __all__ = ["__date__", "__git_commit__", "__timestamp__", "__version__"] + +# obtain version information +from importlib.metadata import PackageNotFoundError, version + +try: + __version__ = version("diffpy.srxplanar") +except PackageNotFoundError: + __version__ = "unknown" diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..e3b6313 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,19 @@ +import json +from pathlib import Path + +import pytest + + +@pytest.fixture +def user_filesystem(tmp_path): + base_dir = Path(tmp_path) + home_dir = base_dir / "home_dir" + home_dir.mkdir(parents=True, exist_ok=True) + cwd_dir = base_dir / "cwd_dir" + cwd_dir.mkdir(parents=True, exist_ok=True) + + home_config_data = {"username": "home_username", "email": "home@email.com"} + with open(home_dir / "diffpyconfig.json", "w") as f: + json.dump(home_config_data, f) + + yield tmp_path diff --git a/tests/test_version.py b/tests/test_version.py new file mode 100644 index 0000000..999ff52 --- /dev/null +++ b/tests/test_version.py @@ -0,0 +1,10 @@ +"""Unit tests for __version__.py.""" + +import diffpy.srxplanar + + +def test_package_version(): + """Ensure the package version is defined and not set to the initial + placeholder.""" + assert hasattr(diffpy.srxplanar, "__version__") + assert diffpy.srxplanar.__version__ != "0.0.0"