diff --git a/.ansible-lint b/.ansible-lint new file mode 100644 index 0000000..0e80b05 --- /dev/null +++ b/.ansible-lint @@ -0,0 +1,22 @@ +--- +# See https://ansible-lint.readthedocs.io/en/latest/configuring.html +# for a list of the configuration elements that can exist in this +# file. +enable_list: + # Useful checks that one must opt-into. See here for more details: + # https://ansible-lint.readthedocs.io/en/latest/rules.html + - fqcn-builtins + - no-log-password + - no-same-owner +exclude_paths: + # This exclusion is implicit, unless exclude_paths is defined + - .cache + # Seems wise to ignore this too + - .github +kinds: + # This will force our systemd specific molecule configurations to be treated + # as plain yaml files by ansible-lint. This mirrors the default kind + # configuration in ansible-lint for molecule configurations: + # yaml: "**/molecule/*/{base,molecule}.{yaml,yml}" + - yaml: "**/molecule/*/molecule-{no,with}-systemd.yml" +use_default_rules: true diff --git a/.bandit.yml b/.bandit.yml new file mode 100644 index 0000000..0b53a96 --- /dev/null +++ b/.bandit.yml @@ -0,0 +1,14 @@ +--- +# Configuration file for the Bandit python security scanner +# https://bandit.readthedocs.io/en/latest/config.html +# This config is applied to bandit when scanning the "tests" tree + +# Tests are first included by `tests`, and then excluded by `skips`. +# If `tests` is empty, all tests are considered included. + +tests: +# - B101 +# - B102 + +skips: + - B101 # skip "assert used" check since assertions are required in pytests diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..e5f4366 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,12 @@ +# This is the configuration for code coverage checks +# https://coverage.readthedocs.io/en/latest/config.html + +[run] +source = src/trustymail +omit = +branch = true + +[report] +exclude_lines = + if __name__ == "__main__": +show_missing = true diff --git a/.flake8 b/.flake8 index 0f60d3e..92ff826 100644 --- a/.flake8 +++ b/.flake8 @@ -1,25 +1,25 @@ -[flake8] -max-line-length = 80 -# Select (turn on) -# * Complexity violations reported by mccabe (C) - -# http://flake8.pycqa.org/en/latest/user/error-codes.html#error-violation-codes -# * Documentation conventions compliance reported by pydocstyle (D) - -# http://www.pydocstyle.org/en/stable/error_codes.html -# * Default errors and warnings reported by pycodestyle (E and W) - -# https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes -# * Default errors reported by pyflakes (F) - -# http://flake8.pycqa.org/en/latest/glossary.html#term-pyflakes -# * Default warnings reported by flake8-bugbear (B) - -# https://github.com/PyCQA/flake8-bugbear#list-of-warnings -# * The B950 flake8-bugbear opinionated warning - -# https://github.com/PyCQA/flake8-bugbear#opinionated-warnings -select = C,D,E,F,W,B,B950 -# Ignore flake8's default warning about maximum line length, which has -# a hard stop at the configured value. Instead we use -# flake8-bugbear's B950, which allows up to 10% overage. -# -# Also ignore flake8's warning about line breaks before binary -# operators. It no longer agrees with PEP8. See, for example, here: -# https://github.com/ambv/black/issues/21. Guido agrees here: -# https://github.com/python/peps/commit/c59c4376ad233a62ca4b3a6060c81368bd21e85b.
-ignore = E501,W503 +[flake8] +max-line-length = 80 +# Select (turn on) +# * Complexity violations reported by mccabe (C) - +# http://flake8.pycqa.org/en/latest/user/error-codes.html#error-violation-codes +# * Documentation conventions compliance reported by pydocstyle (D) - +# http://www.pydocstyle.org/en/stable/error_codes.html +# * Default errors and warnings reported by pycodestyle (E and W) - +# https://pycodestyle.readthedocs.io/en/latest/intro.html#error-codes +# * Default errors reported by pyflakes (F) - +# http://flake8.pycqa.org/en/latest/glossary.html#term-pyflakes +# * Default warnings reported by flake8-bugbear (B) - +# https://github.com/PyCQA/flake8-bugbear#list-of-warnings +# * The B950 flake8-bugbear opinionated warning - +# https://github.com/PyCQA/flake8-bugbear#opinionated-warnings +select = C,D,E,F,W,B,B950 +# Ignore flake8's default warning about maximum line length, which has +# a hard stop at the configured value. Instead we use +# flake8-bugbear's B950, which allows up to 10% overage. +# +# Also ignore flake8's warning about line breaks before binary +# operators. It no longer agrees with PEP8. See, for example, here: +# https://github.com/ambv/black/issues/21. Guido agrees here: +# https://github.com/python/peps/commit/c59c4376ad233a62ca4b3a6060c81368bd21e85b. +ignore = E501,W503 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..371258c --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1,10 @@ +# Each line is a file pattern followed by one or more owners. + +# These owners will be the default owners for everything in the +# repo. Unless a later match takes precedence, these owners will be +# requested for review when someone opens a pull request. +* @dav3r @felddy @jsf9k @mcdonnnj + +# These folks own any files in the .github directory at the root of +# the repository and any of its subdirectories. +/.github/ @dav3r @felddy @jsf9k @mcdonnnj diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..f8e8d44 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,34 @@ +--- + +# Any ignore directives should be uncommented in downstream projects to disable +# Dependabot updates for the given dependency. Downstream projects will get +# these updates when the pull request(s) in the appropriate skeleton are merged +# and Lineage processes these changes. + +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + ignore: + # Managed by cisagov/skeleton-generic + - dependency-name: actions/cache + - dependency-name: actions/checkout + - dependency-name: actions/setup-go + - dependency-name: actions/setup-python + - dependency-name: hashicorp/setup-terraform + - dependency-name: mxschmitt/action-tmate + # Managed by cisagov/skeleton-python-library + - dependency-name: actions/download-artifact + - dependency-name: actions/upload-artifact + + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" + + - package-ecosystem: "terraform" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/labels.yml b/.github/labels.yml new file mode 100644 index 0000000..7cdfd79 --- /dev/null +++ b/.github/labels.yml @@ -0,0 +1,70 @@ +--- +# Rather than breaking up descriptions into multiline strings we disable that +# specific rule in yamllint for this file. 
+# yamllint disable rule:line-length +- color: "eb6420" + description: This issue or pull request is awaiting the outcome of another issue or pull request + name: blocked +- color: "000000" + description: This issue or pull request involves changes to existing functionality + name: breaking change +- color: "d73a4a" + description: This issue or pull request addresses broken functionality + name: bug +- color: "07648d" + description: This issue will be advertised on code.gov's Open Tasks page (https://code.gov/open-tasks) + name: code.gov +- color: "0366d6" + description: Pull requests that update a dependency file + name: dependencies +- color: "5319e7" + description: This issue or pull request improves or adds to documentation + name: documentation +- color: "cfd3d7" + description: This issue or pull request already exists or is covered in another issue or pull request + name: duplicate +- color: "b005bc" + description: A high-level objective issue encompassing multiple issues instead of a specific unit of work + name: epic +- color: "000000" + description: Pull requests that update GitHub Actions code + name: github-actions +- color: "0e8a16" + description: This issue or pull request is well-defined and good for newcomers + name: good first issue +- color: "ff7518" + description: Pull request that should count toward Hacktoberfest participation + name: hacktoberfest-accepted +- color: "a2eeef" + description: This issue or pull request will add or improve functionality, maintainability, or ease of use + name: improvement +- color: "fef2c0" + description: This issue or pull request is not applicable, incorrect, or obsolete + name: invalid +- color: "ce099a" + description: This pull request is ready to merge during the next Lineage Kraken release + name: kraken 🐙 +- color: "a4fc5d" + description: This issue or pull request requires further information + name: need info +- color: "fcdb45" + description: This pull request is awaiting an action or decision to move forward + name: on hold +- color: "3772a4" + description: Pull requests that update Python code + name: python +- color: "ef476c" + description: This issue is a request for information or needs discussion + name: question +- color: "00008b" + description: This issue or pull request adds or otherwise modifies test code + name: test +- color: "1d76db" + description: This issue or pull request pulls in upstream updates + name: upstream update +- color: "d4c5f9" + description: This issue or pull request increments the version number + name: version bump +- color: "ffffff" + description: This issue will not be incorporated + name: wontfix diff --git a/.github/lineage.yml b/.github/lineage.yml new file mode 100644 index 0000000..df04737 --- /dev/null +++ b/.github/lineage.yml @@ -0,0 +1,5 @@ +--- +lineage: + skeleton: + remote-url: https://github.com/cisagov/skeleton-python-library.git +version: '1' diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..baa8c3b --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,304 @@ +--- +name: build + +on: + push: + pull_request: + repository_dispatch: + types: [apb] + +env: + CURL_CACHE_DIR: ~/.cache/curl + PIP_CACHE_DIR: ~/.cache/pip + PRE_COMMIT_CACHE_DIR: ~/.cache/pre-commit + RUN_TMATE: ${{ secrets.RUN_TMATE }} + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - id: setup-env + uses: cisagov/setup-env-github-action@develop + - uses: actions/checkout@v3 + - id: setup-python + uses: actions/setup-python@v4 + with: + python-version: "3.10" 
+ # We need the Go version and Go cache location for the actions/cache step, + # so the Go installation must happen before that. + - id: setup-go + uses: actions/setup-go@v3 + with: + go-version: "1.19" + - name: Lookup Go cache directory + id: go-cache + run: | + echo "dir=$(go env GOCACHE)" >> $GITHUB_OUTPUT + - uses: actions/cache@v3 + env: + BASE_CACHE_KEY: "${{ github.job }}-${{ runner.os }}-\ + py${{ steps.setup-python.outputs.python-version }}-\ + go${{ steps.setup-go.outputs.go-version }}-\ + packer${{ steps.setup-env.outputs.packer-version }}-\ + tf${{ steps.setup-env.outputs.terraform-version }}-" + with: + # Note that the .terraform directory IS NOT included in the + # cache because if we were caching, then we would need to use + # the `-upgrade=true` option. This option blindly pulls down the + # latest modules and providers instead of checking to see if an + # update is required. That behavior defeats the benefits of caching. + # so there is no point in doing it for the .terraform directory. + path: | + ${{ env.PIP_CACHE_DIR }} + ${{ env.PRE_COMMIT_CACHE_DIR }} + ${{ env.CURL_CACHE_DIR }} + ${{ steps.go-cache.outputs.dir }} + # We do not use '**/setup.py' in the cache key so only the 'setup.py' + # file in the root of the repository is used. This is in case a Python + # package were to have a 'setup.py' as part of its internal codebase. + key: "${{ env.BASE_CACHE_KEY }}\ + ${{ hashFiles('**/requirements-test.txt') }}-\ + ${{ hashFiles('**/requirements.txt') }}-\ + ${{ hashFiles('**/.pre-commit-config.yaml') }}-\ + ${{ hashFiles('setup.py') }}" + restore-keys: | + ${{ env.BASE_CACHE_KEY }} + - name: Setup curl cache + run: mkdir -p ${{ env.CURL_CACHE_DIR }} + - name: Install Packer + env: + PACKER_VERSION: ${{ steps.setup-env.outputs.packer-version }} + run: | + PACKER_ZIP="packer_${PACKER_VERSION}_linux_amd64.zip" + curl --output ${{ env.CURL_CACHE_DIR }}/"${PACKER_ZIP}" \ + --time-cond ${{ env.CURL_CACHE_DIR }}/"${PACKER_ZIP}" \ + --location \ + "https://releases.hashicorp.com/packer/${PACKER_VERSION}/${PACKER_ZIP}" + sudo unzip -d /opt/packer \ + ${{ env.CURL_CACHE_DIR }}/"${PACKER_ZIP}" + sudo mv /usr/local/bin/packer /usr/local/bin/packer-default + sudo ln -s /opt/packer/packer /usr/local/bin/packer + - uses: hashicorp/setup-terraform@v2 + with: + terraform_version: ${{ steps.setup-env.outputs.terraform-version }} + - name: Install shfmt + env: + PACKAGE_URL: mvdan.cc/sh/v3/cmd/shfmt + PACKAGE_VERSION: ${{ steps.setup-env.outputs.shfmt-version }} + run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} + - name: Install Terraform-docs + env: + PACKAGE_URL: github.com/terraform-docs/terraform-docs + PACKAGE_VERSION: ${{ steps.setup-env.outputs.terraform-docs-version }} + run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install --upgrade --requirement requirements-test.txt + - name: Set up pre-commit hook environments + run: pre-commit install-hooks + - name: Run pre-commit on all files + run: pre-commit run --all-files + - name: Setup tmate debug session + uses: mxschmitt/action-tmate@v3 + if: env.RUN_TMATE + test: + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + python-version: + - "3.7" + - "3.8" + - "3.9" + - "3.10" + - "3.11" + include: + - os: ubuntu-20.04 + python-version: "3.6" + steps: + - uses: actions/checkout@v3 + - id: setup-python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - uses: 
actions/cache@v3 + env: + BASE_CACHE_KEY: "${{ github.job }}-${{ runner.os }}-\ + py${{ steps.setup-python.outputs.python-version }}-" + with: + path: ${{ env.PIP_CACHE_DIR }} + # We do not use '**/setup.py' in the cache key so only the 'setup.py' + # file in the root of the repository is used. This is in case a Python + # package were to have a 'setup.py' as part of its internal codebase. + key: "${{ env.BASE_CACHE_KEY }}\ + ${{ hashFiles('**/requirements-test.txt') }}-\ + ${{ hashFiles('**/requirements.txt') }}-\ + ${{ hashFiles('setup.py') }}" + restore-keys: | + ${{ env.BASE_CACHE_KEY }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install --upgrade --requirement requirements-test.txt + - name: Run tests + env: + RELEASE_TAG: ${{ github.event.release.tag_name }} + run: pytest + - name: Upload coverage report + run: coveralls + env: + COVERALLS_FLAG_NAME: "py${{ matrix.python-version }}" + COVERALLS_PARALLEL: true + COVERALLS_SERVICE_NAME: github + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + if: success() + - name: Setup tmate debug session + uses: mxschmitt/action-tmate@v3 + if: env.RUN_TMATE + coveralls-finish: + runs-on: ubuntu-latest + needs: + - test + steps: + - uses: actions/checkout@v3 + - id: setup-python + uses: actions/setup-python@v4 + with: + python-version: "3.10" + - uses: actions/cache@v3 + env: + BASE_CACHE_KEY: "${{ github.job }}-${{ runner.os }}-\ + py${{ steps.setup-python.outputs.python-version }}-" + with: + path: ${{ env.PIP_CACHE_DIR }} + # We do not use '**/setup.py' in the cache key so only the 'setup.py' + # file in the root of the repository is used. This is in case a Python + # package were to have a 'setup.py' as part of its internal codebase. + key: "${{ env.BASE_CACHE_KEY }}\ + ${{ hashFiles('**/requirements-test.txt') }}-\ + ${{ hashFiles('**/requirements.txt') }}-\ + ${{ hashFiles('setup.py') }}" + restore-keys: | + ${{ env.BASE_CACHE_KEY }} + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install --upgrade --requirement requirements-test.txt + - name: Finished coveralls reports + run: coveralls --finish + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Setup tmate debug session + uses: mxschmitt/action-tmate@v3 + if: env.RUN_TMATE + build: + runs-on: ${{ matrix.os }} + needs: + - lint + - test + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + python-version: + - "3.7" + - "3.8" + - "3.9" + - "3.10" + - "3.11" + include: + - os: ubuntu-20.04 + python-version: "3.6" + steps: + - uses: actions/checkout@v3 + - id: setup-python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - uses: actions/cache@v3 + env: + BASE_CACHE_KEY: "${{ github.job }}-${{ runner.os }}-\ + py${{ steps.setup-python.outputs.python-version }}-" + with: + path: ${{ env.PIP_CACHE_DIR }} + # We do not use '**/setup.py' in the cache key so only the 'setup.py' + # file in the root of the repository is used. This is in case a Python + # package were to have a 'setup.py' as part of its internal codebase. 
+ key: "${{ env.BASE_CACHE_KEY }}\ + ${{ hashFiles('**/requirements.txt') }}-\ + ${{ hashFiles('setup.py') }}" + restore-keys: | + ${{ env.BASE_CACHE_KEY }} + - name: Install build dependencies + run: | + python -m pip install --upgrade pip setuptools wheel + python -m pip install --upgrade build + - name: Build artifacts + run: python -m build + - name: Upload artifacts + uses: actions/upload-artifact@v3 + with: + name: dist-${{ matrix.python-version }} + path: dist + - name: Setup tmate debug session + uses: mxschmitt/action-tmate@v3 + if: env.RUN_TMATE + test-build: + runs-on: ${{ matrix.os }} + needs: + - build + strategy: + fail-fast: false + matrix: + os: + - ubuntu-latest + python-version: + - "3.7" + - "3.8" + - "3.9" + - "3.10" + - "3.11" + include: + - os: ubuntu-20.04 + python-version: "3.6" + steps: + - uses: actions/checkout@v3 + - id: setup-python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + - uses: actions/cache@v3 + env: + BASE_CACHE_KEY: "${{ github.job }}-${{ runner.os }}-\ + py${{ steps.setup-python.outputs.python-version }}-" + with: + path: ${{ env.PIP_CACHE_DIR }} + # We do not use '**/setup.py' in the cache key so only the 'setup.py' + # file in the root of the repository is used. This is in case a Python + # package were to have a 'setup.py' as part of its internal codebase. + key: "${{ env.BASE_CACHE_KEY }}\ + ${{ hashFiles('**/requirements.txt') }}-\ + ${{ hashFiles('setup.py') }}" + restore-keys: | + ${{ env.BASE_CACHE_KEY }} + - name: Retrieve the built wheel + uses: actions/download-artifact@v3 + with: + name: dist-${{ matrix.python-version }} + - name: Install testing dependencies + run: | + python -m pip install --upgrade pip setuptools wheel + python -m pip install --upgrade pytest pytest-cov + - name: Install the built wheel (there should only be one) + run: python -m pip install *.whl + - name: Run tests + env: + RELEASE_TAG: ${{ github.event.release.tag_name }} + run: pytest + - name: Setup tmate debug session + uses: mxschmitt/action-tmate@v3 + if: env.RUN_TMATE diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 0000000..5e344d1 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,67 @@ +--- +# For most projects, this workflow file will not need changing; you simply need +# to commit it to your repository. +# +# You may wish to alter this file to override the set of languages analyzed, +# or to provide custom queries or build logic. +name: "CodeQL" + +on: + push: + # Dependabot triggered push events have read-only access, but uploading code + # scanning requires write access. + branches-ignore: + - dependabot/** + pull_request: + # The branches below must be a subset of the branches above + branches: + - develop + schedule: + - cron: '0 14 * * 6' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + # required for all workflows + security-events: write + strategy: + fail-fast: false + matrix: + # Override automatic language detection by changing the below list + # Supported options are go, javascript, csharp, python, cpp, and java + language: + - python + # Learn more... + # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. 
+ - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + + # Autobuild attempts to build any compiled languages (C/C++, C#, or + # Java). If this step fails, then you should remove it and run the build + # manually (see below). + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹī¸ Command-line programs to run using the OS shell. + # 📚 https://git.io/JvXDl + + # ✏ī¸ If the Autobuild fails above, remove it and uncomment the following + # three lines and modify them (or add more) to build your code if your + # project uses a compiled language + + # - run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/sync-labels.yml b/.github/workflows/sync-labels.yml new file mode 100644 index 0000000..ae7c091 --- /dev/null +++ b/.github/workflows/sync-labels.yml @@ -0,0 +1,29 @@ +--- +name: sync-labels + +on: + push: + paths: + - '.github/labels.yml' + - '.github/workflows/sync-labels.yml' + +permissions: + contents: read + +jobs: + labeler: + permissions: + # actions/checkout needs this to fetch code + contents: read + # crazy-max/ghaction-github-labeler needs this to manage repository labels + issues: write + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - name: Sync repository labels + if: success() + uses: crazy-max/ghaction-github-labeler@v4 + with: + # This is a hideous ternary equivalent so we only do a dry run unless + # this workflow is triggered by the develop branch. + dry-run: ${{ github.ref_name == 'develop' && 'false' || 'true' }} diff --git a/.gitignore b/.gitignore index 02d609e..242b4aa 100644 --- a/.gitignore +++ b/.gitignore @@ -1,104 +1,12 @@ -*.xlsx -*.csv -todo.txt -*.json -*.sqlite +# This file specifies intentionally untracked files that Git should ignore. +# Files already tracked by Git are not affected. +# See: https://git-scm.com/docs/gitignore -.cache/ -cache/ - -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. 
-*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ +## Python ## +__pycache__ .coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*,cover -.hypothesis/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -target/ - -# IPython Notebook -.ipynb_checkpoints - -# pyenv +.mypy_cache +.pytest_cache .python-version - -# celery beat schedule file -celerybeat-schedule - -# dotenv -.env - -# virtualenv -venv/ -ENV/ - -# Spyder project settings -.spyderproject - -# Rope project settings -.ropeproject - -# PyCharm project settings -.idea/ - -# Cached Public Suffix List -*.dat +*.egg-info +dist diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 0000000..46d45f3 --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,10 @@ +[settings] +combine_star=true +force_sort_within_sections=true + +import_heading_stdlib=Standard Python Libraries +import_heading_thirdparty=Third-Party Libraries +import_heading_firstparty=cisagov Libraries + +# Run isort under the black profile to align with our other Python linting +profile=black diff --git a/.mdl_config.yaml b/.mdl_config.yaml new file mode 100644 index 0000000..4a650c1 --- /dev/null +++ b/.mdl_config.yaml @@ -0,0 +1,60 @@ +--- + +# Default state for all rules +default: true + +# MD003/heading-style/header-style - Heading style +MD003: + # Enforce the ATX-closed style of header + style: "atx_closed" + +# MD004/ul-style - Unordered list style +MD004: + # Enforce dashes for unordered lists + style: "dash" + +# MD013/line-length - Line length +MD013: + # Do not enforce for code blocks + code_blocks: false + # Do not enforce for tables + tables: false + +# MD024/no-duplicate-heading/no-duplicate-header - Multiple headings with the +# same content +MD024: + # Allow headers with the same content as long as they are not in the same + # parent heading + allow_different_nesting: true + +# MD029/ol-prefix - Ordered list item prefix +MD029: + # Enforce the `1.` style for ordered lists + style: "one" + +# MD033/no-inline-html - Inline HTML +MD033: + # The h1 and img elements are allowed to permit header images + allowed_elements: + - h1 + - img + +# MD035/hr-style - Horizontal rule style +MD035: + # Enforce dashes for horizontal rules + style: "---" + +# MD046/code-block-style - Code block style +MD046: + # Enforce the fenced style for code blocks + style: "fenced" + +# MD049/emphasis-style - Emphasis style should be consistent +MD049: + # Enforce asterisks as the style to use for emphasis + style: "asterisk" + +# MD050/strong-style - Strong style should be consistent +MD050: + # Enforce asterisks as the style to use for strong + style: "asterisk" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..12b0024 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,152 @@ +--- +default_language_version: + # force all unspecified python hooks to run python3 + python: python3 + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + hooks: + - id: check-case-conflict + - id: check-executables-have-shebangs + - id: check-json + - id: check-merge-conflict + - id: check-toml + - id: check-xml + - id: debug-statements + - id: detect-aws-credentials + args: + - --allow-missing-credentials + - id: detect-private-key + - id: end-of-file-fixer + exclude: files/(issue|motd) + - 
id: mixed-line-ending + args: + - --fix=lf + - id: pretty-format-json + args: + - --autofix + - id: requirements-txt-fixer + - id: trailing-whitespace + + # Text file hooks + - repo: https://github.com/igorshubovych/markdownlint-cli + rev: v0.32.2 + hooks: + - id: markdownlint + args: + - --config=.mdl_config.yaml + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.0.0-alpha.4 + hooks: + - id: prettier + - repo: https://github.com/adrienverge/yamllint + rev: v1.28.0 + hooks: + - id: yamllint + args: + - --strict + + # GitHub Actions hooks + - repo: https://github.com/python-jsonschema/check-jsonschema + rev: 0.18.4 + hooks: + - id: check-github-actions + - id: check-github-workflows + + # pre-commit hooks + - repo: https://github.com/pre-commit/pre-commit + rev: v2.20.0 + hooks: + - id: validate_manifest + + # Shell script hooks + - repo: https://github.com/cisagov/pre-commit-shfmt + rev: v0.0.2 + hooks: + - id: shfmt + args: + # Indent by two spaces + - -i + - '2' + # Binary operators may start a line + - -bn + # Switch cases are indented + - -ci + # Redirect operators are followed by a space + - -sr + - repo: https://github.com/detailyang/pre-commit-shell + rev: 1.0.5 + hooks: + - id: shell-lint + + # Python hooks + # Run bandit on the "tests" tree with a configuration + - repo: https://github.com/PyCQA/bandit + rev: 1.7.4 + hooks: + - id: bandit + name: bandit (tests tree) + files: tests + args: + - --config=.bandit.yml + # Run bandit on everything except the "tests" tree + - repo: https://github.com/PyCQA/bandit + rev: 1.7.4 + hooks: + - id: bandit + name: bandit (everything else) + exclude: tests + - repo: https://github.com/psf/black + rev: 22.10.0 + hooks: + - id: black + - repo: https://github.com/PyCQA/flake8 + rev: 5.0.4 + hooks: + - id: flake8 + additional_dependencies: + - flake8-docstrings + - repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v0.990 + hooks: + - id: mypy + additional_dependencies: + - types-docopt + - types-requests + - types-setuptools + - repo: https://github.com/asottile/pyupgrade + rev: v3.2.0 + hooks: + - id: pyupgrade + + # Ansible hooks + - repo: https://github.com/ansible-community/ansible-lint + rev: v5.4.0 + hooks: + - id: ansible-lint + # files: molecule/default/playbook.yml + + # Terraform hooks + - repo: https://github.com/antonbabenko/pre-commit-terraform + rev: v1.76.0 + hooks: + - id: terraform_fmt + - id: terraform_validate + + # Docker hooks + - repo: https://github.com/IamTheFij/docker-pre-commit + rev: v2.1.1 + hooks: + - id: docker-compose-check + + # Packer hooks + - repo: https://github.com/cisagov/pre-commit-packer + rev: v0.0.2 + hooks: + - id: packer_validate + - id: packer_fmt diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000..738d402 --- /dev/null +++ b/.prettierignore @@ -0,0 +1,7 @@ +# Already being linted by pretty-format-json +*.json +# Already being linted by mdl +*.md +# Already being linted by yamllint +*.yaml +*.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 08510f1..0000000 --- a/.travis.yml +++ /dev/null @@ -1,48 +0,0 @@ -language: python - -dist: xenial - -services: - - docker - -env: - global: - - IMAGE_NAME=dhsncats/trustymail - - DOCKER_USER=jsf9k - - secure: 
"IAbtmubLq2hL71aollQEfoV+t9Zbqn4rNVqi5YNerqxvXr6WiDzwmYUujOCnQiHli8xkIU0J8OSPX0aA4cOTxPGiZeNguGuVWmR2ZCB8SMyjbKJOEIpLZv/jG1Be6dVXiJwKwQM3yX4pqPfLIuYkE6S1GAodudPzcJ8xM/h1qzJijknJEqsCJQw43zSHZ/epYulgWcmnCAmaoehZTCjVcW4l8iyVHSNfgO7gu5iWC9y4AIIX96E9TZkSDeud1yqUcORMwfdOl2pHhDh3KnF8HZSOCetTHP7JacLUMJiiCpKN34Xn9RQgjbVKOrF/rcC8WEDrWKn14SFfgUP0dl6lCr6P9HE4aHKvrH/nCyflFOANbEKMywE8DzNA3zd7MC0HQkyb40LAiBSbTD58myUsc+WlmaZyvpJ7akukbmVjVPQWNvs1laz/bqyLdPh4WMqgOTbP7BrgDeIrCVedwdvhqq9KEaoak8RBs/Wb8LisI6j+vAY/HKlVnuIXRL0RNGTko96kBluEpAsWDjls39Hmu5hl1glbqCWDW9+dE0/Zx4MuSt4OVAywMW+lzxA16SdqDLCZqGv00vnPpxuBAvEEQbVtsmQ6lR+fwTYKFEDZM7axRwBlLzIDEzSoz4K0fSq3EuU4mkoVZKcHGnuHuRqaNhce0zzZx3lHdNTm7oa7b0U=" - -python: - - 3.6 - - 3.7 - - 3.8 - -before_install: - - sudo apt-get install -y shellcheck - -install: - - pip install flake8 pytest-cov pytest coveralls - - pip install --editable . - -script: - - shellcheck bump_version.sh tag.sh travis_scripts/*.sh - - pytest --cov=trustymail - - flake8 . - # - bash travis_scripts/build_docker_image.sh - -after_success: - - coveralls - -deploy: - - provider: pypi - user: jsf9k - password: - secure: a1ZOoVl8PVCqWVuqO9v9Y4CZqrNepir9m+MfEBgQuFZrc45zGPmHMbAIfHOETLuEozBx1Dn5gTtssitMMpJmDia8sNkXh+yBuTOlO0zCL9iGIfH/Ul4Z38LshKuSKIbQnO3aedDSo05iP+7L4/ae1zna29E75vLeU3nwgJH6lLAOB0g5tYZPiul/F7UDFBONgsFb0VnM7S3L6e7yUZh0CZ61KARPjKFGqOzXShJWVhoTW3PUn43AzsgUXM8DY2BJX740ks4YOcTIBpbq1+g7KWrL2XZD0vws0vtbqm+HL045fgshOqAzeINf2D5G/yh72YBULlthXCU0pqk4C83///5WL/3PRFv/EYoPnfRq5/Kcmg2qYd5YvrLh1gEz9Aq97+cd1pXXhJh0lK/bNrP7q7/+X1+37eVTriZ1g2FVazBpLjM7JAOT1SZ8HqHm0TInMDSAlQ8aAn2Yke4IziG2v6v9OuOdsEk0yZNI1pEZNVrvS3cXXEe1M+a5VZGOfluqFOLY/xuD6f3tLPHLdSr9Ujw4Fb4naGyIYY4/dCa3K9ATlCk9CyDoqQ+LNm8jOrDdyCALK9S40waoBgMgYjddO+Jb1gn3GtIaH34CIzjSvxry41oACCYMEP3ej3lJ44ZAfOefT22suW9c4ryX/2QS/Hq+k7fAwgM+Q2z69Ysbac4= - distributions: sdist bdist_wheel - on: - tags: true - python: '3.8' - # - provider: script - # script: bash travis_scripts/deploy_to_docker_hub.sh - # on: - # tags: true - # python: '3.6' diff --git a/.yamllint b/.yamllint new file mode 100644 index 0000000..2a119a6 --- /dev/null +++ b/.yamllint @@ -0,0 +1,23 @@ +--- +extends: default + +rules: + # yamllint does not like it when you comment out different parts of + # dictionaries in a list. You can see + # https://github.com/adrienverge/yamllint/issues/384 for some examples of + # this behavior. + comments-indentation: disable + + # yamllint does not allow inline mappings that exceed the line length by + # default. There are many scenarios where the inline mapping may be a key, + # hash, or other long value that would exceed the line length but cannot + # reasonably be broken across lines. + line-length: + # This rule implies the allow-non-breakable-words rule + allow-non-breakable-inline-mappings: true + # Allows a 10% overage from the default limit of 80 + max: 88 + + # yamllint doesn't like when we use yes and no for true and false, + # but that's pretty standard in Ansible. + truthy: disable diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..1176f1d --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,176 @@ +# Welcome # + +We're so glad you're thinking about contributing to this open source +project! If you're unsure or afraid of anything, just ask or submit +the issue or pull request anyway. The worst that can happen is that +you'll be politely asked to change something. We appreciate any sort +of contribution, and don't want a wall of rules to get in the way of +that. 
+ +Before contributing, we encourage you to read our CONTRIBUTING policy +(you are here), our [LICENSE](LICENSE), and our [README](README.md), +all of which should be in this repository. + +## Issues ## + +If you want to report a bug or request a new feature, the most direct +method is to [create an +issue](https://github.com/cisagov/trustymail/issues) in +this repository. We recommend that you first search through existing +issues (both open and closed) to check if your particular issue has +already been reported. If it has then you might want to add a comment +to the existing issue. If it hasn't then feel free to create a new +one. + +## Pull requests ## + +If you choose to [submit a pull +request](https://github.com/cisagov/trustymail/pulls), +you will notice that our continuous integration (CI) system runs a +fairly extensive set of linters, syntax checkers, system, and unit tests. +Your pull request may fail these checks, and that's OK. If you want +you can stop there and wait for us to make the necessary corrections +to ensure your code passes the CI checks. + +If you want to make the changes yourself, or if you want to become a +regular contributor, then you will want to set up +[pre-commit](https://pre-commit.com/) on your local machine. Once you +do that, the CI checks will run locally before you even write your +commit message. This speeds up your development cycle considerably. + +### Setting up pre-commit ### + +There are a few ways to do this, but we prefer to use +[`pyenv`](https://github.com/pyenv/pyenv) and +[`pyenv-virtualenv`](https://github.com/pyenv/pyenv-virtualenv) to +create and manage a Python virtual environment specific to this +project. + +If you already have `pyenv` and `pyenv-virtualenv` configured you can +take advantage of the `setup-env` tool in this repo to automate the +entire environment configuration process. + +```console +./setup-env +``` + +Otherwise, follow the steps below to manually configure your +environment. + +#### Installing and using `pyenv` and `pyenv-virtualenv` #### + +On the Mac, we recommend installing [brew](https://brew.sh/). Then +installation is as simple as `brew install pyenv pyenv-virtualenv` and +adding this to your profile: + +```bash +export PYENV_ROOT="$HOME/.pyenv" +export PATH="$PYENV_ROOT/bin:$PATH" +eval "$(pyenv init --path)" +eval "$(pyenv init -)" +eval "$(pyenv virtualenv-init -)" +``` + +For Linux, Windows Subsystem for Linux (WSL), or on the Mac (if you +don't want to use `brew`) you can use +[pyenv/pyenv-installer](https://github.com/pyenv/pyenv-installer) to +install the necessary tools. Before running this ensure that you have +installed the prerequisites for your platform according to the +[`pyenv` wiki +page](https://github.com/pyenv/pyenv/wiki/common-build-problems). + +On WSL you should treat your platform as whatever Linux distribution +you've chosen to install. + +Once you have installed `pyenv` you will need to add the following +lines to your `.bash_profile` (or `.profile`): + +```bash +export PYENV_ROOT="$HOME/.pyenv" +export PATH="$PYENV_ROOT/bin:$PATH" +eval "$(pyenv init --path)" +``` + +and then add the following lines to your `.bashrc`: + +```bash +eval "$(pyenv init -)" +eval "$(pyenv virtualenv-init -)" +``` + +If you want more information about setting up `pyenv` once installed, please run + +```console +pyenv init +``` + +and + +```console +pyenv virtualenv-init +``` + +for the current configuration instructions. 
+ +If you are using a shell other than `bash` you should follow the +instructions that the `pyenv-installer` script outputs. + +You will need to reload your shell for these changes to take effect so +you can begin to use `pyenv`. + +For a list of Python versions that are already installed and ready to +use with `pyenv`, use the command `pyenv versions`. To see a list of +the Python versions available to be installed and used with `pyenv` +use the command `pyenv install --list`. You can read more +[here](https://github.com/pyenv/pyenv/blob/master/COMMANDS.md) about +the many things that `pyenv` can do. See +[here](https://github.com/pyenv/pyenv-virtualenv#usage) for the +additional capabilities that pyenv-virtualenv adds to the `pyenv` +command. + +#### Creating the Python virtual environment #### + +Once `pyenv` and `pyenv-virtualenv` are installed on your system, you +can create and configure the Python virtual environment with these +commands: + +```console +cd trustymail +pyenv virtualenv trustymail +pyenv local trustymail +pip install --requirement requirements-dev.txt +``` + +#### Installing the pre-commit hook #### + +Now setting up pre-commit is as simple as: + +```console +pre-commit install +``` + +At this point the pre-commit checks will run against any files that +you attempt to commit. If you want to run the checks against the +entire repo, just execute `pre-commit run --all-files`. + +### Running unit and system tests ### + +In addition to the pre-commit checks the CI system will run the suite +of unit and system tests that are included with this project. To run +these tests locally execute `pytest` from the root of the project. + +We encourage any updates to these tests to improve the overall code +coverage. If your pull request adds new functionality we would +appreciate it if you extend existing test cases, or add new ones to +exercise the newly added code. + +## Public domain ## + +This project is in the public domain within the United States, and +copyright and related rights in the work worldwide are waived through +the [CC0 1.0 Universal public domain +dedication](https://creativecommons.org/publicdomain/zero/1.0/). + +All contributions to this project will be released under the CC0 +dedication. By submitting a pull request, you are agreeing to comply +with this waiver of copyright interest. diff --git a/Dockerfile b/Dockerfile index bba529c..e51f6e1 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,11 +6,7 @@ RUN pip install --no-cache-dir --upgrade pip setuptools wheel COPY requirements.txt . -RUN pip install --no-cache-dir --requirement requirements.txt - -COPY scripts/ scripts/ - -COPY trustymail/ trustymail/ +COPY src/trustymail/ src/trustymail/ COPY README.md . @@ -18,7 +14,7 @@ COPY requirements-dev.txt . COPY setup.py . -RUN pip install --editable . 
+RUN pip install --no-cache-dir --requirement requirements.txt ENTRYPOINT ["trustymail"] CMD ["--help"] diff --git a/LICENSE.md b/LICENSE similarity index 100% rename from LICENSE.md rename to LICENSE diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 4772034..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -import README.md -import LICENSE.md diff --git a/README.md b/README.md index 39c3b6f..f0eccb2 100644 --- a/README.md +++ b/README.md @@ -1,16 +1,16 @@ # Trustworthy Mail # [![Latest Version](https://img.shields.io/pypi/v/trustymail.svg)](https://pypi.org/project/trustymail/) -[![Build Status](https://travis-ci.com/cisagov/trustymail.svg?branch=develop)](https://travis-ci.com/cisagov/trustymail) +[![GitHub Build Status](https://github.com/cisagov/trustymail/workflows/build/badge.svg)](https://github.com/cisagov/trustymail/actions) +[![CodeQL](https://github.com/cisagov/trustymail/workflows/CodeQL/badge.svg)](https://github.com/cisagov/trustymail/actions/workflows/codeql-analysis.yml) [![Coverage Status](https://coveralls.io/repos/github/cisagov/trustymail/badge.svg?branch=develop)](https://coveralls.io/github/cisagov/trustymail?branch=develop) -[![Total alerts](https://img.shields.io/lgtm/alerts/g/cisagov/trustymail.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/cisagov/trustymail/alerts/) -[![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/cisagov/trustymail.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/cisagov/trustymail/context:python) +[![Known Vulnerabilities](https://snyk.io/test/github/cisagov/trustymail/develop/badge.svg)](https://snyk.io/test/github/cisagov/trustymail) `trustymail` is a tool that evaluates SPF/DMARC records set in a domain's DNS. It also checks the mail servers listed in a domain's MX records for STARTTLS support. It saves its results to CSV or JSON. -## Getting Started ## +## Getting started ## `trustymail` requires **Python 3.6+**. Python 2 is not supported. @@ -46,7 +46,7 @@ Then run the CLI: python scripts/trustymail [options] example.com ``` -### Using Docker (optional) +### Using Docker (optional) ### ```console ./run [opts] @@ -112,58 +112,59 @@ output will always be written to disk, defaulting to `results.csv`. ## What's Checked? ## For a given domain, MX records, SPF records (TXT), DMARC (TXT, at -\_dmarc.), and support for STARTTLS are checked. Resource records can also be checked for DNSSEC if the resolver used is DNSSEC-aware. +`_dmarc.`), and support for STARTTLS are checked. Resource records can +also be checked for DNSSEC if the resolver used is DNSSEC-aware. The following values are returned in `results.csv`: ### Domain and redirect info ### -* `Domain` - The domain you're scanning! -* `Base Domain` - The base domain of `Domain`. For example, for a +- `Domain` - The domain you're scanning! +- `Base Domain` - The base domain of `Domain`. For example, for a Domain of `sub.example.gov`, the Base Domain will be `example.gov`. Usually this is the second-level domain, but `trustymail` will download and factor in the [Public Suffix List](https://publicsuffix.org) when calculating the base domain. -* `Live` - The domain is actually published in the DNS. +- `Live` - The domain is actually published in the DNS. ### Mail sending ### -* `MX Record` - If an MX record was found that contains at least a +- `MX Record` - If an MX record was found that contains at least a single mail server. 
-* `MX Record DNSSEC` - A boolean value indicating whether or not the +- `MX Record DNSSEC` - A boolean value indicating whether or not the DNS record is protected by DNSSEC. -* `Mail Servers` - The list of hosts found in the MX record. -* `Mail Server Ports Tested` - A list of the ports tested for SMTP and +- `Mail Servers` - The list of hosts found in the MX record. +- `Mail Server Ports Tested` - A list of the ports tested for SMTP and STARTTLS support. -* `Domain Supports SMTP` - True if and only if __any__ mail servers +- `Domain Supports SMTP` - True if and only if **any** mail servers specified in a MX record associated with the domain supports SMTP. -* `Domain Supports SMTP Results` - A list of the mail server and port +- `Domain Supports SMTP Results` - A list of the mail server and port combinations that support SMTP. -* `Domain Supports STARTTLS` - True if and only if __all__ mail +- `Domain Supports STARTTLS` - True if and only if **all** mail servers that support SMTP also support STARTTLS. -* `Domain Supports STARTTLS Results` - A list of the mail server and +- `Domain Supports STARTTLS Results` - A list of the mail server and port combinations that support STARTTLS. ### SPF ### -* `SPF Record` - Whether or not a SPF record was found. -* `SPF Record DNSSEC` - A boolean value indicating whether or not the +- `SPF Record` - Whether or not a SPF record was found. +- `SPF Record DNSSEC` - A boolean value indicating whether or not the DNS record is protected by DNSSEC. -* `Valid SPF` - Whether the SPF record found is syntactically correct, +- `Valid SPF` - Whether the SPF record found is syntactically correct, per RFC 4408. -* `SPF Results` - The textual representation of any SPF record found +- `SPF Results` - The textual representation of any SPF record found for the domain. ### DMARC ### -* `DMARC Record` - True/False whether or not a DMARC record was found. -* `DMARC Record DNSSEC` - A boolean value indicating whether or not +- `DMARC Record` - True/False whether or not a DMARC record was found. +- `DMARC Record DNSSEC` - A boolean value indicating whether or not the DNS record is protected by DNSSEC. -* `Valid DMARC` - Whether the DMARC record found is syntactically +- `Valid DMARC` - Whether the DMARC record found is syntactically correct. -* `DMARC Results` - The DMARC record that was discovered when querying +- `DMARC Results` - The DMARC record that was discovered when querying DNS. -* `DMARC Record on Base Domain`, `DMARC Record on Base Domain DNSSEC`, +- `DMARC Record on Base Domain`, `DMARC Record on Base Domain DNSSEC`, `Valid DMARC Record on Base Domain`, `DMARC Results on Base Domain` - Same definition as above, but returns the result for the Base Domain. This is important in DMARC because if there isn't a @@ -171,47 +172,43 @@ The following values are returned in `results.csv`: Domain", per [RFC 7489](https://tools.ietf.org/html/rfc7489#section-6.6.3)), is checked and applied. -* `DMARC Policy` - An adjudication, based on any policies found in +- `DMARC Policy` - An adjudication, based on any policies found in `DMARC Results` and `DMARC Results on Base Domain`, of the relevant DMARC policy that applies. -* `DMARC Subdomain Policy` - An adjudication, based on any policies +- `DMARC Subdomain Policy` - An adjudication, based on any policies found in `DMARC Results` and `DMARC Results on Base Domain`, of the relevant DMARC subdomain policy that applies. 
-* `DMARC Policy Percentage` - The percentage of mail that should be +- `DMARC Policy Percentage` - The percentage of mail that should be subjected to the `DMARC Policy` according to the `DMARC Results`. -* `DMARC Aggregate Report URIs` - A list of the DMARC aggregate report +- `DMARC Aggregate Report URIs` - A list of the DMARC aggregate report URIs specified by the domain. -* `DMARC Forensic Report URIs` - A list of the DMARC forensic report +- `DMARC Forensic Report URIs` - A list of the DMARC forensic report URIs specified by the domain. -* `DMARC Has Aggregate Report URI` - A boolean value that indicates if +- `DMARC Has Aggregate Report URI` - A boolean value that indicates if `DMARC Results` included `rua` URIs that tell recipients where to send DMARC aggregate reports. -* `DMARC Has Forensic Report URI` - A boolean value that indicates if +- `DMARC Has Forensic Report URI` - A boolean value that indicates if `DMARC Results` included `ruf` URIs that tell recipients where to send DMARC forensic reports. -* `DMARC Reporting Address Acceptance Error` - A boolean value that is +- `DMARC Reporting Address Acceptance Error` - A boolean value that is True if one or more of the domains listed in the aggregate and forensic report URIs does not indicate that it accepts DMARC reports from the domain being tested. -### Etc. ### +### Everything else ### -* `Syntax Errors` - A list of syntax errors that were encountered when +- `Syntax Errors` - A list of syntax errors that were encountered when analyzing SPF records. -* `Debug Info` - A list of any other warnings or errors encountered, +- `Debug Info` - A list of any other warnings or errors encountered, such as DNS failures. These can be helpful when determining how `trustymail` reached its conclusions, and are indispensible for bug reports. -## Public domain ## +## Contributing ## -This project is in the worldwide [public domain](LICENSE.md). +We welcome contributions! Please see [`CONTRIBUTING.md`](CONTRIBUTING.md) for +details. -This project is in the public domain within the United States, and -copyright and related rights in the work worldwide are waived through -the [CC0 1.0 Universal public domain -dedication](https://creativecommons.org/publicdomain/zero/1.0/). +## License ## -All contributions to this project will be released under the CC0 -dedication. By submitting a pull request, you are agreeing to comply -with this waiver of copyright interest. +This project is in the worldwide [public domain](LICENSE). 
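As a rough illustration of what the checks above amount to on the wire, the lookups `trustymail` performs for a domain can be approximated with `dig` (the domain below is only a placeholder):

```console
dig +short MX example.gov
dig +short TXT example.gov
dig +short TXT _dmarc.example.gov
```

The MX answers feed the SMTP/STARTTLS probes, any TXT record beginning with `v=spf1` is treated as the SPF record, and the TXT record at `_dmarc.` supplies the DMARC policy fields described above.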
diff --git a/bump_version.sh b/bump_version.sh index 573be27..c9b613b 100755 --- a/bump_version.sh +++ b/bump_version.sh @@ -6,46 +6,48 @@ set -o nounset set -o errexit set -o pipefail -VERSION_FILE=trustymail/__init__.py +VERSION_FILE=src/trustymail/_version.py HELP_INFORMATION="bump_version.sh (show|major|minor|patch|prerelease|build|finalize)" old_version=$(sed -n "s/^__version__ = \"\(.*\)\"$/\1/p" $VERSION_FILE) +# Escape the periods so they are interpreted literally and don't +# just match any character +old_version_regex=${old_version//\./\\\.} -if [ $# -ne 1 ] -then - echo "$HELP_INFORMATION" +if [ $# -ne 1 ]; then + echo "$HELP_INFORMATION" else - case $1 in - major|minor|patch|prerelease|build) - new_version=$(python -c "import semver; print(semver.bump_$1('$old_version'))") - echo Changing version from "$old_version" to "$new_version" - # A temp file is used to provide compatability with macOS development - # as a result of macOS using the BSD version of sed - tmp_file=/tmp/version.$$ - sed "s/$old_version/$new_version/" $VERSION_FILE > $tmp_file - mv $tmp_file $VERSION_FILE - git add $VERSION_FILE - git commit -m"Bump version from $old_version to $new_version" - git push - ;; - finalize) - new_version=$(python -c "import semver; print(semver.finalize_version('$old_version'))") - echo Changing version from "$old_version" to "$new_version" - # A temp file is used to provide compatability with macOS development - # as a result of macOS using the BSD version of sed - tmp_file=/tmp/version.$$ - sed "s/$old_version/$new_version/" $VERSION_FILE > $tmp_file - mv $tmp_file $VERSION_FILE - git add $VERSION_FILE - git commit -m"Bump version from $old_version to $new_version" - git push - ;; - show) - echo "$old_version" - ;; - *) - echo "$HELP_INFORMATION" - ;; - esac + case $1 in + major | minor | patch | prerelease | build) + new_version=$(python -c "import semver; print(semver.bump_$1('$old_version'))") + echo Changing version from "$old_version" to "$new_version" + # A temp file is used to provide compatibility with macOS development + # as a result of macOS using the BSD version of sed + tmp_file=/tmp/version.$$ + sed "s/$old_version_regex/$new_version/" $VERSION_FILE > $tmp_file + mv $tmp_file $VERSION_FILE + git add $VERSION_FILE + git commit -m"Bump version from $old_version to $new_version" + git push + ;; + finalize) + new_version=$(python -c "import semver; print(semver.finalize_version('$old_version'))") + echo Changing version from "$old_version" to "$new_version" + # A temp file is used to provide compatibility with macOS development + # as a result of macOS using the BSD version of sed + tmp_file=/tmp/version.$$ + sed "s/$old_version_regex/$new_version/" $VERSION_FILE > $tmp_file + mv $tmp_file $VERSION_FILE + git add $VERSION_FILE + git commit -m"Finalize version from $old_version to $new_version" + git push + ;; + show) + echo "$old_version" + ;; + *) + echo "$HELP_INFORMATION" + ;; + esac fi diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..ed958e0 --- /dev/null +++ b/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +addopts = -v -ra --cov diff --git a/requirements-dev.txt b/requirements-dev.txt index 633f37e..1d7e302 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,7 +1,4 @@ --r requirements.txt - -check-manifest>=0.36 -pytest>=3.5.0 -semver>=2.7.9 -tox>=3.0.0 -wheel>=0.31.0 +--requirement requirements-test.txt +ipython +mypy +semver diff --git a/requirements-test.txt b/requirements-test.txt new file mode 100644 index 0000000..1d0a5a4 ---
/dev/null +++ b/requirements-test.txt @@ -0,0 +1,2 @@ +--editable .[test] +--requirement requirements.txt diff --git a/requirements.txt b/requirements.txt index fd95735..8b75fe9 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,3 @@ -dnspython>=1.15.0 -docopt>=0.6.2 -publicsuffix>=1.1.0 -py3dns>=3.1.0 -pyspf==2.0.11 -requests>=2.18.4 +# Note: Add any additional requirements to setup.py's install_requires field +--editable . +wheel diff --git a/setup-env b/setup-env new file mode 100755 index 0000000..3392f60 --- /dev/null +++ b/setup-env @@ -0,0 +1,193 @@ +#!/usr/bin/env bash + +set -o nounset +set -o errexit +set -o pipefail + +USAGE=$( + cat << 'END_OF_LINE' +Configure a development environment for this repository. + +It does the following: + - Verifies pyenv and pyenv-virtualenv are installed. + - Creates a Python virtual environment. + - Configures the activation of the virtual enviroment for the repo directory. + - Installs the requirements needed for development (including mypy type stubs). + - Installs git pre-commit hooks. + - Configures git upstream remote "lineage" repositories. + +Usage: + setup-env [options] [virt_env_name] + setup-env (-h | --help) + +Options: + -f --force Delete virtual enviroment if it already exists. + -h --help Show this message. + -i --install-hooks Install hook environments for all environments in the + pre-commit config file. + +END_OF_LINE +) + +# Flag to force deletion and creation of virtual environment +FORCE=0 + +# Positional parameters +PARAMS="" + +# Parse command line arguments +while (("$#")); do + case "$1" in + -f | --force) + FORCE=1 + shift + ;; + -h | --help) + echo "${USAGE}" + exit 0 + ;; + -i | --install-hooks) + INSTALL_HOOKS=1 + shift + ;; + -*) # unsupported flags + echo "Error: Unsupported flag $1" >&2 + exit 1 + ;; + *) # preserve positional arguments + PARAMS="$PARAMS $1" + shift + ;; + esac +done + +# set positional arguments in their proper place +eval set -- "$PARAMS" + +# Check to see if pyenv is installed +if [ -z "$(command -v pyenv)" ] || [ -z "$(command -v pyenv-virtualenv)" ]; then + echo "pyenv and pyenv-virtualenv are required." + if [[ "$OSTYPE" == "darwin"* ]]; then + cat << 'END_OF_LINE' + + On the Mac, we recommend installing brew, https://brew.sh/. Then installation + is as simple as `brew install pyenv pyenv-virtualenv` and adding this to your + profile: + + eval "$(pyenv init -)" + eval "$(pyenv virtualenv-init -)" + +END_OF_LINE + + fi + cat << 'END_OF_LINE' + For Linux, Windows Subsystem for Linux (WSL), or on the Mac (if you don't want + to use "brew") you can use https://github.com/pyenv/pyenv-installer to install + the necessary tools. Before running this ensure that you have installed the + prerequisites for your platform according to the pyenv wiki page, + https://github.com/pyenv/pyenv/wiki/common-build-problems. + + On WSL you should treat your platform as whatever Linux distribution you've + chosen to install. + + Once you have installed "pyenv" you will need to add the following lines to + your ".bashrc": + + export PATH="$PATH:$HOME/.pyenv/bin" + eval "$(pyenv init -)" + eval "$(pyenv virtualenv-init -)" +END_OF_LINE + exit 1 +fi + +set +o nounset +# Determine the virtual environment name +if [ "$1" ]; then + # Use the user-provided environment name + env_name=$1 +else + # Set the environment name to the last part of the working directory. + env_name=${PWD##*/} +fi +set -o nounset + +# Remove any lingering local configuration. 
+if [ $FORCE -ne 0 ]; then + rm -f .python-version + pyenv virtualenv-delete --force "${env_name}" || true +elif [[ -f .python-version ]]; then + cat << 'END_OF_LINE' + An existing .python-version file was found. Either remove this file yourself + or re-run with --force option to have it deleted along with the associated + virtual environment. + + rm .python-version + +END_OF_LINE + exit 1 +fi + +# Create a new virtual environment for this project +if ! pyenv virtualenv "${env_name}"; then + cat << END_OF_LINE + An existing virtual environment named $env_name was found. Either delete this + environment yourself or re-run with --force option to have it deleted. + + pyenv virtualenv-delete ${env_name} + +END_OF_LINE + exit 1 +fi + +# Set the local application-specific Python version(s) by writing the +# version name to a file named `.python-version'. +pyenv local "${env_name}" + +# Upgrade pip and friends +python3 -m pip install --upgrade pip setuptools wheel + +# Find a requirements file (if possible) and install +for req_file in "requirements-dev.txt" "requirements-test.txt" "requirements.txt"; do + if [[ -f $req_file ]]; then + pip install --requirement $req_file + break + fi +done + +# Install all necessary mypy type stubs +mypy --install-types src/ + +# Install git pre-commit hooks now or later. +pre-commit install ${INSTALL_HOOKS:+"--install-hooks"} + +# Setup git remotes from lineage configuration +# This could fail if the remotes are already setup, but that is ok. +set +o errexit + +eval "$( + python3 << 'END_OF_LINE' +from pathlib import Path +import yaml +import sys + +LINEAGE_CONFIG = Path(".github/lineage.yml") + +if not LINEAGE_CONFIG.exists(): + print("No lineage configuration found.", file=sys.stderr) + sys.exit(0) + +with LINEAGE_CONFIG.open("r") as f: + lineage = yaml.safe_load(stream=f) + +if lineage["version"] == "1": + for parent_name, v in lineage["lineage"].items(): + remote_url = v["remote-url"] + print(f"git remote add {parent_name} {remote_url};") + print(f"git remote set-url --push {parent_name} no_push;") +else: + print(f'Unsupported lineage version: {lineage["version"]}', file=sys.stderr) +END_OF_LINE +)" + +# Qapla +echo "Success!" diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 8cf3870..0000000 --- a/setup.cfg +++ /dev/null @@ -1,9 +0,0 @@ -[flake8] -ignore = E501,F999 -exclude = venv,.tox - -[bdist_wheel] -universal = true - -[metadata] -description-file = README.md diff --git a/setup.py b/setup.py index 00d2242..b296e90 100644 --- a/setup.py +++ b/setup.py @@ -1,41 +1,64 @@ """ -setup module for trustymail +This is the setup module for the trustymail project. 
Based on: -- https://github.com/cisagov/pshtt +- https://packaging.python.org/distributing/ +- https://github.com/pypa/sampleproject/blob/master/setup.py +- https://blog.ionelmc.ro/2014/05/25/python-packaging/#the-structure """ -from setuptools import setup -from trustymail import __version__ +# Standard Python Libraries +import codecs +from glob import glob +from os.path import abspath, basename, dirname, join, splitext + +# Third-Party Libraries +from setuptools import find_packages, setup def readme(): - with open("README.md") as f: + """Read in and return the contents of the project's README.md file.""" + with open("README.md", encoding="utf-8") as f: return f.read() -with open("requirements.txt") as fp: - reqs = [line.strip() for line in fp.readlines() if line] +# Below two methods were pulled from: +# https://packaging.python.org/guides/single-sourcing-package-version/ +def read(rel_path): + """Open a file for reading from a given relative path.""" + here = abspath(dirname(__file__)) + with codecs.open(join(here, rel_path), "r") as fp: + return fp.read() + -with open("requirements-dev.txt") as fp: - lines = [line.strip() for line in fp.readlines() if line] - dev_reqs = [line for line in lines if line and "-r requirements.txt" not in line] +def get_version(version_file): + """Extract a version number from the given file path.""" + for line in read(version_file).splitlines(): + if line.startswith("__version__"): + delim = '"' if '"' in line else "'" + return line.split(delim)[1] + raise RuntimeError("Unable to find version string.") setup( name="trustymail", - version=__version__, + # Versions should comply with PEP440 + version=get_version("src/trustymail/_version.py"), description="Scan domains and return data based on trustworthy email best practices", long_description=readme(), long_description_content_type="text/markdown", - # NCATS "homepage" - url="https://www.us-cert.gov/resources/ncats", - # The project's main homepage - download_url="https://github.com/cisagov/trustymail", + # Landing page for CISA's cybersecurity mission + url="https://www.cisa.gov/cybersecurity", + # Additional URLs for this project per + # https://packaging.python.org/guides/distributing-packages-using-setuptools/#project-urls + project_urls={ + "Source": "https://github.com/cisagov/trustymail", + "Tracker": "https://github.com/cisagov/trustymail/issues", + }, # Author details - author="Cyber and Infrastructure Security Agency", - author_email="ncats@hq.dhs.gov", + author="Cybersecurity and Infrastructure Security Agency", + author_email="github@cisa.dhs.gov", license="License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication", # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ @@ -51,15 +74,45 @@ def readme(): # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: Implementation :: CPython", ], python_requires=">=3.6", # What does your project relate to? 
- keywords="email authentication, STARTTLS", - packages=["trustymail"], - install_requires=reqs, - extras_require={"dev": dev_reqs}, - scripts=["scripts/trustymail"], + keywords="email authentication STARTTLS", + packages=find_packages(where="src"), + package_dir={"": "src"}, + py_modules=[splitext(basename(path))[0] for path in glob("src/*.py")], + install_requires=[ + "dnspython", + "docopt", + "publicsuffix", + "py3dns", + "pyspf", + "requests", + "setuptools >= 24.2.0", + ], + extras_require={ + "test": [ + "coverage", + # coveralls 1.11.0 added a service number for calls from + # GitHub Actions. This caused a regression which resulted in a 422 + # response from the coveralls API with the message: + # Unprocessable Entity for url: https://coveralls.io/api/v1/jobs + # 1.11.1 fixed this issue, but to ensure expected behavior we'll pin + # to never grab the regression version. + "coveralls != 1.11.0", + "pre-commit", + "pytest-cov", + "pytest", + ] + }, + # Conveniently allows one to run the CLI tool as `trustymail` + entry_points={"console_scripts": ["trustymail = trustymail.cli:main"]}, ) diff --git a/src/trustymail/__init__.py b/src/trustymail/__init__.py new file mode 100644 index 0000000..253ac61 --- /dev/null +++ b/src/trustymail/__init__.py @@ -0,0 +1,14 @@ +"""The trustymail library.""" +# Standard Python Libraries +from typing import List + +# We disable a Flake8 check for "Module imported but unused (F401)" here because +# although this import is not directly used, it populates the value +# package_name.__version__, which is used to get version information about this +# Python package. +from ._version import __version__ # noqa: F401 + +__all__: List[str] = [] + +PublicSuffixListFilename = "public_suffix_list.dat" +PublicSuffixListReadOnly = False diff --git a/src/trustymail/__main__.py b/src/trustymail/__main__.py new file mode 100644 index 0000000..c123d36 --- /dev/null +++ b/src/trustymail/__main__.py @@ -0,0 +1,5 @@ +"""Code to run if this package is used as a Python module.""" + +from .cli import main + +main() diff --git a/src/trustymail/_version.py b/src/trustymail/_version.py new file mode 100644 index 0000000..cbf21cc --- /dev/null +++ b/src/trustymail/_version.py @@ -0,0 +1,2 @@ +"""This file defines the version of this module.""" +__version__ = "0.8.0-rc.1" diff --git a/scripts/trustymail b/src/trustymail/cli.py old mode 100755 new mode 100644 similarity index 94% rename from scripts/trustymail rename to src/trustymail/cli.py index 248a498..71847f4 --- a/scripts/trustymail +++ b/src/trustymail/cli.py @@ -1,7 +1,5 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - """trustymail: A tool for scanning DNS mail records for evaluating security. + Usage: trustymail (INPUT ...) [options] trustymail (INPUT ...) [--output=OUTFILE] [--timeout=TIMEOUT] [--smtp-timeout=TIMEOUT] [--smtp-localhost=HOSTNAME] [--smtp-ports=PORTS] [--no-smtp-cache] [--mx] [--starttls] [--spf] [--dmarc] [--debug] [--json] [--dns=HOSTNAMES] [--psl-filename=FILENAME] [--psl-read-only] @@ -49,23 +47,27 @@ Notes: If no scan type options are specified, all are run against a given domain/input. """ +# Standard Python Libraries # Built-in imports import errno import logging import os +# Third-Party Libraries # Dependency imports import docopt # Local imports -import trustymail +from . import trustymail +from ._version import __version__ # The default ports to be checked to see if an SMTP server is listening. 
_DEFAULT_SMTP_PORTS = {25, 465, 587} def main(): - args = docopt.docopt(__doc__, version=trustymail.__version__) + """Perform a trustymail scan using the provided options.""" + args = docopt.docopt(__doc__, version=__version__) # Monkey patching trustymail to make it cache the PSL where we want if args["--psl-filename"] is not None: @@ -73,6 +75,7 @@ def main(): # Monkey patching trustymail to make the PSL cache read-only if args["--psl-read-only"]: trustymail.PublicSuffixListReadOnly = True + # cisagov Libraries import trustymail.trustymail as tmail log_level = logging.WARN @@ -162,6 +165,7 @@ def main(): def write(content, out_file): + """Write the provided content to a file after ensuring all intermediate directories exist.""" parent = os.path.dirname(out_file) if parent != "": mkdir_p(parent) @@ -174,6 +178,7 @@ def write(content, out_file): # mkdir -p in python, from: # http://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python def mkdir_p(path): + """Make a directory and all intermediate directories in its path.""" try: os.makedirs(path) except OSError as exc: # Python >2.5 @@ -181,7 +186,3 @@ def mkdir_p(path): pass else: raise - - -if __name__ == "__main__": - main() diff --git a/trustymail/domain.py b/src/trustymail/domain.py old mode 100755 new mode 100644 similarity index 86% rename from trustymail/domain.py rename to src/trustymail/domain.py index 432de5d..a8067ca --- a/trustymail/domain.py +++ b/src/trustymail/domain.py @@ -1,17 +1,20 @@ -from datetime import datetime, timedelta +"""Provide a data model for domains and some utility functions.""" + +# Standard Python Libraries from collections import OrderedDict +from datetime import datetime, timedelta from os import path, stat +from typing import Dict +# Third-Party Libraries import publicsuffix -from trustymail import PublicSuffixListReadOnly -from trustymail import PublicSuffixListFilename -from trustymail import trustymail +from . import PublicSuffixListFilename, PublicSuffixListReadOnly, trustymail def get_psl(): """ - Gets the Public Suffix List - either new, or cached in the CWD for 24 hours + Get the Public Suffix List - either new, or cached in the CWD for 24 hours. Returns ------- @@ -41,14 +44,14 @@ def download_psl(): def get_public_suffix(domain): - """Returns the public suffix of a given domain""" + """Return the public suffix of a given domain.""" public_list = get_psl() return public_list.get_public_suffix(domain) def format_list(record_list): - """Format a list into a string to increase readability in CSV""" + """Format a list into a string to increase readability in CSV.""" # record_list should only be a list, not an integer, None, or # anything else. Thus this if clause handles only empty # lists. 
This makes a "null" appear in the JSON output for @@ -60,7 +63,9 @@ def format_list(record_list): class Domain: - base_domains = {} + """Store information about a domain.""" + + base_domains: Dict[str, "Domain"] = {} def __init__( self, @@ -72,6 +77,7 @@ def __init__( smtp_cache, dns_hostnames, ): + """Retrieve information about a given domain name.""" self.domain_name = domain_name.lower() self.base_domain_name = get_public_suffix(self.domain_name) @@ -136,15 +142,13 @@ def __init__( self.ports_tested = set() def has_mail(self): + """Check if there are any mail servers associated with this domain.""" if self.mail_servers is not None: return len(self.mail_servers) > 0 return None def has_supports_smtp(self): - """ - Returns True if any of the mail servers associated with this - domain are listening and support SMTP. - """ + """Check if any of the mail servers associated with this domain are listening and support SMTP.""" result = None if len(self.starttls_results) > 0: result = ( @@ -159,10 +163,7 @@ def has_supports_smtp(self): return result def has_starttls(self): - """ - Returns True if any of the mail servers associated with this - domain are listening and support STARTTLS. - """ + """Check if any of the mail servers associated with this domain are listening and support STARTTLS.""" result = None if len(self.starttls_results) > 0: result = ( @@ -177,16 +178,19 @@ def has_starttls(self): return result def has_spf(self): + """Check if this domain has any Sender Policy Framework records.""" if self.spf is not None: return len(self.spf) > 0 return None def has_dmarc(self): + """Check if this domain has a Domain-based Message Authentication, Reporting, and Conformance record.""" if self.dmarc is not None: return len(self.dmarc) > 0 return None def add_mx_record(self, record): + """Add a mail server record for this domain.""" if self.mx_records is None: self.mx_records = [] self.mx_records.append(record) @@ -197,30 +201,35 @@ def add_mx_record(self, record): self.mail_servers.append(record.exchange.to_text().rstrip(".").lower()) def parent_has_dmarc(self): + """Check if a domain or its parent has a Domain-based Message Authentication, Reporting, and Conformance record.""" ans = self.has_dmarc() if self.base_domain: ans = self.base_domain.has_dmarc() return ans def parent_dmarc_dnssec(self): + """Get this domain or its parent's DMARC DNSSEC information.""" ans = self.dmarc_dnssec if self.base_domain: ans = self.base_domain.dmarc_dnssec return ans def parent_valid_dmarc(self): + """Check if this domain or its parent have a valid DMARC record.""" ans = self.valid_dmarc if self.base_domain: return self.base_domain.valid_dmarc return ans def parent_dmarc_results(self): + """Get this domain or its parent's DMARC information.""" ans = format_list(self.dmarc) if self.base_domain: ans = format_list(self.base_domain.dmarc) return ans def get_dmarc_policy(self): + """Get this domain or its parent's DMARC policy.""" ans = self.dmarc_policy # If the policy was never set, or isn't in the list of valid # policies, check the parents. @@ -238,6 +247,7 @@ def get_dmarc_policy(self): return ans def get_dmarc_subdomain_policy(self): + """Get this domain or its parent's DMARC subdomain policy.""" ans = self.dmarc_subdomain_policy # If the policy was never set, or isn't in the list of valid # policies, check the parents. 
@@ -249,6 +259,7 @@ def get_dmarc_subdomain_policy(self): return ans def get_dmarc_pct(self): + """Get this domain or its parent's DMARC percentage information.""" ans = self.dmarc_pct if not ans and self.base_domain: # Check the parents @@ -256,6 +267,7 @@ def get_dmarc_pct(self): return ans def get_dmarc_has_aggregate_uri(self): + """Get this domain or its parent's DMARC aggregate URI.""" ans = self.dmarc_has_aggregate_uri # If there are no aggregate URIs then check the parents. if not ans and self.base_domain: @@ -263,6 +275,7 @@ def get_dmarc_has_aggregate_uri(self): return ans def get_dmarc_has_forensic_uri(self): + """Check if this domain or its parent have a DMARC forensic URI.""" ans = self.dmarc_has_forensic_uri # If there are no forensic URIs then check the parents. if not ans and self.base_domain: @@ -270,6 +283,7 @@ def get_dmarc_has_forensic_uri(self): return ans def get_dmarc_aggregate_uris(self): + """Get this domain or its parent's DMARC aggregate URIs.""" ans = self.dmarc_aggregate_uris # If there are no aggregate URIs then check the parents. if not ans and self.base_domain: @@ -277,6 +291,7 @@ def get_dmarc_aggregate_uris(self): return ans def get_dmarc_forensic_uris(self): + """Get this domain or its parent's DMARC forensic URIs.""" ans = self.dmarc_forensic_uris # If there are no forensic URIs then check the parents. if not ans and self.base_domain: @@ -284,6 +299,7 @@ def get_dmarc_forensic_uris(self): return ans def generate_results(self): + """Generate the results for this domain.""" if len(self.starttls_results.keys()) == 0: domain_supports_smtp = None domain_supports_starttls = None diff --git a/trustymail/trustymail.py b/src/trustymail/trustymail.py old mode 100755 new mode 100644 similarity index 93% rename from trustymail/trustymail.py rename to src/trustymail/trustymail.py index 9077b25..e1506c7 --- a/trustymail/trustymail.py +++ b/src/trustymail/trustymail.py @@ -1,21 +1,24 @@ +"""Functions to check a domain's configuration for trustworthy mail.""" + +# Standard Python Libraries +from collections import OrderedDict import csv import datetime import inspect import json import logging import re -from collections import OrderedDict -import requests import smtplib import socket -import spf +# Third-Party Libraries import DNS import dns.resolver import dns.reversename +import requests +import spf - -from trustymail.domain import get_public_suffix, Domain +from .domain import Domain, get_public_suffix # A cache for SMTP scanning results _SMTP_CACHE = {} @@ -26,6 +29,7 @@ def domain_list_from_url(url): + """Get a list of domains from a provided URL.""" if not url: return [] @@ -37,6 +41,7 @@ def domain_list_from_url(url): def domain_list_from_csv(csv_file): + """Get a list of domains from a provided CSV file.""" domain_list = list(csv.reader(csv_file, delimiter=",")) # Check the headers for the word domain - use that column. @@ -60,7 +65,9 @@ def domain_list_from_csv(csv_file): def check_dnssec(domain, domain_name, record_type): - """Checks whether the domain has a record of type that is protected + """Test to see if a DNSSEC record is valid and correct. + + Checks a domain for DNSSEC whether the domain has a record of type that is protected by DNSSEC or NXDOMAIN or NoAnswer that is protected by DNSSEC. TODO: Probably does not follow redirects (CNAMEs). 
Should work on @@ -81,6 +88,7 @@ def check_dnssec(domain, domain_name, record_type): def mx_scan(resolver, domain): + """Scan a domain to see if it has any mail servers.""" try: if domain.mx_records is None: domain.mx_records = [] @@ -425,9 +433,10 @@ def get_spf_record_text(resolver, domain_name, domain, follow_redirect=False): def spf_scan(resolver, domain): - """Scan a domain to see if it supports SPF. If the domain has an SPF - record, verify that it properly handles mail sent from an IP known - not to be listed in an MX record for ANY domain. + """Scan a domain to see if it supports SPF. + + If the domain has an SPF record, verify that it properly handles mail sent from + an IP known not to be listed in an MX record for ANY domain. Parameters ---------- @@ -459,7 +468,7 @@ def spf_scan(resolver, domain): def parse_dmarc_report_uri(uri): """ - Parses a DMARC Reporting (i.e. ``rua``/``ruf)`` URI + Parse a DMARC Reporting (i.e. ``rua``/``ruf)`` URI. Notes ----- @@ -491,6 +500,7 @@ def parse_dmarc_report_uri(uri): def dmarc_scan(resolver, domain): + """Scan a domain to see if it supports DMARC.""" # dmarc records are kept in TXT records for _dmarc.domain_name. try: if domain.dmarc is None: @@ -530,7 +540,7 @@ def dmarc_scan(resolver, domain): "subdomain record does not actually exist, and the request for TXT records was " "redirected to the base domain" ) - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) domain.valid_dmarc = False # Remove excess whitespace @@ -559,7 +569,7 @@ def dmarc_scan(resolver, domain): ) if "p" not in tag_dict: msg = "Record missing required policy (p) tag" - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) domain.valid_dmarc = False elif "sp" not in tag_dict: tag_dict["sp"] = tag_dict["p"] @@ -595,20 +605,20 @@ def dmarc_scan(resolver, domain): "rua", "ruf", ]: - msg = "Unknown DMARC tag {0}".format(tag) - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + msg = "Unknown DMARC tag {}".format(tag) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) domain.valid_dmarc = False elif tag == "p": if tag_dict[tag] not in ["none", "quarantine", "reject"]: - msg = "Unknown DMARC policy {0}".format(tag) - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + msg = "Unknown DMARC policy {}".format(tag) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) domain.valid_dmarc = False else: domain.dmarc_policy = tag_dict[tag] elif tag == "sp": if tag_dict[tag] not in ["none", "quarantine", "reject"]: - msg = "Unknown DMARC subdomain policy {0}".format(tag_dict[tag]) - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + msg = "Unknown DMARC subdomain policy {}".format(tag_dict[tag]) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) domain.valid_dmarc = False else: domain.dmarc_subdomain_policy = tag_dict[tag] @@ -616,37 +626,37 @@ def dmarc_scan(resolver, domain): values = tag_dict[tag].split(":") if "0" in values and "1" in values: msg = "fo tag values 0 and 1 are mutually exclusive" - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) for value in values: if value not in ["0", "1", "d", "s"]: - msg = "Unknown DMARC fo tag value {0}".format(value) - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + msg = "Unknown DMARC fo tag value {}".format(value) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) 
domain.valid_dmarc = False elif tag == "rf": values = tag_dict[tag].split(":") for value in values: if value not in ["afrf"]: - msg = "Unknown DMARC rf tag value {0}".format(value) - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + msg = "Unknown DMARC rf tag value {}".format(value) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) domain.valid_dmarc = False elif tag == "ri": try: int(tag_dict[tag]) except ValueError: - msg = "Invalid DMARC ri tag value: {0} - must be an integer".format( + msg = "Invalid DMARC ri tag value: {} - must be an integer".format( tag_dict[tag] ) - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) domain.valid_dmarc = False elif tag == "pct": try: pct = int(tag_dict[tag]) if pct < 0 or pct > 100: msg = ( - "Error: invalid DMARC pct tag value: {0} - must be an integer between " + "Error: invalid DMARC pct tag value: {} - must be an integer between " "0 and 100".format(tag_dict[tag]) ) - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) domain.valid_dmarc = False domain.dmarc_pct = pct if pct < 100: @@ -656,10 +666,10 @@ def dmarc_scan(resolver, domain): "Warning: The DMARC pct tag value may be less than 100 (the implicit default) during deployment, but should be removed or set to 100 upon full deployment", ) except ValueError: - msg = "invalid DMARC pct tag value: {0} - must be an integer".format( + msg = "invalid DMARC pct tag value: {} - must be an integer".format( tag_dict[tag] ) - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) domain.valid_dmarc = False elif tag == "rua" or tag == "ruf": uris = tag_dict[tag].split(",") @@ -676,8 +686,8 @@ def dmarc_scan(resolver, domain): # mailto: is currently the only type of DMARC URI parsed_uri = parse_dmarc_report_uri(uri) if parsed_uri is None: - msg = "Error: {0} is an invalid DMARC URI".format(uri) - handle_syntax_error("[DMARC]", domain, "{0}".format(msg)) + msg = "Error: {} is an invalid DMARC URI".format(uri) + handle_syntax_error("[DMARC]", domain, "{}".format(msg)) domain.valid_dmarc = False else: if tag == "rua": @@ -690,11 +700,11 @@ def dmarc_scan(resolver, domain): get_public_suffix(email_domain).lower() != domain.base_domain_name.lower() ): - target = "{0}._report._dmarc.{1}".format( + target = "{}._report._dmarc.{}".format( domain.domain_name, email_domain ) error_message = ( - "{0} does not indicate that it accepts DMARC reports about {1} - " + "{} does not indicate that it accepts DMARC reports about {} - " "https://tools.ietf.org" "/html/rfc7489#section-7.1".format( email_domain, domain.domain_name @@ -710,7 +720,7 @@ def dmarc_scan(resolver, domain): handle_error( "[DMARC]", domain, - "{0}".format(error_message), + "{}".format(error_message), ) domain.dmarc_reports_address_error = True domain.valid_dmarc = False @@ -721,7 +731,7 @@ def dmarc_scan(resolver, domain): dns.exception.Timeout, ): handle_syntax_error( - "[DMARC]", domain, "{0}".format(error_message) + "[DMARC]", domain, "{}".format(error_message) ) domain.dmarc_reports_address_error = True domain.valid_dmarc = False @@ -738,7 +748,7 @@ def dmarc_scan(resolver, domain): "[DMARC]", domain, "The domain for reporting " - "address {0} does not have any " + "address {} does not have any " "MX records".format(email_address), ) domain.valid_dmarc = False @@ -772,6 +782,7 @@ def dmarc_scan(resolver, domain): def 
find_host_from_ip(resolver, ip_addr):
+    """Find the host name for a given IP address."""
    # Use TCP, since we care about the content and correctness of the records
    # more than whether their records fit in a single UDP packet.
    hostname, _ = resolver.query(dns.reversename.from_address(ip_addr), "PTR", tcp=True)
@@ -788,6 +799,7 @@ def scan(
    scan_types,
    dns_hostnames,
):
+    """Parse a domain's DNS information for mail related records."""
    #
    # Configure the dnspython library
    #
@@ -847,7 +859,7 @@ def scan(
        dns_hostnames,
    )
-    logging.debug("[{0}]".format(domain_name.lower()))
+    logging.debug("[{}]".format(domain_name.lower()))
    if scan_types["mx"] and domain.is_live:
        mx_scan(resolver, domain)
@@ -877,9 +889,10 @@ def scan(
def handle_error(prefix, domain, error, syntax_error=False):
-    """Handle an error by logging via the Python logging library and
-    recording it in the debug_info or syntax_error members of the
-    trustymail.Domain object.
+    """Handle the provided error by logging a message and storing it in the Domain object.
+
+    Logging is performed via the Python logging library, and the error is recorded
+    in the debug_info or syntax_error members of the trustymail.Domain object.
    Since the "Debug Info" and "Syntax Error" fields in the CSV output
    of trustymail come directly from the debug_info and syntax_error
@@ -945,11 +958,12 @@ def handle_error(prefix, domain, error, syntax_error=False):
def handle_syntax_error(prefix, domain, error):
-    """Convenience method for handle_error"""
+    """Handle a syntax error by passing it to handle_error()."""
    handle_error(prefix, domain, error, syntax_error=True)
def generate_csv(domains, file_name):
+    """Generate a CSV file with the given domain information."""
    with open(file_name, "w", encoding="utf-8", newline="\n") as output_file:
        writer = csv.DictWriter(
            output_file, fieldnames=domains[0].generate_results().keys()
@@ -964,6 +978,7 @@ def generate_csv(domains, file_name):
def generate_json(domains):
+    """Generate a JSON string with the given domain information."""
    output = []
    for domain in domains:
        output.append(domain.generate_results())
@@ -973,6 +988,7 @@ def generate_json(domains):
# Taken from pshtt to keep formatting similar
def format_datetime(obj):
+    """Format the provided datetime information."""
    if isinstance(obj, datetime.date):
        return obj.isoformat()
    elif isinstance(obj, str):
@@ -982,7 +998,7 @@ def format_datetime(obj):
def remove_quotes(txt_record):
-    """Remove double quotes and contatenate strings in a DNS TXT record
+    """Remove double quotes and concatenate strings in a DNS TXT record.
    A DNS TXT record can contain multiple double-quoted strings, and
    in that case the client has to remove the quotes and concatenate the
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..39e2be2
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,29 @@
+"""pytest plugin configuration.
+ +https://docs.pytest.org/en/latest/writing_plugins.html#conftest-py-plugins +""" +# Third-Party Libraries +import pytest + + +def pytest_addoption(parser): + """Add new commandline options to pytest.""" + parser.addoption( + "--runslow", action="store_true", default=False, help="run slow tests" + ) + + +def pytest_configure(config): + """Register new markers.""" + config.addinivalue_line("markers", "slow: mark test as slow") + + +def pytest_collection_modifyitems(config, items): + """Modify collected tests based on custom marks and commandline options.""" + if config.getoption("--runslow"): + # --runslow given in cli: do not skip slow tests + return + skip_slow = pytest.mark.skip(reason="need --runslow option to run") + for item in items: + if "slow" in item.keywords: + item.add_marker(skip_slow) diff --git a/tests/test_trustymail.py b/tests/test_trustymail.py index 8bbed18..90f3794 100644 --- a/tests/test_trustymail.py +++ b/tests/test_trustymail.py @@ -1,6 +1,11 @@ +"""Tests for the trustymail module.""" +# Standard Python Libraries import unittest class TestLiveliness(unittest.TestCase): + """Test the liveliness of a domain.""" + def test_domain_list_parsing(self): + """Test that a domain list is correctly parsed.""" pass diff --git a/tox.ini b/tox.ini deleted file mode 100644 index a2053d9..0000000 --- a/tox.ini +++ /dev/null @@ -1,15 +0,0 @@ -[tox] -envlist = py36,py37,py38,flake8 -skip_missing_interpreters = true -; usedevelop = true - -[testenv] -deps = - pytest-cov - pytest - coveralls -commands = pytest --cov={envsitepackagesdir}/trustymail - -[testenv:flake8] -deps = flake8 -commands = flake8 diff --git a/travis_scripts/build_docker_image.sh b/travis_scripts/build_docker_image.sh deleted file mode 100644 index 862f220..0000000 --- a/travis_scripts/build_docker_image.sh +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env bash - -set -o nounset -set -o errexit -set -o pipefail - -version=$(./bump_version.sh show) -docker build -t "$IMAGE_NAME":"$version" . diff --git a/travis_scripts/deploy_to_docker_hub.sh b/travis_scripts/deploy_to_docker_hub.sh deleted file mode 100644 index 2be3e20..0000000 --- a/travis_scripts/deploy_to_docker_hub.sh +++ /dev/null @@ -1,9 +0,0 @@ -#!/usr/bin/env bash - -set -o nounset -set -o errexit -set -o pipefail - -echo "$DOCKER_PW" | docker login -u "$DOCKER_USER" --password-stdin -version=$(./bump_version.sh show) -docker push "$IMAGE_NAME":"$version" diff --git a/trustymail/__init__.py b/trustymail/__init__.py deleted file mode 100644 index 5efd435..0000000 --- a/trustymail/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from __future__ import unicode_literals, absolute_import, print_function - -__version__ = "0.7.5" - -PublicSuffixListFilename = "public_suffix_list.dat" -PublicSuffixListReadOnly = False
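
The examples below are illustrative sketches only; they are not part of the patch above.

The embedded Python snippet in setup-env reads .github/lineage.yml and prints the `git remote` commands that configure the upstream "lineage" repositories. The sketch below feeds that same version-"1" parsing logic a hypothetical lineage file; the parent name ("skeleton") and remote URL are placeholders, not values taken from this repository's actual lineage configuration.

# Illustrative sketch: mirrors the version-"1" branch of the lineage parser in setup-env.
import yaml

EXAMPLE_LINEAGE_YML = """
version: "1"
lineage:
  skeleton:
    remote-url: https://github.com/cisagov/skeleton-python-library.git
"""

lineage = yaml.safe_load(EXAMPLE_LINEAGE_YML)
if lineage["version"] == "1":
    for parent_name, v in lineage["lineage"].items():
        print(f"git remote add {parent_name} {v['remote-url']};")
        print(f"git remote set-url --push {parent_name} no_push;")
# Expected output:
#   git remote add skeleton https://github.com/cisagov/skeleton-python-library.git;
#   git remote set-url --push skeleton no_push;

Note that setup-env disables errexit around this step on purpose: re-running the script when the remotes already exist is expected to be harmless.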
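The new tests/conftest.py registers a `slow` marker and a `--runslow` command line option. A hypothetical test file (the file name and test body below are examples, not part of this patch) would opt into that machinery like this:

# tests/test_scan_slow.py -- hypothetical example, not included in this patch.
"""Show how a test opts into the `slow` marker registered in tests/conftest.py."""
# Third-Party Libraries
import pytest


@pytest.mark.slow
def test_full_live_scan():
    """Run only when pytest is invoked with --runslow."""
    assert True

Running plain `pytest` skips this test with the reason "need --runslow option to run"; running `pytest --runslow` executes it.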
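Finally, the updated remove_quotes() docstring notes that a DNS TXT record may arrive as several double-quoted strings that the client must unquote and concatenate. A minimal sketch of that described behavior (my own illustration, not the function's actual implementation) is:

# Illustrative sketch of the behavior described in the remove_quotes() docstring.
import re


def concatenate_txt_strings(txt_record: str) -> str:
    """Drop the double quotes and join the quoted chunks of a TXT record."""
    # e.g. '"v=DMARC1; p=none; " "rua=mailto:reports@example.com"'
    # becomes 'v=DMARC1; p=none; rua=mailto:reports@example.com'
    return "".join(re.findall(r'"([^"]*)"', txt_record))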