Commit

Merge pull request #1355 from opendatacube/release-1.8.9
Release 1.8.9
omad committed Nov 17, 2022
2 parents e0dfd94 + 64ef235 commit e833f5d
Showing 109 changed files with 1,533 additions and 726 deletions.
2 changes: 1 addition & 1 deletion .dockerignore
@@ -79,4 +79,4 @@ docs/notebooks/

#Local Visual Studio Code configurations
.vscode/
-.env
+.env
105 changes: 105 additions & 0 deletions .doctor-rst.yaml
@@ -0,0 +1,105 @@
rules:
    avoid_repetetive_words: ~
    blank_line_after_anchor: ~
    blank_line_after_directive: ~
    blank_line_before_directive: ~
    composer_dev_option_not_at_the_end: ~
    correct_code_block_directive_based_on_the_content: ~
    deprecated_directive_should_have_version: ~
    ensure_exactly_one_space_between_link_definition_and_link: ~
    ensure_link_definition_contains_valid_url: ~
    ensure_order_of_code_blocks_in_configuration_block: ~
    extend_abstract_controller: ~
    # extension_xlf_instead_of_xliff: ~
    indention: ~
    lowercase_as_in_use_statements: ~
    max_blank_lines:
        max: 2
    max_colons: ~
    no_app_console: ~
    no_blank_line_after_filepath_in_php_code_block: ~
    no_blank_line_after_filepath_in_twig_code_block: ~
    no_blank_line_after_filepath_in_xml_code_block: ~
    no_blank_line_after_filepath_in_yaml_code_block: ~
    no_brackets_in_method_directive: ~
    no_composer_req: ~
    no_directive_after_shorthand: ~
    no_explicit_use_of_code_block_php: ~
    no_inheritdoc: ~
    no_namespace_after_use_statements: ~
    no_php_open_tag_in_code_block_php_directive: ~
    no_space_before_self_xml_closing_tag: ~
    only_backslashes_in_namespace_in_php_code_block: ~
    only_backslashes_in_use_statements_in_php_code_block: ~
    ordered_use_statements: ~
    php_prefix_before_bin_console: ~
    replace_code_block_types: ~
    short_array_syntax: ~
    space_between_label_and_link_in_doc: ~
    space_between_label_and_link_in_ref: ~
    string_replacement: ~
    typo: ~
    unused_links: ~
    use_deprecated_directive_instead_of_versionadded: ~
    use_https_xsd_urls: ~
    valid_inline_highlighted_namespaces: ~
    valid_use_statements: ~
    versionadded_directive_should_have_version: ~
    yaml_instead_of_yml_suffix: ~
    yarn_dev_option_at_the_end: ~
    # no_app_bundle: ~

    # master
    versionadded_directive_major_version:
        major_version: 6

    versionadded_directive_min_version:
        min_version: '6.0'

    deprecated_directive_major_version:
        major_version: 6

    deprecated_directive_min_version:
        min_version: '6.0'

# do not report as violation
whitelist:
    regex:
        - '/FOSUserBundle(.*)\.yml/'
        - '/``.yml``/'
        - '/(.*)\.orm\.yml/' # currently DoctrineBundle only supports .yml
        - '/rst-class/'
        - /docker-compose\.yml/
    lines:
        - 'in config files, so the old ``app/config/config_dev.yml`` goes to'
        - '#. The most important config file is ``app/config/services.yml``, which now is'
        - 'code in production without a proxy, it becomes trivially easy to abuse your'
        - '.. _`EasyDeployBundle`: https://github.com/EasyCorp/easy-deploy-bundle'
        - 'The bin/console Command'
        - '# username is your full Gmail or Google Apps email address'
        - '.. _`LDAP injection`: http://projects.webappsec.org/w/page/13246947/LDAP%20Injection'
        - '.. versionadded:: 1.9.0' # Encore
        - '.. versionadded:: 0.28.4' # Encore
        - '.. versionadded:: 2.4.0' # SwiftMailer
        - '.. versionadded:: 1.30' # Twig
        - '.. versionadded:: 1.35' # Twig
        - '.. versionadded:: 1.11' # Messenger (Middleware / DoctrineBundle)
        - '.. versionadded:: 1.18' # Flex in setup/upgrade_minor.rst
        - '.. versionadded:: 1.0.0' # Encore
        - '0 => 123' # assertion for var_dumper - components/var_dumper.rst
        - '1 => "foo"' # assertion for var_dumper - components/var_dumper.rst
        - '123,' # assertion for var_dumper - components/var_dumper.rst
        - '"foo",' # assertion for var_dumper - components/var_dumper.rst
        - '$var .= "Because of this `\xE9` octet (\\xE9),\n";'
        - "`Deploying Symfony 4 Apps on Heroku`_."
        - ".. _`Deploying Symfony 4 Apps on Heroku`: https://devcenter.heroku.com/articles/deploying-symfony4"
        - "// 224, 165, 141, 224, 164, 164, 224, 165, 135])"
        - '.. versionadded:: 0.2' # MercureBundle
        - 'provides a ``loginUser()`` method to simulate logging in in your functional'
        - '.. code-block:: twig'
        - '.. versionadded:: 3.6' # MonologBundle
        - '// bin/console'
        - 'End to End Tests (E2E)'
        - '.. code-block:: php'
        - '.. _`a feature to test applications using Mercure`: https://github.com/symfony/panther#creating-isolated-browsers-to-test-apps-using-mercure-or-websocket'
        - '.. End to End Tests (E2E)'
56 changes: 56 additions & 0 deletions .github/workflows/doc-qa.yaml
@@ -0,0 +1,56 @@
name: Doc QA
on:
  # Triggers the workflow on push or pull request events but only for the main branch
  push:
    branches:
      - 'develop'
  pull_request:
    branches:
      - 'develop'

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

jobs:
  pyspell:
    runs-on: ubuntu-latest
    steps:
      # Spellcheck
      - uses: actions/checkout@v1
        with:
          fetch-depth: 0

      - uses: igsekor/pyspelling-any@v1.0.4
        name: Spellcheck

  doctor-rst:
    name: Lint (DOCtor-RST)
    runs-on: ubuntu-latest
    steps:
      - name: "Checkout"
        uses: actions/checkout@v2

      - name: "Create cache dir"
        run: mkdir .cache

      - name: "Extract base branch name"
        run: echo "branch=${GITHUB_BASE_REF:-${GITHUB_REF##*/}}" >> $GITHUB_OUTPUT
        id: extract_base_branch

      - name: "Cache DOCtor-RST"
        uses: actions/cache@v2
        with:
          path: .cache
          key: ${{ runner.os }}-doctor-rst-${{ steps.extract_base_branch.outputs.branch }}

      - name: "Run DOCtor-RST"
        uses: docker://oskarstark/doctor-rst
        with:
          args: --short --error-format=github --cache-file=/github/workspace/.cache/doctor-rst.cache

  documentation-preview:
    runs-on: ubuntu-latest
    steps:
      - uses: readthedocs/actions/preview@v1
        with:
          project-slug: "datacube-core"
4 changes: 2 additions & 2 deletions .github/workflows/docker-test-runner.yml
@@ -35,7 +35,7 @@ jobs:
      - name: Config
        id: cfg
        run: |
-          echo ::set-output name=docker_image::${ORG}/${IMAGE}:latest
+          echo "docker_image=${ORG}/${IMAGE}:latest" >> $GITHUB_OUTPUT
      # This is a separate action that sets up the buildx runner
      - name: Set up Docker Buildx
@@ -95,7 +95,7 @@ jobs:
          if [ -n "${{ secrets.DockerPassword }}" ]; then
            echo "Login to DockerHub as ${DOCKER_USER}"
            echo "${{ secrets.DockerPassword }}" | docker login -u "${DOCKER_USER}" --password-stdin
-           echo "::set-output name=logged_in::yes"
+           echo "logged_in=yes" >> $GITHUB_OUTPUT
          else
            echo "Set DockerPassword secret to push to docker"
          fi
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
@@ -59,7 +59,7 @@ jobs:
          esac
          for x in primary push_pypi push_test_pypi push_dea; do
-            echo "::set-output name=${x}::${!x}"
+            echo "${x}=${!x}" >> $GITHUB_OUTPUT
          done

      - name: Pull Docker
5 changes: 3 additions & 2 deletions .pre-commit-config.yaml
@@ -1,6 +1,6 @@
repos:
  - repo: https://github.com/adrienverge/yamllint.git
-   rev: v1.19.0
+   rev: v1.28.0
    hooks:
      - id: yamllint
  - repo: https://github.com/pre-commit/pre-commit-hooks
@@ -15,10 +15,11 @@ repos:
      - id: debug-statements
      - id: name-tests-test
        args: ['--django']
+       exclude: ^tests/drivers/fail_drivers
      - id: requirements-txt-fixer
      - id: check-added-large-files
      - id: check-merge-conflict
  - repo: https://github.com/pre-commit/mirrors-pylint
-   rev: v2.4.4 # Use the sha / tag you want to point at
+   rev: v3.0.0a5 # Use the sha / tag you want to point at
    hooks:
      - id: pylint
4 changes: 0 additions & 4 deletions .yamllint
@@ -28,7 +28,3 @@ rules:
  colons: disable
  comments: disable
  comments-indentation: disable
-
-
-
-
55 changes: 29 additions & 26 deletions README.rst
@@ -1,7 +1,17 @@
Open Data Cube Core
===================

-|Build Status| |Coverage Status| |Documentation Status|
+.. image:: https://github.com/opendatacube/datacube-core/workflows/build/badge.svg
+    :alt: Build Status
+    :target: https://github.com/opendatacube/datacube-core/actions
+
+.. image:: https://codecov.io/gh/opendatacube/datacube-core/branch/develop/graph/badge.svg
+    :alt: Coverage Status
+    :target: https://codecov.io/gh/opendatacube/datacube-core
+
+.. image:: https://readthedocs.org/projects/datacube-core/badge/?version=latest
+    :alt: Documentation Status
+    :target: http://datacube-core.readthedocs.org/en/latest/

Overview
========
@@ -96,6 +106,7 @@ to ``./check-code.sh`` script.
To run individual tests in a docker container

::
+
    docker run -ti -v /home/ubuntu/datacube-core:/code opendatacube/datacube-tests:latest pytest integration_tests/test_filename.py::test_function_name


@@ -108,34 +119,26 @@ Install dependencies:

::

-    sudo apt-get update
-    sudo apt-get install -y \
-      autoconf automake build-essential make cmake \
-      graphviz \
-      python3-venv \
-      python3-dev \
-      libpq-dev \
-      libyaml-dev \
-      libnetcdf-dev \
-      libudunits2-dev
+    sudo apt-get update
+    sudo apt-get install -y \
+      autoconf automake build-essential make cmake \
+      graphviz \
+      python3-venv \
+      python3-dev \
+      libpq-dev \
+      libyaml-dev \
+      libnetcdf-dev \
+      libudunits2-dev


Build the python virtual environment:

::

-    pyenv="${HOME}/.envs/odc" # Change to suit your needs
-    mkdir -p "${pyenv}"
-    python3 -m venv "${pyenv}"
-    source "${pyenv}/bin/activate"
-    pip install -U pip wheel cython numpy
-    pip install -e '.[dev]'
-    pip install flake8 mypy pylint autoflake black
-
-
-.. |Build Status| image:: https://github.com/opendatacube/datacube-core/workflows/build/badge.svg
-   :target: https://github.com/opendatacube/datacube-core/actions
-.. |Coverage Status| image:: https://codecov.io/gh/opendatacube/datacube-core/branch/develop/graph/badge.svg
-   :target: https://codecov.io/gh/opendatacube/datacube-core
-.. |Documentation Status| image:: https://readthedocs.org/projects/datacube-core/badge/?version=latest
-   :target: http://datacube-core.readthedocs.org/en/latest/
+    pyenv="${HOME}/.envs/odc" # Change to suit your needs
+    mkdir -p "${pyenv}"
+    python3 -m venv "${pyenv}"
+    source "${pyenv}/bin/activate"
+    pip install -U pip wheel cython numpy
+    pip install -e '.[dev]'
+    pip install flake8 mypy pylint autoflake black
18 changes: 9 additions & 9 deletions datacube/api/core.py
@@ -626,7 +626,7 @@ def mk_data_var(m, shape, coords, dims, data_func):

    @staticmethod
    def _dask_load(sources, geobox, measurements, dask_chunks,
-                  skip_broken_datasets=False, extra_dims=None):
+                  skip_broken_datasets=False, extra_dims=None, patch_url=None):
        chunk_sizes = _calculate_chunk_sizes(sources, geobox, dask_chunks, extra_dims)
        needed_irr_chunks = chunk_sizes[0]
        if extra_dims:
@@ -656,7 +656,8 @@ def data_func(measurement, shape):
                measurement,
                chunks=chunks,
                skip_broken_datasets=skip_broken_datasets,
-               extra_dims=extra_dims)
+               extra_dims=extra_dims,
+               patch_url=patch_url)

        return Datacube.create_storage(sources.coords, geobox, measurements, data_func, extra_dims)

@@ -774,12 +775,10 @@ def load_data(sources, geobox, measurements, resampling=None,
        measurements = per_band_load_data_settings(measurements, resampling=resampling, fuse_func=fuse_func)

        if dask_chunks is not None:
-           if patch_url is not None:
-               # TODO: url mangler for Dask?
-               raise ValueError("The patch_url arguments is not currently supported for Dask loading.")
            return Datacube._dask_load(sources, geobox, measurements, dask_chunks,
                                       skip_broken_datasets=skip_broken_datasets,
-                                      extra_dims=extra_dims)
+                                      extra_dims=extra_dims,
+                                      patch_url=patch_url)
        else:
            return Datacube._xr_load(sources, geobox, measurements,
                                     skip_broken_datasets=skip_broken_datasets,
@@ -997,7 +996,8 @@ def _make_dask_array(chunked_srcs,
                     measurement,
                     chunks,
                     skip_broken_datasets=False,
-                    extra_dims=None):
+                    extra_dims=None,
+                    patch_url=None):
    dsk = dsk.copy()  # this contains mapping from dataset id to dataset object

    token = uuid.uuid4().hex
@@ -1052,11 +1052,11 @@ def _mk_empty(shape: Tuple[int, ...]) -> str:
            # Do extra_dim subsetting here
            index_subset = extra_dims.measurements_index(measurement.extra_dim)
            for result_index, extra_dim_index in enumerate(range(*index_subset)):
-               dsk[key_prefix + (result_index,) + idx] = val + (extra_dim_index,)
+               dsk[key_prefix + (result_index,) + idx] = val + (extra_dim_index, patch_url)
        else:
            # Get extra_dim index if available
            extra_dim_index = measurement.get('extra_dim_index', None)
-           dsk[key_prefix + idx] = val + (extra_dim_index,)
+           dsk[key_prefix + idx] = val + (extra_dim_index, patch_url)

    y_shapes = [grid_chunks[0]]*gbt.shape[0]
    x_shapes = [grid_chunks[1]]*gbt.shape[1]
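Taken together, the core.py changes drop the old ValueError guard and thread patch_url from load_data() through _dask_load() into every task tuple, so URL rewriting (for example, signing requests to protected object storage) now also happens when Dask chunks are computed. A minimal usage sketch, assuming patch_url is a callable applied to each band URL before reading; the product name, extent and signer below are hypothetical:

    import datacube

    def sign_url(url: str) -> str:
        # Hypothetical patcher: append a short-lived access token.
        return url + "?token=placeholder"

    dc = datacube.Datacube()

    # Before 1.8.9 this combination raised ValueError; now the callable
    # travels inside the Dask graph and runs at compute time.
    lazy = dc.load(
        product="example_product",          # hypothetical product
        latitude=(-35.3, -35.2),            # hypothetical extent
        longitude=(149.0, 149.1),
        measurements=["red"],
        dask_chunks={"x": 2048, "y": 2048},
        patch_url=sign_url,
    )
    result = lazy.red.compute()             # URLs patched at read time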
2 changes: 1 addition & 1 deletion datacube/drivers/postgis/_fields.py
@@ -491,7 +491,7 @@ def __init__(self, field, low_value, high_value, _range_class):
    @property
    def alchemy_expression(self):
        return self.field.alchemy_expression.overlaps(
-           self._range_class(self.low_value, self.high_value)
+           self._range_class(self.low_value, self.high_value, bounds='[]')
        )


2 changes: 1 addition & 1 deletion datacube/drivers/postgis/_spatial.py
@@ -99,7 +99,7 @@ def spindex_for_epsg(epsg: int) -> Type[SpatialIndex]:

def spindex_for_crs(crs: CRS) -> Type[SpatialIndex]:
    """Return ORM class of a SpatialIndex for CRS - dynamically creating if necessary"""
-   if not (str(crs).startswith('EPSG') and crs.epsg):
+   if not str(crs).startswith("EPSG:") and crs.epsg is None:
        # Postgis identifies CRSs by a numeric "SRID" which is equivalent to EPSG number.
        _LOG.error("Cannot create a postgis spatial index for a non-EPSG-style CRS.")
        return None
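The reworked condition in spindex_for_crs() rejects a CRS only when it both lacks an "EPSG:" name and has no resolvable EPSG code; the old conjunctive form required both, so an EPSG-equivalent CRS expressed in another notation was refused an index. A small illustration of the new predicate, assuming datacube's CRS wraps pyproj and exposes .epsg as the resolved code or None:

    from datacube.utils.geometry import CRS

    def spatially_indexable(crs: CRS) -> bool:
        # Mirror of the new guard: usable when named EPSG:... or when an
        # EPSG code can be resolved from the definition.
        return str(crs).startswith("EPSG:") or crs.epsg is not None

    print(spatially_indexable(CRS("EPSG:4326")))   # True
    print(spatially_indexable(CRS("ESRI:54009")))  # False - no EPSG code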
2 changes: 1 addition & 1 deletion datacube/drivers/postgres/_fields.py
@@ -491,7 +491,7 @@ def __init__(self, field, low_value, high_value, _range_class):
    @property
    def alchemy_expression(self):
        return self.field.alchemy_expression.overlaps(
-           self._range_class(self.low_value, self.high_value)
+           self._range_class(self.low_value, self.high_value, bounds="[]")
        )


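Both the postgres and postgis drivers receive the same one-line change: the range handed to overlaps() is now built with explicitly inclusive bounds. psycopg2-style range types default to the half-open '[)' form, so a search whose upper limit coincided exactly with a dataset's value could previously miss it. A minimal illustration of the difference, assuming the driver's _range_class follows psycopg2 Range semantics:

    from psycopg2.extras import NumericRange

    half_open = NumericRange(1, 5)               # default bounds '[)'
    inclusive = NumericRange(1, 5, bounds="[]")  # both endpoints included

    # A value sitting exactly on the upper bound only falls inside
    # the inclusive range:
    print(5 in half_open)   # False
    print(5 in inclusive)   # True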
2 changes: 1 addition & 1 deletion datacube/index/default-metadata-types.yaml
@@ -310,4 +310,4 @@ dataset:
        max_offset:
          - [image, satellite_ref_point_end, y]
          # If an end is not specified, use the start.
-         - [image, satellite_ref_point_start, y]
+         - [image, satellite_ref_point_start, y]
