From 9ce4ec201aaff490f4746891e3c3bb4ba5f6afc4 Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Tue, 11 Apr 2017 11:26:39 +1000 Subject: [PATCH 01/12] Add remote->local replication tool --- datacube_apps/simple_replica.py | 206 ++++++++++++++++++++++++++++++++ 1 file changed, 206 insertions(+) create mode 100644 datacube_apps/simple_replica.py diff --git a/datacube_apps/simple_replica.py b/datacube_apps/simple_replica.py new file mode 100644 index 0000000000..d97718c73d --- /dev/null +++ b/datacube_apps/simple_replica.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python +""" +A Simple Data Cube Replication Tool + +Connects to a remote Data Cube via SSH, and downloads database records and files to a local file system and database. + +Provide a configuration file in ~/.datacube.replication.conf in YAML format, or specify an alternate location +on the command line. + +For example, the following config will download 3 PQ products for the specified time and space range. Queries +are specified the same as when using the API to search for datasets. + +.. code-block:: yaml + + remote_host: raijin.nci.org.auo + remote_user: dra547 + db_password: xxxxxxxxxxxx + remote_dir: /g/data/ + local_dir: C:/datacube/ + + replicated_data: + - product: ls5_pq_albers + crs: EPSG:3577 + x: [1200000, 1300000] + y: [-4200000, -4300000] + time: [2008-01-01, 2010-01-01] + + - product: ls7_pq_albers + crs: EPSG:3577 + x: [1200000, 1300000] + y: [-4200000, -4300000] + time: [2008-01-01, 2010-01-01] + + - product: ls8_pq_albers + crs: EPSG:3577 + x: [1200000, 1300000] + y: [-4200000, -4300000] + time: [2008-01-01, 2010-01-01] + +Requirements, limitations and assumptions: + +- Remote datacube files and database are accessed via an SSH host that can be logged into without a password, ie. + with a local SSH key agent. +- The remote datacube index must be at the same version as the local codebase. 
+- + +""" + +import logging +from configparser import ConfigParser +from pathlib import Path + +import click +import yaml +from paramiko import SSHClient, WarningPolicy +from sshtunnel import SSHTunnelForwarder +from tqdm import tqdm + +from datacube import Datacube +from datacube.config import LocalConfig, _DEFAULT_CONF +from datacube.index import index_connect +from datacube.ui.click import global_cli_options + +LOG = logging.getLogger('simple_replicator') + +DEFAULT_REPLICATION_CONFIG = str(Path('~/.datacube.replication.conf').expanduser()) + + +def uri_to_path(uri): + return uri.replace('file://', '') + + +class DatacubeReplicator: + def __init__(self, config): + self.remote_host = config['remote_host'] + self.remote_user = config['remote_user'] + self.db_password = config['db_password'] + self.remote_dir = config['remote_dir'] + self.local_dir = config['local_dir'] + self.replication_defns = config['replicated_data'] + + self.client = None + self.sftp = None + self.tunnel = None + self.remote_dc_config = None + self.remote_dc = None + self.local_index = index_connect() + + def run(self): + self.connect() + self.read_remote_config() + self.connect_to_db() + self.replicate_all() + self.disconnect() + + def connect(self): + client = SSHClient() + client.load_system_host_keys() + client.set_missing_host_key_policy(WarningPolicy()) + client.connect(hostname=self.remote_host, username=self.remote_user) + + LOG.debug(client) + self.client = client + self.sftp = client.open_sftp() + + def disconnect(self): + self.client.close() + self.tunnel.stop() + + def read_remote_config(self): + remote_config = ConfigParser() + remote_config.read_string(_DEFAULT_CONF) + with self.sftp.open('.datacube.conf') as fin: + remote_config.read_file(fin) + self.remote_dc_config = LocalConfig(remote_config) + + def connect_to_db(self): + self.tunnel = SSHTunnelForwarder( + self.remote_host, + ssh_username=self.remote_user, + remote_bind_address=(self.remote_dc_config.db_hostname, 
int(self.remote_dc_config.db_port))) + self.tunnel.start() + + self.remote_dc_config._config['datacube']['db_hostname'] = '127.0.0.1' + self.remote_dc_config._config['datacube']['db_port'] = str(self.tunnel.local_bind_port) + self.remote_dc_config._config['datacube']['db_username'] = self.remote_user + self.remote_dc_config._config['datacube']['db_password'] = self.db_password + + # This requires the password from somewhere + # Parsing it out of .pgpass sounds error prone and fragile + # Lets put it in the configuration for now + LOG.debug('Remote configuration loaded %s', self.remote_dc_config) + + self.remote_dc = Datacube(config=self.remote_dc_config) + + def replicate_all(self): + + for defn in tqdm(self.replication_defns, 'Replicating products'): + self.replicate(defn) + + def replicate_all_products(self): + products = self.remote_dc.index.products.get_all() + for product in products: + self.local_index.products.add(product) + + def replicate(self, defn): + datasets = list(self.remote_dc.find_datasets(**defn)) + + if not datasets: + LOG.info('No remote datasets found matching %s', defn) + return + + # TODO: use generator not list + product = datasets[0].type + LOG.info('Ensuring remote product is in local index. 
%s', product) + + self.local_index.products.add(product) + + for dataset in tqdm(datasets, 'Datasets'): + # dataset = remote_dc.index.datasets.get(dataset.id, include_sources=True) + # We would need to pull the parent products down too + # TODO: Include parent source datasets + product definitions + dataset.sources = {} + + LOG.debug('Replicating dataset %s', dataset) + remote_path = uri_to_path(dataset.local_uri) + local_path = self.remote_to_local(uri_to_path(dataset.local_uri)) + + # Ensure local path exists + Path(local_path).parent.mkdir(parents=True, exist_ok=True) + + # Download file + self.sftp.get(remote_path, local_path) + + # Add to local index + dataset.local_uri = 'file://' + local_path + self.local_index.datasets.add(dataset) + LOG.debug('Downloaded to %s', local_path) + + def remote_to_local(self, remote): + return remote.replace(self.remote_dir, self.local_dir) + + +def replicate_data(config): + replicator = DatacubeReplicator(config) + replicator.run() + + +@click.command() +@click.argument('config_path', required=False) +@global_cli_options +def replicate(config_path): + """ + Connect to a remote Datacube, and replicate data locally. 
+ """ + if config_path is None: + config_path = DEFAULT_REPLICATION_CONFIG + LOG.debug('Config path: %s', config_path) + with open(config_path) as fin: + config = yaml.load(fin) + + replicate_data(config) + + +if __name__ == '__main__': + replicate() From c19a70b671c438a1ca5555eb856c9b4026319506 Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Fri, 28 Apr 2017 17:52:55 +1000 Subject: [PATCH 02/12] Improve RTD syntax highlighting --- docs/_templates/layout.html | 7 +++++++ docs/conf.py | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 docs/_templates/layout.html diff --git a/docs/_templates/layout.html b/docs/_templates/layout.html new file mode 100644 index 0000000000..5e9a19d27e --- /dev/null +++ b/docs/_templates/layout.html @@ -0,0 +1,7 @@ +{# Fix to allow changing the syntax highlighting colour scheme. + See https://github.com/rtfd/sphinx_rtd_theme/issues/166 for more info #} +{# layout.html #} +{# Import the theme's layout. #} +{% extends "!layout.html" %} + +{% set css_files = css_files + ['_static/pygments.css'] %} diff --git a/docs/conf.py b/docs/conf.py index 7b53e79888..d9dc9a2f44 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -193,7 +193,7 @@ # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +html_use_smartypants = True # Custom sidebar templates, maps document names to template names. 
#html_sidebars = {} From 5899114b9b0ef97e7d2242a2c741f991d589c8bc Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Fri, 28 Apr 2017 17:53:45 +1000 Subject: [PATCH 03/12] Improve docs makefiles - Add make.bat for building on windows - Add target for using sphinx-autobuild --- docs/Makefile | 4 ++++ docs/make.bat | 37 +++++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+) create mode 100644 docs/make.bat diff --git a/docs/Makefile b/docs/Makefile index d606e113c2..608b951379 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -175,3 +175,7 @@ pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." + +livehtml: + sphinx-autobuild -b html -p 8123 --ignore "*_tmp_*" --ignore "*_old_*" $(ALLSPHINXOPTS) $(BUILDDIR)/html + diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000000..3f2b992613 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,37 @@ +@ECHO OFF + +pushd %~dp0 + +activate agdc +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build +set SPHINXPROJ=FooBar + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. 
+ echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% + +:end +popd From 3ce69d389600b4ef1a3ac3c0d809777d85c97682 Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Fri, 28 Apr 2017 17:54:22 +1000 Subject: [PATCH 04/12] Install datacube-simple-replica tool with datacube-core --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index ff4506dadc..42711d392f 100755 --- a/setup.py +++ b/setup.py @@ -88,7 +88,8 @@ 'datacube = datacube.scripts.cli_app:cli', 'datacube-stacker = datacube_apps.stacker:main', 'pixeldrill = datacube_apps.pixeldrill:main [interactive]', - 'movie_generator = datacube_apps.movie_generator:main' + 'movie_generator = datacube_apps.movie_generator:main', + 'datacube-simple-replica = datacube_apps.simple_replica:replicate' ] }, ) From e1647b9950eb91ff1bcf1a40820c11a5da66ab2a Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Fri, 28 Apr 2017 17:54:36 +1000 Subject: [PATCH 05/12] Add Simple Replica documentation --- docs/conf.py | 3 +- docs/index.rst | 1 + docs/ops/replication.rst | 77 ++++++++++++++++++++++++++++++++++++++++ 3 files changed, 80 insertions(+), 1 deletion(-) create mode 100644 docs/ops/replication.rst diff --git a/docs/conf.py b/docs/conf.py index d9dc9a2f44..fd749bd712 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -134,7 +134,8 @@ } click_utils_commands = {'datacube-search': 'datacube.scripts.search_tool:cli', - 'datacube': 'datacube.scripts.cli_app:cli'} + 'datacube': 'datacube.scripts.cli_app:cli', + 'datacube-simple-replica': 'datacube_apps.simple_replica:replicate'} graphviz_output_format = 'svg' diff --git a/docs/index.rst b/docs/index.rst index e0193e40e0..bcd51a1670 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -29,6 +29,7 @@ and related data from multiple satellite and other acquisition 
systems. ops/config ops/prepare_scripts ops/tools + ops/replication .. toctree:: :maxdepth: 2 diff --git a/docs/ops/replication.rst b/docs/ops/replication.rst new file mode 100644 index 0000000000..30cbc34dc2 --- /dev/null +++ b/docs/ops/replication.rst @@ -0,0 +1,77 @@ +================ +Data Replication +================ + +Simple Data Cube Replication Tool +--------------------------------- + +This tool provides a very simplistic way to download data and metadata from a +remote Data Cube onto a local PC. It connects to a remote Data Cube via SSH, +and downloads database records and files. + +A configuration file is used to define which portions of which Product should +be downloaded. If a Dataset is already available locally, it will not be +downloaded again, meaning the tool can be run multiple times to keep the local +system up to date with new datasets on the remote server. + +It can be run from the command line as :ref:`datacube-simple-replica`, taking an +optional parameter of a configuration file. + +Provide a configuration file in :ref:`datacube-replication-config` in YAML format, +or specify an alternate location on the command line. + + +Configuration +------------- + +As an example, the following configuration will download 3 Products for the +specified time and space range. Queries are specified using the same +terms as for the Data Cube Query API. + + +.. 
code-block:: yaml + :caption: ~/.datacube.replication.conf + :name: datacube-replication-config + + remote_host: raijin.nci.org.au + remote_user: example12345 + db_password: xxxxxxxxxxxx + + remote_dir: /g/data/ + local_dir: C:/datacube/ + + replicated_data: + - product: ls5_pq_albers + crs: EPSG:3577 + x: [1200000, 1300000] + y: [-4200000, -4300000] + time: [2008-01-01, 2010-01-01] + + - product: ls7_pq_albers + crs: EPSG:3577 + x: [1200000, 1300000] + y: [-4200000, -4300000] + time: [2008-01-01, 2010-01-01] + + - product: ls8_pq_albers + crs: EPSG:3577 + x: [1200000, 1300000] + y: [-4200000, -4300000] + time: [2008-01-01, 2010-01-01] + +Caveats and limitations +----------------------- + +- Remote datacube files and database are accessed via an SSH host that can be + logged into without a password, i.e. by using a local SSH key agent. +- The remote datacube index must be the same version as the local datacube code. + +Command line documentation +-------------------------- + +.. _datacube-simple-replica: + +.. 
datacube:click-help:: datacube-simple-replica + + + From 9fe379160ed1e57198344056b2947d67ac71df9a Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Fri, 28 Apr 2017 18:00:54 +1000 Subject: [PATCH 06/12] Appease pylint --- datacube_apps/simple_replica.py | 14 ++++---------- 1 file changed, 4 insertions(+), 10 deletions(-) diff --git a/datacube_apps/simple_replica.py b/datacube_apps/simple_replica.py index d97718c73d..652d1134e9 100644 --- a/datacube_apps/simple_replica.py +++ b/datacube_apps/simple_replica.py @@ -17,33 +17,26 @@ db_password: xxxxxxxxxxxx remote_dir: /g/data/ local_dir: C:/datacube/ - + replicated_data: - product: ls5_pq_albers crs: EPSG:3577 x: [1200000, 1300000] y: [-4200000, -4300000] time: [2008-01-01, 2010-01-01] - + - product: ls7_pq_albers crs: EPSG:3577 x: [1200000, 1300000] y: [-4200000, -4300000] time: [2008-01-01, 2010-01-01] - + - product: ls8_pq_albers crs: EPSG:3577 x: [1200000, 1300000] y: [-4200000, -4300000] time: [2008-01-01, 2010-01-01] -Requirements, limitations and assumptions: - -- Remote datacube files and database are accessed via an SSH host that can be logged into without a password, ie. - with a local SSH key agent. -- The remote datacube index must be at the same version as the local codebase. 
-- - """ import logging @@ -121,6 +114,7 @@ def connect_to_db(self): remote_bind_address=(self.remote_dc_config.db_hostname, int(self.remote_dc_config.db_port))) self.tunnel.start() + # pylint: disable=protected-access self.remote_dc_config._config['datacube']['db_hostname'] = '127.0.0.1' self.remote_dc_config._config['datacube']['db_port'] = str(self.tunnel.local_bind_port) self.remote_dc_config._config['datacube']['db_username'] = self.remote_user From 4623393f84f40642f0b00f91793d8578230efc95 Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Wed, 10 May 2017 15:16:21 +1000 Subject: [PATCH 07/12] Improve workflow for CLI apps - Output as JSON to make it easy to re-consume --- datacube/scripts/metadata_type.py | 4 ++-- datacube/scripts/product.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/datacube/scripts/metadata_type.py b/datacube/scripts/metadata_type.py index 39f5a15f8f..ea931ba822 100644 --- a/datacube/scripts/metadata_type.py +++ b/datacube/scripts/metadata_type.py @@ -1,8 +1,8 @@ from __future__ import absolute_import, print_function +import json import logging from pathlib import Path -from pprint import pprint import click from click import echo @@ -106,7 +106,7 @@ def show_metadata_type(index, metadata_type_name, verbose): print(metadata_type_obj.description) print('Search fields: %s' % ', '.join(sorted(metadata_type_obj.dataset_fields.keys()))) if verbose: - pprint(metadata_type_obj.definition, width=100) + echo(json.dumps(metadata_type_obj.definition, indent=4)) @metadata_type.command('list') diff --git a/datacube/scripts/product.py b/datacube/scripts/product.py index 16ea57242b..735d07fd42 100644 --- a/datacube/scripts/product.py +++ b/datacube/scripts/product.py @@ -1,8 +1,8 @@ from __future__ import absolute_import +import json import logging from pathlib import Path -from pprint import pprint import click from click import echo @@ -131,4 +131,4 @@ def show_product(index, product_name): Show details about a product in the 
index """ product_def = index.products.get_by_name(product_name) - pprint(product_def.definition) + click.echo_via_pager(json.dumps(product_def.definition, indent=4)) From 158a8a9e7c4f8ea6a89f4c8d6d6bc8ff3e0a00f6 Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Fri, 12 May 2017 09:16:50 +1000 Subject: [PATCH 08/12] Install requirements for replicas tool --- datacube_apps/simple_replica.py | 2 +- setup.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/datacube_apps/simple_replica.py b/datacube_apps/simple_replica.py index 652d1134e9..b8877c72f3 100644 --- a/datacube_apps/simple_replica.py +++ b/datacube_apps/simple_replica.py @@ -63,7 +63,7 @@ def uri_to_path(uri): return uri.replace('file://', '') -class DatacubeReplicator: +class DatacubeReplicator(object): def __init__(self, config): self.remote_host = config['remote_host'] self.remote_user = config['remote_user'] diff --git a/setup.py b/setup.py index 42711d392f..8eeaa4e685 100755 --- a/setup.py +++ b/setup.py @@ -11,6 +11,7 @@ 'distributed': ['distributed', 'dask[distributed]'], 'analytics': ['scipy', 'pyparsing', 'numexpr'], 'doc': ['Sphinx', 'setuptools'], + 'replicas': ['paramiko', 'sshtunnel', 'tqdm'], 'test': tests_require, } # An 'all' option, following ipython naming conventions. 
From 4a78b2cff86646cc1cd5207d2206f0d64b2e80c9 Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Fri, 12 May 2017 09:17:34 +1000 Subject: [PATCH 09/12] Install replicas requirements for travisCI --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 92805b9c5a..a1ae7da233 100644 --- a/.travis.yml +++ b/.travis.yml @@ -47,7 +47,7 @@ install: - conda env create -n agdc --file $CONDA_ENV_FILE - source activate agdc - - pip install .[analytics,test,interactive] --no-deps --upgrade + - pip install .[analytics,test,interactive,replicas] --no-deps --upgrade - pip freeze From 6015563eb9eff18b32d31c1bf02b3567e29e9eba Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Fri, 12 May 2017 09:18:33 +1000 Subject: [PATCH 10/12] Install replicas requirements for CircleCI --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index f04889fdf1..8e82465faa 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,7 +37,7 @@ jobs: conda env create -q -n agdc --file .travis/environment_py35.yaml source activate agdc conda install -q sphinx sphinx_rtd_theme mock click # Stuff for docs - pip install .[analytics,test,interactive,docs] --no-deps --upgrade + pip install .[analytics,test,interactive,replicas,docs] --no-deps --upgrade # - restore_cache: # key: projectname-{{ .Branch }}-{{ checksum "yarn.lock" }} From 7ca43ae3fe4289471355125a5f2de959d6a0f388 Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Fri, 12 May 2017 09:59:09 +1000 Subject: [PATCH 11/12] Fix reqs install in CI loops --- .circleci/config.yml | 2 +- .travis.yml | 2 +- .travis/environment_py27.yaml | 3 +++ .travis/environment_py35.yaml | 3 +++ 4 files changed, 8 insertions(+), 2 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8e82465faa..89b911a291 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -37,7 +37,7 @@ jobs: conda env create -q 
-n agdc --file .travis/environment_py35.yaml source activate agdc conda install -q sphinx sphinx_rtd_theme mock click # Stuff for docs - pip install .[analytics,test,interactive,replicas,docs] --no-deps --upgrade + pip install . --no-deps --upgrade # - restore_cache: # key: projectname-{{ .Branch }}-{{ checksum "yarn.lock" }} diff --git a/.travis.yml b/.travis.yml index a1ae7da233..1f9386169a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -47,7 +47,7 @@ install: - conda env create -n agdc --file $CONDA_ENV_FILE - source activate agdc - - pip install .[analytics,test,interactive,replicas] --no-deps --upgrade + - pip install . --no-deps --upgrade - pip freeze diff --git a/.travis/environment_py27.yaml b/.travis/environment_py27.yaml index 06d68d46d4..e49d3c8e65 100644 --- a/.travis/environment_py27.yaml +++ b/.travis/environment_py27.yaml @@ -24,6 +24,9 @@ dependencies: - pathlib - compliance-checker = 3.0.3 - pygeoif = 0.6 # compliance-checker 3.0.3 fails with 0.7 +- paramiko # for simple-replicas +- sshtunnel # for simple-replicas +- tqdm # for simple-replicas - pip: - pypeg2 - pytest-cov # testing diff --git a/.travis/environment_py35.yaml b/.travis/environment_py35.yaml index 44896a2cc7..241a0c6374 100644 --- a/.travis/environment_py35.yaml +++ b/.travis/environment_py35.yaml @@ -25,6 +25,9 @@ dependencies: - pathlib - compliance-checker = 3.0.3 - pygeoif = 0.6 # compliance-checker 3.0.3 fails with 0.7 +- paramiko # for simple-replicas +- sshtunnel # for simple-replicas +- tqdm # for simple-replicas - pip: - pypeg2 - pytest-cov # testing From 02fcdbcc0ac6387a8bfb6b39b5e5d6cdee22d573 Mon Sep 17 00:00:00 2001 From: Damien Ayers Date: Fri, 12 May 2017 10:55:09 +1000 Subject: [PATCH 12/12] Support py2.7 --- datacube_apps/simple_replica.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/datacube_apps/simple_replica.py b/datacube_apps/simple_replica.py index b8877c72f3..edd06f0a10 100644 --- a/datacube_apps/simple_replica.py +++ 
b/datacube_apps/simple_replica.py @@ -40,6 +40,7 @@ """ import logging +import os.path from configparser import ConfigParser from pathlib import Path @@ -56,7 +57,7 @@ LOG = logging.getLogger('simple_replicator') -DEFAULT_REPLICATION_CONFIG = str(Path('~/.datacube.replication.conf').expanduser()) +DEFAULT_REPLICATION_CONFIG = os.path.expanduser('~/.datacube.replication.conf') def uri_to_path(uri):