@@ -1,176 +1,11 @@
jobs:

- template: ci/azure/linux.yml
  parameters:
    name: Linux
    vmImage: ubuntu-16.04

- template: ci/azure/windows.yml
  parameters:
    name: Windows
    vmImage: windows-2019

@@ -0,0 +1,266 @@
parameters:
  name: ''
  vmImage: ''

jobs:
- job: ${{ parameters.name }}Test

  pool:
    vmImage: ${{ parameters.vmImage }}

  variables:
    AZURECI: 1
    COMPOSE_FILE: ci/docker-compose.yml
    PYTHONHASHSEED: "random"
    PYTEST_MARK_EXPRESSION: "not udf"
    BACKENDS: "clickhouse impala kudu-master kudu-tserver mysql omniscidb parquet postgres sqlite"

  strategy:
    matrix:
      py36:
        PYTHON_MAJOR_VERSION: "3"
        PYTHON_MINOR_VERSION: "6"
        PYTHON_VERSION: $(PYTHON_MAJOR_VERSION).$(PYTHON_MINOR_VERSION)
        PYTHON_NO_DOT_VERSION: $(PYTHON_MAJOR_VERSION)$(PYTHON_MINOR_VERSION)
      py37:
        PYTHON_MAJOR_VERSION: "3"
        PYTHON_MINOR_VERSION: "7"
        PYTHON_VERSION: $(PYTHON_MAJOR_VERSION).$(PYTHON_MINOR_VERSION)
        PYTHON_NO_DOT_VERSION: $(PYTHON_MAJOR_VERSION)$(PYTHON_MINOR_VERSION)
      py38:
        PYTHON_MAJOR_VERSION: "3"
        PYTHON_MINOR_VERSION: "8"
        PYTHON_VERSION: $(PYTHON_MAJOR_VERSION).$(PYTHON_MINOR_VERSION)
        PYTHON_NO_DOT_VERSION: $(PYTHON_MAJOR_VERSION)$(PYTHON_MINOR_VERSION)
        # pymapd and pyspark do not yet work with ibis on Python 3.8
        # https://github.com/ibis-project/ibis/issues/2091
        # https://github.com/ibis-project/ibis/issues/2090
        PYTEST_MARK_EXPRESSION: "not udf and not omniscidb and not spark and not pyspark"
        BACKENDS: "clickhouse impala kudu-master kudu-tserver mysql parquet postgres sqlite"

  steps:
  - bash: |
      if [ -n "${GCLOUD_SERVICE_KEY}" ]; then
        mkdir -p /tmp/ibis
        base64 --decode --ignore-garbage <<< "${GCLOUD_SERVICE_KEY}" > /tmp/ibis/gcloud-service-key.json
      fi
    env:
      GCLOUD_SERVICE_KEY: $(GCLOUD_SERVICE_KEY)
    displayName: 'Set up BigQuery credentials'
    condition: eq(variables['System.PullRequest.IsFork'], 'False')
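
The `GCLOUD_SERVICE_KEY` secret consumed above is presumably produced by base64-encoding a service-account key file before storing it as a pipeline variable; a minimal sketch, assuming a local key file named `gcloud-service-key.json`:

```bash
# encode the key on one line, suitable for pasting into a secret variable
base64 --wrap=0 < gcloud-service-key.json
```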

  - bash: make start PYTHON_VERSION=$PYTHON_VERSION BACKENDS="${BACKENDS}"
    displayName: 'Start databases'

  - bash: make wait PYTHON_VERSION=$PYTHON_VERSION BACKENDS="${BACKENDS}"
    displayName: 'Wait for databases'

  - bash: docker ps
    displayName: 'Show running containers'

  - bash: make build PYTHON_VERSION=$PYTHON_VERSION
    displayName: 'Build ibis image'

  - bash: docker images
    displayName: 'List docker images'

  - bash: make docker_lint PYTHON_VERSION=$PYTHON_VERSION
    displayName: 'Lint'

  # TODO: change match-dir when docstrings are fixed for other backends
  - bash: docker-compose run ibis pydocstyle --match-dir="(ibis|omniscidb)"
    displayName: 'Docstring check'

  - bash: make docker_check_pre_commit_hooks PYTHON_VERSION=$PYTHON_VERSION
    displayName: 'Ensure all pre-commit hooks pass'

  - bash: |
      sudo mkdir -p /tmp/ibis/test-reports/pytest
      mkdir -p /tmp/env
    displayName: 'Make artifacts directories'

  - bash: make docker_run PYTHON_VERSION=$PYTHON_VERSION DOCKER_RUN_COMMAND="conda list"
    displayName: 'Show packages in conda environment'

  - bash: make docker_run PYTHON_VERSION=$PYTHON_VERSION DOCKER_RUN_COMMAND="conda list --export > /tmp/env/env.yml"
    displayName: 'Capture packages in conda environment'

  - bash: make load PYTHON_VERSION=$PYTHON_VERSION BACKENDS="${BACKENDS}"
    displayName: 'Load test datasets'

  - bash: |
      docker-compose run \
        -e PYTHONHASHSEED=$PYTHONHASHSEED \
        -e AZURECI=$AZURECI \
        -e GOOGLE_APPLICATION_CREDENTIALS=/tmp/gcloud-service-key.json \
        ibis \
        pytest ibis -m "${PYTEST_MARK_EXPRESSION}" \
          -ra \
          --numprocesses auto \
          --doctest-modules \
          --doctest-ignore-import-errors \
          --junitxml=/tmp/test-reports/pytest/junit.xml \
          --cov=ibis \
          --cov-report=xml:/tmp/test-reports/pytest-cov/coverage.xml
    displayName: 'Run tests'

  # Coverage upload is disabled for now; see #1954.
  # - bash: |
  #     bash <(curl -s https://codecov.io/bash) \
  #       -f /tmp/ibis/test-reports/pytest-cov/coverage.xml
  #   displayName: 'Upload coverage'
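
For local debugging, the test step can be approximated outside Azure; a sketch that reuses the Makefile targets and `ci/docker-compose.yml` exactly as they are invoked above:

```bash
# start a small set of backends, wait for them, then run the marked tests
export COMPOSE_FILE=ci/docker-compose.yml
make start PYTHON_VERSION=3.7 BACKENDS="mysql postgres sqlite"
make wait PYTHON_VERSION=3.7 BACKENDS="mysql postgres sqlite"
docker-compose run ibis pytest ibis -m "not udf" -ra --numprocesses auto
```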

  - task: PublishTestResults@2
    displayName: 'Publish test results from pytest JUnitXML'
    inputs:
      testResultsFiles: /tmp/ibis/test-reports/pytest/junit.xml
      testRunTitle: 'Publish test results'
      mergeTestResults: False
    condition: succeededOrFailed()  # pass or fail, but not cancelled

  - task: PublishPipelineArtifact@1
    inputs:
      path: /tmp/env/env.yml
      artifact: LinuxCondaEnvironment$(PYTHON_NO_DOT_VERSION)
    displayName: 'Publish Linux environment YAML to Azure'
    condition: succeededOrFailed()  # pass or fail, but not cancelled

- job: ${{ parameters.name }}BuildConda

  pool:
    vmImage: ${{ parameters.vmImage }}

  variables:
    PYTHON_VERSION: "3.7"
    AZURECI: 1
    COMPOSE_FILE: ci/docker-compose.yml

  steps:
  - bash: make build PYTHON_VERSION=$PYTHON_VERSION
    displayName: 'Build ibis image'

  - bash: docker images
    displayName: 'List docker images'

  - bash: make docker_run PYTHON_VERSION=$PYTHON_VERSION DOCKER_RUN_COMMAND="ci/feedstock.py test"
    displayName: 'Clone, update and build conda-forge recipe'

  - task: PublishPipelineArtifact@1
    inputs:
      path: /tmp/ibis/packages
      artifact: LinuxCondaPackage
    displayName: 'Publish conda package to Azure'
    condition: and(succeeded(), eq(variables['System.PullRequest.IsFork'], 'False'))

- job: ${{ parameters.name }}Benchmark

  pool:
    vmImage: ${{ parameters.vmImage }}

  variables:
    PYTHON_VERSION: "3.6"
    AZURECI: 1
    COMPOSE_FILE: ci/docker-compose.yml

  steps:
  - bash: make build PYTHON_VERSION=$PYTHON_VERSION
    displayName: 'Build ibis image'

  - bash: make docker_run PYTHON_VERSION=$PYTHON_VERSION DOCKER_RUN_COMMAND='ci/benchmark.sh azure "$(Build.SourceVersion)"'
    displayName: 'Run benchmarks (ASV)'

- job: ${{ parameters.name }}BuildDocs

  pool:
    vmImage: ${{ parameters.vmImage }}

  variables:
    PYTHON_VERSION: "3.6"
    AZURECI: 1
    COMPOSE_FILE: ci/docker-compose.yml

  steps:
  - bash: make start PYTHON_VERSION=$PYTHON_VERSION
    displayName: 'Start databases'

  - bash: make wait PYTHON_VERSION=$PYTHON_VERSION
    displayName: 'Wait for databases'

  - bash: make builddoc PYTHON_VERSION=$PYTHON_VERSION
    displayName: 'Build ibis documentation image'

  - bash: docker images
    displayName: 'List docker images'

  - bash: docker ps
    displayName: 'Show running containers'

  - bash: make load PYTHON_VERSION=$PYTHON_VERSION
    displayName: 'Load test datasets'

  - bash: make docker_docs_run PYTHON_VERSION=$PYTHON_VERSION DOCKER_RUN_COMMAND="ping -c 1 impala"
    displayName: 'Ping the impala host used in the tutorial notebooks'

  - bash: |
      make docker_docs_run PYTHON_VERSION=$PYTHON_VERSION DOCKER_RUN_COMMAND="git clone --branch gh-pages \
        https://github.com/cpcloud/docs.ibis-project.org \
        /tmp/docs.ibis-project.org"
    displayName: 'Clone doc repo'

  # Delete everything at the top level of the docs checkout except the
  # directory itself, git files, the CNAME record, and .nojekyll files,
  # so the freshly built docs fully replace the old ones.
  - bash: |
      docker-compose run ibis-docs \
        find /tmp/docs.ibis-project.org \
        -maxdepth 1 \
        ! -wholename /tmp/docs.ibis-project.org \
        ! -name '*.git' \
        ! -name CNAME \
        ! -name '*.nojekyll' \
        -exec rm -rf {} \;
    displayName: 'Clear out old docs'

  - bash: make docker_docs_run PYTHON_VERSION=$PYTHON_VERSION DOCKER_RUN_COMMAND="conda list"
    displayName: 'Show the doc env'

  - bash: |
      make docker_docs_run PYTHON_VERSION=$PYTHON_VERSION DOCKER_RUN_COMMAND="sphinx-build -b html \
        docs/source /tmp/docs.ibis-project.org -W -T"
    displayName: 'Build docs'

  - task: PublishPipelineArtifact@1
    inputs:
      path: /tmp/ibis/docs.ibis-project.org
      artifact: Documentation
    displayName: 'Publish documentation to Azure'
    condition: and(succeeded(), eq(variables['System.PullRequest.IsFork'], 'False'))

  - bash: |
      mkdir ~/.ssh
      base64 --decode --ignore-garbage <<< "${IBIS_GH_TOKEN}" > ~/.ssh/id_rsa
      chmod 700 ~/.ssh
      chmod 600 ~/.ssh/id_rsa
      # add github to known hosts
      ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts
      sudo chown -R "${USER}":"${USER}" /tmp/ibis
      pushd /tmp/ibis/docs.ibis-project.org
      git remote set-url origin git@github.com:ibis-project/docs.ibis-project.org
      git config user.name 'Ibis Documentation Bot'
      git config user.email ''
      # add everything
      git add --all .
      git commit -m "Docs from ibis at $(Build.SourceVersion)"
      git push --force origin gh-pages
    displayName: 'Push docs to remote repo'
    env:
      IBIS_GH_TOKEN: $(IBIS_GH_TOKEN)
    condition: |
      and(eq(variables['System.PullRequest.IsFork'], 'False'),
          eq(variables['Build.Repository.Name'], 'ibis-project/ibis'),
          eq(variables['Build.SourceBranchName'], 'master'))

@@ -0,0 +1,137 @@
parameters:
  name: ''
  vmImage: ''

jobs:
- job: ${{ parameters.name }}Test

  pool:
    vmImage: ${{ parameters.vmImage }}

  variables:
    AZURECI: 1
    conda.version: "4.6"

  strategy:
    matrix:
      py36:
        python.major.version: "3"
        python.minor.version: "6"
        python.version: "$(python.major.version).$(python.minor.version)"
        python.no.dot.version: "$(python.major.version)$(python.minor.version)"
        conda.env: "ibis$(python.no.dot.version)"
      py37:
        python.major.version: "3"
        python.minor.version: "7"
        python.version: "$(python.major.version).$(python.minor.version)"
        python.no.dot.version: "$(python.major.version)$(python.minor.version)"
        conda.env: "ibis$(python.no.dot.version)"
      py38:
        python.major.version: "3"
        python.minor.version: "8"
        python.version: "$(python.major.version).$(python.minor.version)"
        python.no.dot.version: "$(python.major.version)$(python.minor.version)"
        conda.env: "ibis$(python.no.dot.version)"

  steps:
  - powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
    displayName: Add conda to PATH

  - script: conda config --set always_yes True --set show_channel_urls True
    displayName: 'Set conda always_yes and show_channel_urls'

  - script: conda config --add channels conda-forge
    displayName: 'Add the conda-forge channel'

  - script: conda update --all conda=$(conda.version)
    displayName: 'Update conda and install an appropriate version'

  - script: conda create --name $(conda.env) python=$(python.version) numpy pandas pytables ruamel.yaml jinja2 pyarrow multipledispatch pymysql "sqlalchemy>=1.1" psycopg2 graphviz click mock plumbum geopandas toolz regex
    displayName: 'Create conda environment'

  - script: |
      call activate $(conda.env)
      pip install --upgrade "pytest>=4.5"
      pip install --upgrade pytest-xdist
    displayName: 'Install latest pytest'

  - script: conda info
    displayName: 'Show conda info'

  - script: conda list --name $(conda.env)
    displayName: 'Show installed packages'

  - script: conda list --export --name $(conda.env) > env.yml
    displayName: 'Capture installed packages'

  - task: PublishPipelineArtifact@1
    inputs:
      path: env.yml
      artifact: WindowsCondaEnvironment$(python.no.dot.version)
    displayName: 'Publish Windows environment YAML to Azure'
    condition: succeededOrFailed()  # pass or fail, but not cancelled

  - script: |
      call activate $(conda.env)
      python -c "import numpy"
    displayName: 'Import numpy'

  - script: |
      call activate $(conda.env)
      python -c "import pandas"
    displayName: 'Import pandas'

  - script: choco install -y mariadb --version=10.3.16
    displayName: 'Install mariadb (mysql) from chocolatey'

  - script: '"C:\\Program Files\\MariaDB 10.3\\bin\\mysql" -u root -e "CREATE OR REPLACE USER ibis@localhost IDENTIFIED BY ''ibis''"'
    displayName: 'Create ibis user and password in MySQL database'

  - script: '"C:\\Program Files\\MariaDB 10.3\\bin\\mysql" -u root -e "GRANT ALL PRIVILEGES ON *.* TO ibis@localhost"'
    displayName: 'Set up privileges for the ibis user in MySQL'
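
As a sanity check (not part of the pipeline), the account created above could be exercised directly, e.g. from Git Bash on the agent; the path translation here is an assumption:

```bash
# hypothetical check that the ibis user can authenticate and run a query
"/c/Program Files/MariaDB 10.3/bin/mysql" -u ibis -pibis -e "SELECT 1"
```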

  - script: choco install -y postgresql10 --params '/Password:postgres'
    displayName: 'Install postgres from chocolatey'

  - script: |
      call activate $(conda.env)
      pip install -e . --no-deps --ignore-installed --no-cache-dir
    displayName: 'Install ibis'

  - script: |
      call activate $(conda.env)
      python ci/datamgr.py download
    displayName: 'Download data'

  - script: |
      call activate $(conda.env)
      python ci/datamgr.py mysql
    displayName: 'Load MySQL data'

  - script: |
      call activate $(conda.env)
      python ci/datamgr.py postgres --no-plpython --psql-path="C:/Program Files/PostgreSQL/10/bin/psql.exe" -t functional_alltypes -t diamonds -t batting -t awards_players
    displayName: 'Load PostgreSQL data'

  - script: |
      call activate $(conda.env)
      python ci/datamgr.py sqlite
    displayName: 'Load SQLite data'

  - script: |
      call activate $(conda.env)
      python ci/datamgr.py parquet -i
    displayName: 'Load Parquet data'

  - script: |
      call activate $(conda.env)
      pytest --tb=short --junitxml="junit-$(python.version).xml" -n auto -m "not backend and not clickhouse and not impala and not hdfs and not bigquery and not omniscidb and not postgis and not postgresql" -ra ibis
    displayName: 'Run tests'

  - task: PublishTestResults@2
    displayName: 'Publish test results from pytest JUnitXML'
    inputs:
      testResultsFiles: junit-$(python.version).xml
      testRunTitle: 'Publish test results'
      mergeTestResults: False
    condition: succeededOrFailed()  # pass or fail, but not cancelled

@@ -0,0 +1,30 @@
#!/bin/bash -e

# Generate a pytest marker expression from a list of backends, which
# `pytest -m` can use to select backend-specific tests.
#
# Usage:
#   $ ./ci/backends-markers.sh BACKEND [BACKEND ...]
#
# Example:
#   markers=`./ci/backends-markers.sh omniscidb impala` && echo $markers
#
# Output:
#   '-m omniscidb or impala'

BACKENDS=("$@")

PYTEST_MARKERS=""

for s in "${BACKENDS[@]}"
do
  if [ "$PYTEST_MARKERS" == "" ]
  then
    PYTEST_MARKERS="-m ${s}"
  else
    PYTEST_MARKERS="${PYTEST_MARKERS} or ${s}"
  fi
done

echo "'${PYTEST_MARKERS}'"
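
A couple of invocations traced against the loop above, for illustration (not part of the diff):

```bash
$ ./ci/backends-markers.sh clickhouse mysql postgres
'-m clickhouse or mysql or postgres'
$ ./ci/backends-markers.sh   # no backends given: empty expression
''
```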

@@ -0,0 +1,41 @@
#!/bin/bash

# From the full list of backends, pick those that need to be launched by
# `docker-compose` (or for which test datasets should be loaded), by
# intersecting the list with the USER_REQUESTED_BACKENDS variable.
#
# Usage:
#   $ ./ci/backends-to-start.sh param1 param2
#   * param1: string of all known backends
#   * param2: string of backends that need to be launched by `docker-compose`
#     (as docker services) before working with them, or for which test
#     datasets should be loaded.
#
# Example:
#   current_backends=`./ci/backends-to-start.sh "omniscidb impala parquet" "omniscidb impala"` && echo $current_backends
#
# Output:
#   'omniscidb impala'

# convert strings to arrays
BACKENDS=($1)
USER_REQUESTED_BACKENDS=($2)

# lookup table of the requested backends
declare -A USER_REQUESTED_BACKENDS_LOOKUP
for service in "${USER_REQUESTED_BACKENDS[@]}"
do
  USER_REQUESTED_BACKENDS_LOOKUP[$service]=1
done

# keep only the backends that were requested
i=0
for backend in "${BACKENDS[@]}"
do
  if [[ ${USER_REQUESTED_BACKENDS_LOOKUP[${backend}]} ]]; then
    CHOSEN_BACKENDS[${i}]=${backend}
    ((i++))
  fi
done

echo ${CHOSEN_BACKENDS[@]}
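
To illustrate how the two helper scripts might compose (the actual Makefile glue is not shown in this diff, and the variable names here are hypothetical):

```bash
ALL_BACKENDS="clickhouse impala mysql omniscidb parquet postgres sqlite"
REQUESTED="mysql postgres clickhouse"
to_start=$(./ci/backends-to-start.sh "$ALL_BACKENDS" "$REQUESTED")
echo "$to_start"   # -> clickhouse mysql postgres (order follows ALL_BACKENDS)
markers=$(./ci/backends-markers.sh $to_start)
echo "$markers"    # -> '-m clickhouse or mysql or postgres'
```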

@@ -0,0 +1,22 @@
#!/bin/bash

# Report the outcome of a previous docker/docker-compose command whose
# exit status the caller passes in via DOCKER_CODE. On failure, dump the
# logs of each given service to aid debugging, then exit with that code.

SERVICES=$@

echo "DOCKER_CODE: ${DOCKER_CODE}"
echo "SERVICES: ${SERVICES}"

if [ "$DOCKER_CODE" -eq 0 ]
then
  echo "[II] Done."
else
  for s in ${SERVICES}
  do
    docker container ls
    echo "=============================================================="
    echo "docker ${s} log"
    echo "=============================================================="
    docker logs --details $(docker ps -aqf "name=ci_${s}_1")
  done
fi

exit $DOCKER_CODE
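
A minimal sketch of a caller; the script's file name is not shown in this diff, so the path here is hypothetical:

```bash
docker-compose up -d mysql postgres
DOCKER_CODE=$? ./ci/report-docker-code.sh mysql postgres
```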

@@ -0,0 +1,41 @@
#!/bin/bash

# Build a `dockerize` command that waits for each given backend service
# to accept TCP connections on its known port(s), retrying every 5
# seconds for up to 10 minutes. The command is echoed, not executed.

DOCKERIZE_CALL="dockerize"

add_wait() {
  wait_string=$1
  DOCKERIZE_CALL="${DOCKERIZE_CALL} ${wait_string}"
}

for service in "$@"; do
  case "${service}" in
    omniscidb)
      add_wait "-wait tcp://omniscidb:6274"
      ;;
    mysql)
      add_wait "-wait tcp://mysql:3306"
      ;;
    postgres)
      add_wait "-wait tcp://postgres:5432"
      ;;
    impala)
      add_wait "-wait tcp://impala:21050"
      add_wait "-wait tcp://impala:50070"
      ;;
    kudu-master)
      add_wait "-wait tcp://kudu-master:7051"
      add_wait "-wait tcp://kudu-master:8051"
      ;;
    kudu-tserver)
      add_wait "-wait tcp://kudu-tserver:7050"
      add_wait "-wait tcp://kudu-tserver:8050"
      ;;
    clickhouse)
      add_wait "-wait tcp://clickhouse:9000"
      ;;
  esac
done

DOCKERIZE_CALL="${DOCKERIZE_CALL} -wait-retry-interval 5s -timeout 10m"

echo ${DOCKERIZE_CALL}
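
Tracing the case statement above, an invocation prints a single ready-to-run command (the script name here is assumed; it does not appear in the diff):

```bash
$ ./ci/dockerize-wait.sh mysql clickhouse
dockerize -wait tcp://mysql:3306 -wait tcp://clickhouse:9000 -wait-retry-interval 5s -timeout 10m
```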

@@ -0,0 +1,61 @@
channels:
  - conda-forge
dependencies:
  - black=19.10b0
  - click
  - clickhouse-cityhash
  - clickhouse-driver>=0.1.3
  - clickhouse-sqlalchemy
  - cmake
  - flake8
  - geoalchemy2
  - geopandas
  - google-cloud-bigquery>=1.0.0
  - graphviz
  - impyla>=0.15.0
  - jinja2
  - libiconv  # see https://github.com/jupyter/repo2docker/issues/758
  - lz4
  - multipledispatch>=0.6.0
  - mypy
  - numpy>=1.15
  - openjdk=8
  - pandas>=0.25.3
  - pip=19.3.1
  - plumbum
  - pre-commit
  - psycopg2
  - pyarrow>=0.13
  - pydata-google-auth
  - pydocstyle=4.0.1
  - pygit2
  # currently introduces incompatible packages, possibly because of the
  # pinned arrow version
  # - pymapd>=0.12
  - pymysql
  # not fully compatible with Python 3.8
  # https://github.com/apache/spark/pull/26194#issuecomment-566592265
  # - pyspark>=3.0
  - pytables>=3.0.0
  - pytest>=4.5
  - pytest-cov
  - pytest-xdist
  - python=3.8
  - python-graphviz
  - python-hdfs>=2.0.16
  - pytz
  - regex
  - requests
  - rtree
  - ruamel.yaml
  - shapely
  - sqlalchemy>=1.1
  - thrift>=0.9.3
  - thriftpy2  # required by impyla on Python 3
  - toolz
  - xorg-libxpm
  - xorg-libxrender
  - pip:
      # see .pre-commit-config.yaml; isort is pinned
      - seed-isort-config
      - git+git://github.com/timothycrosley/isort@18ad293fc9d1852776afe35015a932b68d26fb14#egg=isort