Showing 377 changed files with 16,333 additions and 8,259 deletions.
14 changes: 14 additions & 0 deletions .github/workflows/assign.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
---
# Workflow: lets any user self-assign an issue by commenting "/take" on it.
name: Assign

on:
  issue_comment:
    # Only react to newly created comments (not edits/deletions).
    types: [created]

jobs:
  one:
    runs-on: ubuntu-latest
    steps:
      # Fix: the step previously had an empty `name:` value (yamllint
      # empty-values); give it a descriptive name.
      - name: assign issue to commenter
        if: github.event.comment.body == '/take'
        run: |
          echo "Assigning issue ${{ github.event.issue.number }} to ${{ github.event.comment.user.login }}"
          curl -H "Authorization: token ${{ secrets.GITHUB_TOKEN }}" -d '{"assignees": ["${{ github.event.comment.user.login }}"]}' https://api.github.com/repos/${{ github.repository }}/issues/${{ github.event.issue.number }}/assignees
28 changes: 28 additions & 0 deletions .github/workflows/bigquery.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
---
# Workflow: runs the BigQuery backend test suite on pushes to master.
# Requires the GCLOUD_SERVICE_KEY repository secret (GCP service account JSON).
name: BigQuery

on:
  push:
    branches: master

jobs:
  Tests_bigquery:
    name: Tests bigquery
    runs-on: ubuntu-latest
    env:
      BACKENDS: "bigquery"
    strategy:
      # Run every matrix entry to completion even if one Python version fails.
      fail-fast: false
      matrix:
        # Quoted so the versions stay strings (unquoted 3.8 would still be
        # fine, but e.g. 3.10 would become the float 3.1).
        python_version: ["3.7", "3.8"]
    steps:
      - name: checkout
        uses: actions/checkout@v1

      # Materialize the service-account key for the Google client libraries.
      - name: set up bigquery credentials
        run: echo '${{ secrets.GCLOUD_SERVICE_KEY }}' > gcloud-service-key.json

      - name: set up environment
        run: GOOGLE_BIGQUERY_PROJECT_ID="ibis-gbq" GOOGLE_APPLICATION_CREDENTIALS=gcloud-service-key.json ./ci/setup_env.sh "${{ matrix.python_version }}" "$BACKENDS"

      - name: run tests
        run: PYTEST_BACKENDS=$BACKENDS PYTEST_EXPRESSION="not hdfs" GOOGLE_BIGQUERY_PROJECT_ID="ibis-gbq" GOOGLE_APPLICATION_CREDENTIALS=gcloud-service-key.json ./ci/run_tests.sh
238 changes: 238 additions & 0 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,238 @@
---
# Workflow: main CI pipeline — backend test matrices, lint/benchmarks, and
# conda package build — on pushes and pull requests targeting master.
name: Main

on:
  push:
    branches: master
  pull_request:
    branches: master

jobs:
  Tests_pandas:
    name: Tests pandas / files
    runs-on: ${{ matrix.os }}
    env:
      BACKENDS: "pandas csv parquet hdf5"
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest]
        python_version: ["3.7", "3.8"]
    steps:
      - name: checkout
        uses: actions/checkout@v1

      - name: set up environment
        run: ./ci/setup_env.sh "${{ matrix.python_version }}" "$BACKENDS"
        # Explicit bash so the same script syntax works on the Windows runner.
        shell: bash

      - name: run tests
        run: PYTEST_BACKENDS=$BACKENDS PYTEST_EXPRESSION="not hdfs" ./ci/run_tests.sh
        shell: bash

  Tests_sql:
    name: Tests SQL
    runs-on: ubuntu-latest
    env:
      BACKENDS: "postgres mysql sqlite"
    strategy:
      fail-fast: false
      matrix:
        python_version: ["3.7", "3.8"]
    services:
      postgres:
        image: shajekpivotal/ibis-docker-postgres-9.5
        ports:
          - 5432:5432
        env:
          POSTGRES_PASSWORD: ''
        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 3
      mysql:
        image: mariadb:10.4.12
        ports:
          - 3306:3306
        env:
          # Quoted: container env vars are strings; an unquoted `true` would
          # be parsed as a YAML boolean before being stringified.
          MYSQL_ALLOW_EMPTY_PASSWORD: "true"
          MYSQL_DATABASE: ibis_testing
          MYSQL_USER: ibis
          MYSQL_PASSWORD: ibis
        options: --health-cmd="mysqladmin ping" --health-interval 10s --health-timeout 5s --health-retries 3
    steps:
      - name: checkout
        uses: actions/checkout@v1

      - name: set up environment
        run: ./ci/setup_env.sh "${{ matrix.python_version }}" "$BACKENDS"

      - name: run tests
        run: PYTEST_BACKENDS=$BACKENDS PYTEST_EXPRESSION="not udf and not hdfs" ./ci/run_tests.sh

  Tests_impala_clickhouse:
    name: Tests Impala / Clickhouse
    runs-on: ubuntu-latest
    # TODO: temporary disabling this build, since the impala container seems
    # to not be starting properly
    if: false
    env:
      BACKENDS: "impala clickhouse"
    strategy:
      fail-fast: false
      matrix:
        python_version: ["3.7", "3.8"]
    services:
      impala:
        image: ibisproject/impala:latest
        env:
          PGPASSWORD: postgres
        ports:
          # HDFS
          - 9020:9020
          - 50070:50070
          - 50075:50075
          - 8020:8020
          - 8042:8042
          # Hive
          - 9083:9083

          # Impala
          - 21000:21000
          - 21050:21050
          - 25000:25000
          - 25010:25010
          - 25020:25020
        options: --health-cmd "nc -z 127.0.0.1 21050 && nc -z 127.0.0.1 50070" --health-interval 30s --health-timeout 10s --health-retries 20
      clickhouse:
        # XXX does clickhouse backend require the impala container too?
        image: yandex/clickhouse-server:18.14
        ports:
          - 8123:8123
          - 9000:9000

    steps:
      - name: checkout
        uses: actions/checkout@v1

      - name: set up environment
        run: ./ci/setup_env.sh "${{ matrix.python_version }}" "$BACKENDS"

      - name: run tests
        run: PYTEST_BACKENDS=$BACKENDS ./ci/run_tests.sh

  Tests_omniscidb:
    name: Tests OmniSciDB
    runs-on: ubuntu-latest
    env:
      BACKENDS: "omniscidb"
    strategy:
      fail-fast: false
      matrix:
        python_version: ["3.7"]
    services:
      omnisci:
        # NOTE: Keep the documentation about the OmniSciDB supported version
        # updated (docs/source/backends/omnisci.rst).
        image: omnisci/core-os-cpu:v5.3.0
        ports:
          - 6274:6274
          - 6278:6278
        volumes:
          - omniscidb.conf:/omnisci-storage/omnisci.conf
    steps:
      - name: checkout
        uses: actions/checkout@v1

      - name: set up environment
        run: ./ci/setup_env.sh "${{ matrix.python_version }}" "$BACKENDS"

      - name: run tests
        run: PYTEST_BACKENDS=$BACKENDS PYTEST_EXPRESSION="not hdfs" ./ci/run_tests.sh

  Tests_spark:
    name: Tests PySpark / Spark
    runs-on: ubuntu-latest
    env:
      BACKENDS: "spark pyspark"
    strategy:
      fail-fast: false
      matrix:
        python_version: ["3.7", "3.8"]
    steps:
      - name: checkout
        uses: actions/checkout@v1

      - name: set up environment
        run: ./ci/setup_env.sh "${{ matrix.python_version }}" "$BACKENDS"

      - name: run tests
        run: PYTEST_BACKENDS=$BACKENDS PYTEST_EXPRESSION="not hdfs" ./ci/run_tests.sh

  Lint_and_benchmarks:
    # Fix: display name had a typo ("benckmarks").
    name: Lint and benchmarks
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v1

      - name: set up environment
        run: ./ci/setup_env.sh

      # `if: always()` on each check so one failing linter does not hide
      # the results of the others.
      - name: black
        run: black --check .
        if: always()

      - name: mypy
        # TODO: mypy has errors that need to be fixed before it can be added
        run: mypy --ignore-missing-imports ibis || true
        if: always()

      - name: pydocstyle
        # TODO: change match-dir when docstrings are fixed for other backends
        run: pydocstyle --match-dir="(ibis|omniscidb)"
        if: always()

      - name: isort
        run: isort --check-only .
        if: always()

      - name: publish feedstock artifact
        uses: actions/upload-artifact@master
        with:
          name: LinuxCondaPackage
          path: /tmp/ibis/packages
        if: github.event_name == 'push'

      # Fix: step name had a typo ("benckmark").
      - name: benchmark
        run: asv machine --yes && asv dev
        if: always()

  Conda_package:
    name: Conda package
    runs-on: ubuntu-latest
    steps:
      - name: checkout
        uses: actions/checkout@v1

      - name: set up environment
        run: ./ci/setup_env.sh

      - name: clone feedstock repo
        run: git clone https://github.com/conda-forge/ibis-framework-feedstock /tmp/feedstock

      # Point the recipe at the local checkout and pin the current version.
      - name: update recipe file
        run: |
          set -x
          IBIS_PATH=`pwd`
          sed -i "s|url:.*|path: $IBIS_PATH|g" ci/recipe/meta.yaml
          IBIS_VERSION=`python -c "import ibis; print(ibis.__version__)"`
          sed -i "s/{{ version }}/$IBIS_VERSION/g" ci/recipe/meta.yaml
          cat ci/recipe/meta.yaml

      - name: build recipe
        run: conda build -c conda-forge --python 3.7 ci/recipe

      - name: deploy recipe package
        run: |
          mkdir /tmp/packages
          cp -r /usr/share/miniconda/conda-bld/noarch /tmp/packages/noarch
          cp -r /usr/share/miniconda/conda-bld/linux-64 /tmp/packages/linux-64
          conda index /tmp/packages
6 changes: 1 addition & 5 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -1,10 +1,6 @@
repos:
- repo: https://github.com/asottile/seed-isort-config
rev: v1.9.2
hooks:
- id: seed-isort-config
- repo: https://github.com/timothycrosley/isort
rev: 18ad293fc9d1852776afe35015a932b68d26fb14
rev: 5.6.4
hooks:
- id: isort
- repo: https://github.com/psf/black
Expand Down
57 changes: 33 additions & 24 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,13 @@
SHELL := /bin/bash
MAKEFILE_DIR = $(patsubst %/,%,$(dir $(abspath $(lastword $(MAKEFILE_LIST)))))

# PYTHON_VERSION defines which `./ci/requirements-dev-$PYTHON_VERSION`
# file will be used for creating the ibis image (see for additional info:
# `./ci/Dockerfile.dev` and `./ci/docker-compose.yml`)
# You can use `3.7` for now for the PYTHON_VERSION
PYTHON_VERSION := 3.7

PYTHONHASHSEED := random

# docker specific
COMPOSE_FILE := "$(MAKEFILE_DIR)/ci/docker-compose.yml"
Expand Down Expand Up @@ -117,36 +118,47 @@ init: restart
$(MAKE) build
$(MAKE) load

# Targets for running backend specific Ibis tests inside docker's containers
# BACKENDS can be set to choose which tests should be run:
#   make --directory ibis testparallel BACKENDS='omniscidb impala'

# Run backend-specific tests after (re)initializing the containers.
test: init
	$(DOCKER_RUN) -e PYTHONHASHSEED="$(PYTHONHASHSEED)" ibis bash -c "${REMOVE_COMPILED_PYTHON_SCRIPTS} && \
		pytest $(PYTEST_DOCTEST_OPTIONS) $(PYTEST_OPTIONS) ${PYTEST_MARKERS} -k 'not test_import_time'"

# Same as make test, but does not run init first.
testnoinit:
	$(DOCKER_RUN) -e PYTHONHASHSEED="$(PYTHONHASHSEED)" ibis bash -c "${REMOVE_COMPILED_PYTHON_SCRIPTS} && \
		pytest $(PYTEST_DOCTEST_OPTIONS) $(PYTEST_OPTIONS) ${PYTEST_MARKERS} -k 'not test_import_time'"

# Parallel test run (pytest-xdist `-n auto`) after init.
testparallel: init
	$(DOCKER_RUN) -e PYTHONHASHSEED="$(PYTHONHASHSEED)" ibis bash -c "${REMOVE_COMPILED_PYTHON_SCRIPTS} && \
		pytest $(PYTEST_DOCTEST_OPTIONS) $(PYTEST_OPTIONS) ${PYTEST_MARKERS} -n auto -k 'not test_import_time'"

# Same as make testparallel, but does not run init first.
testparallelnoinit:
	$(DOCKER_RUN) -e PYTHONHASHSEED="$(PYTHONHASHSEED)" ibis bash -c "${REMOVE_COMPILED_PYTHON_SCRIPTS} && \
		pytest $(PYTEST_DOCTEST_OPTIONS) $(PYTEST_OPTIONS) ${PYTEST_MARKERS} -n auto -k 'not test_import_time'"

# Shortcut targets for running a subset of the Ibis tests inside docker's containers

# Deprecated alias kept so existing invocations fail loudly with guidance.
testall:
	@echo "You should use make testmain instead"

testmain:
	$(MAKE) testparallelnoinit PYTEST_MARKERS="-m 'not (udf or spark or pyspark)'"

testmost:
	$(MAKE) testparallelnoinit PYTEST_MARKERS="-m 'not (udf or impala or hdfs or spark or pyspark)'"

testfast:
	$(MAKE) testparallelnoinit PYTEST_MARKERS="-m 'not (udf or impala or hdfs or bigquery or spark or pyspark)'"

testpandas:
	$(MAKE) testparallelnoinit PYTEST_MARKERS="-m pandas"

testpyspark:
	$(MAKE) testparallelnoinit PYTEST_MARKERS="-m pyspark"

# Print the marker expression used for the fast test subset.
fastopt:
	@echo -m 'not (backend or bigquery or clickhouse or hdfs or impala or kudu or omniscidb or mysql or postgis or postgresql or superuser or udf)'
Expand Down Expand Up @@ -176,8 +188,5 @@ doc: builddoc docclean

# Targets for run commands inside ibis and ibis-docs containers

# Run an arbitrary command (DOCKER_RUN_COMMAND) in the ibis container.
docker_run:
	$(DOCKER_RUN) ibis $(DOCKER_RUN_COMMAND)

# Run an arbitrary command in the docs container; depends on builddoc so the
# ibis-docs image exists first.
docker_docs_run: builddoc
	$(DOCKER_RUN) ibis-docs $(DOCKER_RUN_COMMAND)
Loading