Commit 3f50860

Merge 81c5847 into a2bdc32
michael-kotliar committed Dec 30, 2020
2 parents a2bdc32 + 81c5847
Show file tree
Hide file tree
Showing 17 changed files with 1,119 additions and 695 deletions.
98 changes: 55 additions & 43 deletions .travis.yml
@@ -14,36 +14,36 @@ python:
env:
- NTEST=1
- NTEST=2
-- NTEST=3
-- NTEST=4
-- NTEST=5
-- NTEST=6
-- NTEST=7
-- NTEST=8
-- NTEST=9
-- NTEST=10
-- NTEST=11
-- NTEST=12
-- NTEST=13
-- NTEST=14
-- NTEST=15
-- NTEST=16
-- NTEST=17
-- NTEST=18
-- NTEST=19
-- NTEST=20
-- NTEST=21
-- NTEST=22
-- NTEST=23
-- NTEST=24
-- NTEST=25
-- NTEST=26
-- NTEST=27
-- NTEST=28
-- NTEST=29
-- NTEST=30
-- NTEST=31
-- NTEST=32
+# - NTEST=3
+# - NTEST=4
+# - NTEST=5
+# - NTEST=6
+# - NTEST=7
+# - NTEST=8
+# - NTEST=9
+# - NTEST=10
+# - NTEST=11
+# - NTEST=12
+# - NTEST=13
+# - NTEST=14
+# - NTEST=15
+# - NTEST=16
+# - NTEST=17
+# - NTEST=18
+# - NTEST=19
+# - NTEST=20
+# - NTEST=21
+# - NTEST=22
+# - NTEST=23
+# - NTEST=24
+# - NTEST=25
+# - NTEST=26
+# - NTEST=27
+# - NTEST=28
+# - NTEST=29
+# - NTEST=30
+# - NTEST=31
+# - NTEST=32

jobs:
include:
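Each NTEST value in the matrix above fans out into one Travis job that runs a single conformance test through `cwl-airflow test --range $NTEST`; this commit leaves only tests 1 and 2 active. A sketch of that fan-out, assuming `conformance_tests.yaml` parses to a YAML list and `--range` is 1-based (both are assumptions, not verified against cwl-airflow):

```python
# Sketch of the NTEST fan-out, assuming each Travis job runs exactly one
# entry of conformance_tests.yaml selected by the 1-based --range value.
import yaml  # PyYAML

with open("workflows/tests/conformance_tests.yaml") as f:
    tests = yaml.safe_load(f)  # assumed to be a list of test definitions

ntest = 2  # placeholder for the NTEST environment variable
print(f"job NTEST={ntest} would run:", tests[ntest - 1])
```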
@@ -65,11 +65,11 @@ jobs:
on:
tags: true
- name: DAG with embedded workflow (just one test)
-script: cwl-airflow test --suite workflows/tests/conformance_tests.yaml --spin --range 1 --embed
+script: cwl-airflow test --suite workflows/tests/conformance_tests.yaml --spin --range 1 --embed > /dev/null 2>&1
- name: DAG with attached workflow using combined API call (just one test)
-script: cwl-airflow test --suite workflows/tests/conformance_tests.yaml --spin --range 1 --combine
+script: cwl-airflow test --suite workflows/tests/conformance_tests.yaml --spin --range 1 --combine > /dev/null 2>&1
- name: DAG with embedded workflow using combined API call (just one test)
-script: cwl-airflow test --suite workflows/tests/conformance_tests.yaml --spin --range 1 --embed --combine
+script: cwl-airflow test --suite workflows/tests/conformance_tests.yaml --spin --range 1 --embed --combine > /dev/null 2>&1
- name: Test of `init --upgrade`
before_install:
- mkdir -p ~/airflow/dags
@@ -80,7 +80,7 @@ jobs:
before_script:
- cwl-airflow init --upgrade
- rm -f ~/airflow/dags/bam-bedgraph-bigwig-single.cwl
-script: airflow list_dags --report # to check if all DAGs are correct
+script: airflow dags list # to check if all DAGs are correct
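The switch above from `airflow list_dags --report` to `airflow dags list` follows Airflow 2.0's reorganized, verb-grouped CLI. The same "do all DAGs still parse" check can be done programmatically; a minimal sketch, assuming Airflow 2.0 with an initialized airflow.cfg and the default dags_folder:

```python
# Sketch: DagBag parses every file in the configured dags_folder, which is
# what `airflow dags list` relies on to print its table of DAGs.
from airflow.models import DagBag

dag_bag = DagBag()  # parses the .py files in the configured dags_folder
print(f"parsed {len(dag_bag.dags)} DAGs")

# import_errors maps file path -> traceback for every DAG that failed to
# load; an empty dict means all DAGs are correct.
for path, trace in dag_bag.import_errors.items():
    print(f"broken DAG {path}:\n{trace}")
```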
- name: Test packaging for Ubuntu 18.04, Python 3.6
install:
- ./packaging/portable/ubuntu/pack.sh 18.04 3.6 $TRAVIS_BRANCH
@@ -89,32 +89,44 @@
before_script:
- ./python3/bin_portable/airflow --help # to generate airflow.cfg
- sed -i'.backup' -e 's/^executor.*/executor = LocalExecutor/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^parsing_processes.*/parsing_processes = 1/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^logging_level.*/logging_level = DEBUG/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^sql_alchemy_pool_enabled.*/sql_alchemy_pool_enabled = False/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^dag_dir_list_interval =.*/dag_dir_list_interval = 60/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^parallelism =.*/parallelism = 1/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^sql_alchemy_conn.*/sql_alchemy_conn = mysql:\/\/airflow:airflow@127.0.0.1:6603\/airflow/g' ~/airflow/airflow.cfg
-- ./python3/bin_portable/cwl-airflow init # to init database
-- ./python3/bin_portable/airflow connections --add --conn_id process_report --conn_type http --conn_host localhost --conn_port 3070 --conn_extra "{\"endpoint\":\"/airflow/\"}" # to add process_report connection
-- ./python3/bin_portable/airflow scheduler > /dev/null 2>&1 &
+- sleep 10 # just in case, wait until mysql has created the database
+- cwl-airflow init
+- ./python3/bin_portable/airflow connections add process_report --conn-type http --conn-host localhost --conn-port 3070 --conn-extra "{\"endpoint\":\"/airflow/\"}" # to add process_report connection
+- ./python3/bin_portable/airflow scheduler &
- ./python3/bin_portable/cwl-airflow api > /dev/null 2>&1 &
-script: ./python3/bin_portable/cwl-airflow test --suite workflows/tests/conformance_tests.yaml --spin --range 1
+- sleep 3 # give the scheduler time to parse all DAGs; otherwise the next command fails
+- ./python3/bin_portable/airflow dags unpause resend_results
+script: ./python3/bin_portable/cwl-airflow test --suite workflows/tests/conformance_tests.yaml --spin --range 1 > /dev/null 2>&1

before_install:
- git clone https://github.com/datirium/workflows.git --recursive
- docker pull mysql/mysql-server:5.7
- docker run -v ~/database:/var/lib/mysql -e MYSQL_ROOT_PASSWORD=airflow -e MYSQL_DATABASE=airflow -e MYSQL_USER=airflow -e MYSQL_PASSWORD=airflow -p 6603:3306 -d mysql/mysql-server:5.7 --explicit-defaults-for-timestamp=1
install:
- pip install ".[mysql,crypto]" --constraint ./packaging/constraints/constraints-$TRAVIS_PYTHON_VERSION.txt
- pip install ".[mysql]" --constraint ./packaging/constraints/constraints-$TRAVIS_PYTHON_VERSION.txt
before_script:
- airflow --help # to generate airflow.cfg
- sed -i'.backup' -e 's/^executor.*/executor = LocalExecutor/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^parsing_processes.*/parsing_processes = 1/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^logging_level.*/logging_level = DEBUG/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^sql_alchemy_pool_enabled.*/sql_alchemy_pool_enabled = False/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^dag_dir_list_interval =.*/dag_dir_list_interval = 60/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^parallelism =.*/parallelism = 1/g' ~/airflow/airflow.cfg
- sed -i'.backup' -e 's/^sql_alchemy_conn.*/sql_alchemy_conn = mysql:\/\/airflow:airflow@127.0.0.1:6603\/airflow/g' ~/airflow/airflow.cfg
-- cwl-airflow init # to init database
-- airflow connections --add --conn_id process_report --conn_type http --conn_host localhost --conn_port 3070 --conn_extra "{\"endpoint\":\"/airflow/\"}" # to add process_report connection
-- airflow scheduler > /dev/null 2>&1 &
+- sleep 10 # just in case, wait until mysql has created the database
+- cwl-airflow init
+- airflow connections add process_report --conn-type http --conn-host localhost --conn-port 3070 --conn-extra "{\"endpoint\":\"/airflow/\"}" # to add process_report connection
+- airflow scheduler &
- cwl-airflow api > /dev/null 2>&1 &
-script: cwl-airflow test --suite workflows/tests/conformance_tests.yaml --spin --range $NTEST
+- sleep 3 # give the scheduler time to parse all DAGs; otherwise the next command fails
+- airflow dags unpause resend_results
+script: cwl-airflow test --suite workflows/tests/conformance_tests.yaml --spin --range $NTEST > /dev/null 2>&1
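The connection setup changes from Airflow 1.10's flag style (`airflow connections --add --conn_id ...`) to 2.0's subcommand style (`airflow connections add ... --conn-type ...`). Either way the command stores a row in the connection table; a rough ORM equivalent, assuming Airflow 2.0 and not taken from the cwl-airflow code itself:

```python
# Sketch of what `airflow connections add process_report ...` persists,
# assuming Airflow 2.0's Connection model and session helper.
import json

from airflow.models import Connection
from airflow.utils.session import create_session

conn = Connection(
    conn_id="process_report",
    conn_type="http",
    host="localhost",
    port=3070,
    extra=json.dumps({"endpoint": "/airflow/"}),
)
with create_session() as session:  # commits on successful exit
    session.add(conn)
```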

branches:
only:
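The sed edits in both jobs pin the scheduler to a single-process LocalExecutor setup backed by the MySQL container. A quick way to confirm the edited values took effect is Airflow's own configuration API; a sketch assuming Airflow 2.0's stock section layout (executor and parallelism in [core], the parsing settings in [scheduler]):

```python
# Sketch: read back the sed-edited airflow.cfg through Airflow 2.0's
# configuration API; section names assume the default 2.0 layout.
from airflow.configuration import conf

assert conf.get("core", "executor") == "LocalExecutor"
assert conf.getint("core", "parallelism") == 1
assert conf.getint("scheduler", "parsing_processes") == 1
assert conf.getint("scheduler", "dag_dir_list_interval") == 60
print("airflow.cfg matches the CI expectations")
```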
2 changes: 1 addition & 1 deletion README.md
@@ -8,7 +8,7 @@

# **CWL-Airflow**

-Python package to extend **[Apache-Airflow 1.10.12](https://airflow.apache.org)**
+Python package to extend **[Apache-Airflow 2.0.0](https://airflow.apache.org)**
functionality with **[CWL v1.1](https://www.commonwl.org/v1.1/)** support

## **Cite as**
3 changes: 2 additions & 1 deletion cwl_airflow/components/api/backend.py
@@ -21,6 +21,7 @@
from airflow.utils.state import State
from airflow.utils.timezone import parse as parsedate
from airflow.utils.db import provide_session
+from airflow.utils.types import DagRunType

from cwl_airflow.utilities.helpers import (
get_version,
@@ -200,7 +201,7 @@ def create_dag_run(self, dag_id, run_id, conf, session):
raise ValueError(f"dag_run {run_id} for dag_id {dag_id} already exists")
else:
run_conf = conf if isinstance(conf, dict) else json.loads(conf)
-dag_run = DagRun(dag_id=dag_id, run_id=run_id, conf=run_conf)
+dag_run = DagRun(dag_id=dag_id, run_id=run_id, conf=run_conf, run_type=DagRunType.MANUAL)
session.add(dag_run)
session.commit()
return dag_run
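The one-line change to `create_dag_run` is forced by Airflow 2.0's DagRun model: `run_type` is now a non-nullable column, so a manually constructed run must declare its type explicitly instead of having it inferred from the `run_id` prefix as in 1.10.x. A minimal sketch of the 2.0-style construction (the `dag_id` and `conf` values are placeholders, not cwl-airflow's):

```python
# Sketch, assuming Airflow 2.0: DagRun.run_type is a required column, so
# omitting it fails when the row is inserted.
from airflow.models import DagRun
from airflow.utils.timezone import utcnow
from airflow.utils.types import DagRunType

execution_date = utcnow()
dag_run = DagRun(
    dag_id="my_dag",  # placeholder DAG id
    run_id=DagRun.generate_run_id(DagRunType.MANUAL, execution_date),
    execution_date=execution_date,
    conf={"inputs": {}},  # placeholder conf payload
    run_type=DagRunType.MANUAL,  # required in 2.0; absent in 1.10.x
)
```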
