diff --git a/infra/pipelines/docker/search_timelines.sh b/infra/pipelines/docker/search_timelines.sh
new file mode 100755
index 0000000..2703772
--- /dev/null
+++ b/infra/pipelines/docker/search_timelines.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+set -ex
+
+# Defaults for the job; each can be overridden via the corresponding environment variable.
+DOMINO_JOB_NAME=${JOB_NAME:-historic_pfas_1}
+DOMINO_USERNAMES=${USERNAMES:-"a,b,c"}
+DOMINO_FETCH_PROFILES=${FETCH_PROFILES:-"false"}
+DOMINO_STRIDE_SEC=${STRIDE_SEC:-30}
+DOMINO_WRITE_FORMAT=${WRITE_FORMAT:-parquet}
+DOMINO_S3_FILEPATH=${S3_FILEPATH:-dt-phase1}
+DOMINO_COMPRESSION=${COMPRESSION:-snappy}
+AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-}
+AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-}
+
+echo "Using usernames: $DOMINO_USERNAMES"
+
+# Tear down any previous run of this job (including volumes), then rebuild the images.
+docker-compose -f datastream-docker-compose.yml -p ${DOMINO_JOB_NAME} down -v
+docker-compose -f datastream-docker-compose.yml build # --no-cache
+
+# Launch the data-stream service; any extra script arguments are forwarded to `docker-compose up`.
+JOB_FILE="search_timelines.py" \
+  DOMINO_JOB_NAME=$DOMINO_JOB_NAME \
+  DOMINO_USERNAMES=$DOMINO_USERNAMES \
+  DOMINO_FETCH_PROFILES=$DOMINO_FETCH_PROFILES \
+  DOMINO_STRIDE_SEC=$DOMINO_STRIDE_SEC \
+  DOMINO_WRITE_FORMAT=$DOMINO_WRITE_FORMAT \
+  DOMINO_S3_FILEPATH=$DOMINO_S3_FILEPATH \
+  DOMINO_COMPRESSION=$DOMINO_COMPRESSION \
+  AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \
+  AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \
+  docker-compose -f datastream-docker-compose.yml \
+  -p ${DOMINO_JOB_NAME} \
+  up \
+  data-stream \
+  "$@"
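
A minimal invocation sketch, assuming the script is run from infra/pipelines/docker/ so that datastream-docker-compose.yml resolves, and that AWS credentials are exported by the caller. The job name and usernames below are placeholder values, and the trailing -d is simply forwarded to `docker-compose up` to run detached.

    cd infra/pipelines/docker
    export AWS_ACCESS_KEY_ID=...
    export AWS_SECRET_ACCESS_KEY=...
    JOB_NAME=historic_pfas_1 USERNAMES="alice,bob" FETCH_PROFILES=true ./search_timelines.sh -d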