name: CI
on:
  # push:
  #   branches:
  #     - master
  schedule:
    - cron: '0 18 * * *' # every day at 6 pm UTC
  workflow_dispatch:
    inputs:
      test_name:
        description: 'test(s) to run (example: connect/connect-jms-weblogic-sink connect/connect-http-sink)'
        required: false
        default: ''
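# Example: trigger a run for a single test from the GitHub CLI
# (assuming gh is installed and authenticated against this repository):
#   gh workflow run CI -f test_name="connect/connect-http-sink"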
jobs:
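  # Scheduled runs (empty test_name) go through pre-build -> build; manual runs with a
  # test_name go through execute_one_test instead. post-build always runs at the end.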
  pre-build:
    if: ${{ github.event.inputs.test_name == '' }}
    name: Cleanup resources
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          repository: vdesabou/kafka-docker-playground
          fetch-depth: 0
      - name: Cleanup resources
        run: |
          ./scripts/cleanup-resources.sh
        env:
          AZ_USER: ${{ secrets.AZ_USER }}
          AZ_PASS: ${{ secrets.AZ_PASS }}
          CONFLUENT_CLOUD_EMAIL: ${{ secrets.CONFLUENT_CLOUD_EMAIL }}
          CONFLUENT_CLOUD_PASSWORD: ${{ secrets.CONFLUENT_CLOUD_PASSWORD }}
          ENVIRONMENT: ${{ secrets.ENVIRONMENT }}
          CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
          CLUSTER_REGION: ${{ secrets.CLUSTER_REGION }}
          CLUSTER_CLOUD: ${{ secrets.CLUSTER_CLOUD }}
          CLUSTER_CREDS: ${{ secrets.CLUSTER_CREDS }}
          SCHEMA_REGISTRY_CREDS: ${{ secrets.SCHEMA_REGISTRY_CREDS }}
          SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_ACCOUNT_NAME }}
          SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          GCP_KEYFILE_CONTENT: ${{ secrets.GCP_KEYFILE_CONTENT }}
          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
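  # build: the scheduled daily matrix; each job runs one group of tests against one CP version tag.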
  build:
    if: ${{ github.event.inputs.test_name == '' }}
    runs-on: ubuntu-latest
    needs: pre-build
    name: ${{ matrix.tag }} ${{ matrix.test_list }}
    strategy:
      fail-fast: false
      matrix:
        tag: ["7.1.11","7.2.9","7.3.7","7.4.4","7.5.2"]
        test_list: [
"🚀 connect/connect-servicenow-sink connect/connect-servicenow-source connect/connect-datagen-source",
"🚀 connect/connect-salesforce-bulkapi-sink connect/connect-salesforce-bulkapi-source connect/connect-salesforce-pushtopics-source connect/connect-salesforce-sobject-sink connect/connect-salesforce-cdc-source connect/connect-salesforce-platform-events-sink connect/connect-salesforce-platform-events-source",
"🚀 connect/connect-splunk-sink connect/connect-splunk-source connect/connect-splunk-s2s-source connect/connect-spool-dir-source connect/connect-syslog-source other/connect-override-policy-sftp-sink other/connect-override-policy-sftp-source",
"🚀 connect/connect-minio-s3-sink connect/connect-marketo-source connect/connect-active-mq-sink connect/connect-active-mq-source connect/connect-lenses-active-mq-source connect/connect-cassandra-sink connect/connect-couchbase-sink connect/connect-couchbase-source connect/connect-hbase-sink",
"🚀 connect/connect-jms-tibco-sink connect/connect-jms-tibco-source connect/connect-debezium-mongodb-source connect/connect-debezium-mysql-source connect/connect-debezium-postgresql-source connect/connect-debezium-sqlserver-source connect/connect-elasticsearch-sink connect/connect-datadiode-source-sink connect/connect-jms-sag-um-source connect/connect-jms-sag-um-sink",
"🚀 connect/connect-hdfs2-sink connect/connect-hdfs2-source connect/connect-hdfs3-sink connect/connect-hdfs3-source connect/connect-ibm-mq-sink connect/connect-ibm-mq-source connect/connect-snmp-source",
"🚀 connect/connect-cdc-oracle11-source connect/connect-jdbc-oracle11-sink connect/connect-jdbc-oracle11-source connect/connect-influxdb-sink connect/connect-influxdb-source connect/connect-jdbc-mysql-sink connect/connect-jdbc-mysql-source connect/connect-jdbc-postgresql-sink connect/connect-jdbc-postgresql-source connect/connect-jdbc-sqlserver-sink",
"🚀 connect/connect-jdbc-sqlserver-source connect/connect-jdbc-vertica-sink connect/connect-singlestore-sink connect/connect-jdbc-singlestore-source connect/connect-jms-active-mq-sink connect/connect-jms-solace-sink connect/connect-jms-solace-source connect/connect-mongodb-sink connect/connect-mongodb-source connect/connect-mqtt-sink connect/connect-mqtt-source connect/connect-neo4j-sink connect/connect-omnisci-sink connect/connect-tibco-sink connect/connect-tibco-source",
"🚀 connect/connect-jdbc-oracle12-source connect/connect-jdbc-oracle12-sink",
"🚀 connect/connect-jdbc-oracle19-source connect/connect-jdbc-oracle19-sink connect/connect-jms-oracle19-sink connect/connect-jms-oracle19-source",
"🚀 connect/connect-jdbc-oracle21-source connect/connect-jdbc-oracle21-sink connect/connect-jms-oracle21-sink connect/connect-jms-oracle21-source",
"🚀 connect/connect-rabbitmq-source connect/connect-redis-sink connect/connect-replicator connect/connect-sftp-source connect/connect-solace-sink connect/connect-solace-source",
"🚀 connect/connect-aws-cloudwatch-logs-source connect/connect-aws-cloudwatch-metrics-sink connect/connect-aws-dynamodb-sink connect/connect-aws-kinesis-source connect/connect-aws-lambda-sink connect/connect-aws-redshift-sink connect/connect-jdbc-aws-redshift-source connect/connect-jdbc-aws-redshift-sink connect/connect-sftp-sink",
"🚀 connect/connect-gcp-bigquery-sink connect-jdbc-gcp-bigquery-source connect/connect-gcp-cloud-functions-sink connect/connect-vertica-sink connect/connect-prometheus-sink connect/connect-aws-sqs-source connect/connect-aws-s3-sink connect/connect-aws-s3-source connect/connect-databricks-delta-lake-sink",
"🚀 connect/connect-gcp-pubsub-source connect/connect-gcp-google-pubsub-source connect/connect-gcp-google-pubsub-sink connect/connect-gcp-gcs-sink connect/connect-gcp-gcs-source connect/connect-gcp-bigtable-sink connect/connect-kudu-source connect/connect-kudu-sink",
"🚀 connect/connect-azure-data-lake-storage-gen1-sink connect/connect-azure-data-lake-storage-gen2-sink connect/connect-azure-event-hubs-source connect/connect-azure-cognitive-search-sink connect/connect-azure-functions-sink connect/connect-azure-service-bus-source connect/connect-azure-blob-storage-source",
"🚀 connect/connect-ftps-source connect/connect-ftps-sink connect/connect-rabbitmq-sink connect/connect-amps-source connect/connect-jira-source connect/connect-github-source connect/connect-pivotal-gemfire-sink connect/connect-azure-blob-storage-sink connect/connect-azure-synapse-analytics-sink",
"🚀 connect/connect-http-sink",
"🚀 multi-data-center/replicator-connect",
"🚀 multi-data-center/replicator-executable",
"🚀 multi-data-center/mirrormaker2 ksqldb/materialized-view connect/connect-pagerduty-sink connect/connect-zendesk-source connect/connect-datadog-metrics-sink connect/connect-gcp-spanner-sink connect/connect-gcp-firebase-source connect/connect-gcp-firebase-sink",
"🚀 other/filebeat-to-kafka other/rest-proxy-security-plugin other/tiered-storage-with-aws other/write-logs-to-files other/audit-logs schema-registry/multiple-event-types-in-topic other/broker-schema-validation other/schema-registry-security-plugin multi-data-center/cluster-linking other/secrets-management other/connect-secret-registry other/ldap-authorizer-with-ldap-failover other/schema-format-protobuf other/schema-format-json-schema other/schema-format-avro",
"🚀 ccloud/replicator ccloud/rest-proxy-security-plugin ccloud/schema-registry-security-plugin ccloud/audit-log-connector",
"🚀 connect/connect-cdc-oracle12-source",
"🚀 connect/connect-cdc-oracle19-source",
"🚀 connect/connect-cdc-oracle18-source",
"🚀 connect/connect-cdc-oracle21-source",
"🚀 other/cp-ansible-playground",
"🚀 connect/connect-weblogic-source connect/connect-jms-weblogic-sink connect/connect-jms-weblogic-source",
"🚀 connect/connect-azure-cosmosdb-source connect/connect-azure-cosmosdb-sink connect/connect-jdbc-cockroachdb-source connect/connect-jdbc-ibmdb2-source connect/connect-jdbc-ibmdb2-sink connect/connect-jdbc-sybase-source connect/connect-jdbc-sybase-sink",
"🚀 environment/plaintext environment/2way-ssl environment/kerberos environment/ldap-authorizer-sasl-plain environment/ldap-sasl-plain environment/mdc-kerberos environment/mdc-plaintext environment/mdc-sasl-plain environment/rbac-sasl-plain environment/sasl-plain environment/sasl-scram environment/sasl-ssl environment/ssl_kerberos environment/kraft-plaintext connect/connect-snowflake-sink connect/connect-jdbc-snowflake-source connect/connect-jdbc-snowflake-sink connect/connect-filestream-source connect/connect-filestream-sink connect/connect-filepulse-source connect/connect-jdbc-mariadb-source connect/connect-jdbc-mariadb-sink",
"🚀 connect/connect-mapr-sink",
"🚀 other/syslog-logstash-ksqldb other/mqtt-proxy",
"🚀 connect/connect-jdbc-sap-hana-sink connect/connect-jdbc-sap-hana-source connect/connect-sap-hana-sink"
]
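    # 5 CP tags x 34 test groups = 170 jobs; fail-fast is disabled so one failing group
    # does not cancel the rest of the matrix.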
    steps:
      # - name: Maximize build space
      #   uses: easimon/maximize-build-space@master
      #   with:
      #     root-reserve-mb: 512
      #     swap-size-mb: 1024
      #     remove-dotnet: 'true'
      #     remove-android: 'true'
      #     remove-haskell: 'true'
      #     remove-codeql: 'true'
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          repository: vdesabou/kafka-docker-playground
          fetch-depth: 0
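      # GitHub-hosted runners have limited free disk space; unused toolchains and images
      # are purged up front to make room for the Docker images these tests pull.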
- name: "Free up disk space"
run: |
df -h
sudo apt-get -qq purge build-essential ghc*
sudo apt-get clean
sudo apt-get install expect fzf coreutils -y
docker system prune -af
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
sudo docker rmi $(docker image ls -aq) >/dev/null 2>&1 || true
sudo rm -rf \
/usr/share/dotnet /usr/local/lib/android /opt/ghc \
/usr/local/share/powershell /usr/share/swift /usr/local/.ghcup \
/usr/lib/jvm || true
echo "some directories deleted"
sudo apt install aptitude -y >/dev/null 2>&1
sudo aptitude purge aria2 ansible azure-cli shellcheck rpm xorriso zsync \
esl-erlang firefox gfortran-8 gfortran-9 google-chrome-stable \
google-cloud-sdk imagemagick \
libmagickcore-dev libmagickwand-dev libmagic-dev ant ant-optional kubectl \
mercurial apt-transport-https mono-complete libmysqlclient \
unixodbc-dev yarn chrpath libssl-dev libxft-dev \
libfreetype6 libfreetype6-dev libfontconfig1 libfontconfig1-dev \
snmp pollinate libpq-dev postgresql-client powershell ruby-full \
sphinxsearch subversion mongodb-org azure-cli microsoft-edge-stable \
-y -f >/dev/null 2>&1
sudo aptitude purge google-cloud-sdk -f -y >/dev/null 2>&1
sudo aptitude purge microsoft-edge-stable -f -y >/dev/null 2>&1 || true
sudo apt purge microsoft-edge-stable -f -y >/dev/null 2>&1 || true
sudo aptitude purge '~n ^mysql' -f -y >/dev/null 2>&1
sudo aptitude purge '~n ^php' -f -y >/dev/null 2>&1
sudo aptitude purge '~n ^dotnet' -f -y >/dev/null 2>&1
sudo apt-get autoremove -y >/dev/null 2>&1
sudo apt-get autoclean -y >/dev/null 2>&1
echo "some packages purged"
df -h
      - name: Decrypt secrets.tar
        run: |
          ./.github/scripts/decrypt_secret.sh
          tar xvf secrets.tar
          rm secrets.tar
          mkdir -p $HOME/.aws
          mv aws_credentials_with_assuming_iam_role $HOME/.aws/credentials-with-assuming-iam-role
          mv aws_credentials_aws_account_with_assume_role $HOME/.aws/credentials_aws_account_with_assume_role
          chmod -R a+rw $HOME/.aws
          mkdir -p $HOME/.confluent
          echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
        env:
          SECRETS_ENCRYPTION_PASSWORD: ${{ secrets.SECRETS_ENCRYPTION_PASSWORD }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and Test
        run: bash scripts/run-tests.sh "${{ matrix.test_list }}" "${{ matrix.tag }}"
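        # arguments: the space-separated list of tests for this matrix job, then the CP version tag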
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
          AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_ACCESS_KEY_ID }}
          AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_SECRET_ACCESS_KEY }}
          AWS_STS_ROLE_ARN: ${{ secrets.AWS_STS_ROLE_ARN }}
          AZ_USER: ${{ secrets.AZ_USER }}
          AZ_PASS: ${{ secrets.AZ_PASS }}
          AZURE_TENANT_NAME: ${{ secrets.AZURE_TENANT_NAME }}
          CONFLUENT_CLOUD_EMAIL: ${{ secrets.CONFLUENT_CLOUD_EMAIL }}
          CONFLUENT_CLOUD_PASSWORD: ${{ secrets.CONFLUENT_CLOUD_PASSWORD }}
          ENVIRONMENT: ${{ secrets.ENVIRONMENT }}
          CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
          CLUSTER_REGION: ${{ secrets.CLUSTER_REGION }}
          CLUSTER_CLOUD: ${{ secrets.CLUSTER_CLOUD }}
          CLUSTER_CREDS: ${{ secrets.CLUSTER_CREDS }}
          SCHEMA_REGISTRY_CREDS: ${{ secrets.SCHEMA_REGISTRY_CREDS }}
          CONFLUENT_LICENSE: ${{ secrets.CONFLUENT_LICENSE }}
          SALESFORCE_USERNAME: ${{ secrets.SALESFORCE_USERNAME }}
          SALESFORCE_PASSWORD: ${{ secrets.SALESFORCE_PASSWORD }}
          SALESFORCE_CONSUMER_KEY: ${{ secrets.SALESFORCE_CONSUMER_KEY }}
          SALESFORCE_CONSUMER_PASSWORD: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD }}
          SALESFORCE_SECURITY_TOKEN: ${{ secrets.SALESFORCE_SECURITY_TOKEN }}
          SALESFORCE_INSTANCE: ${{ secrets.SALESFORCE_INSTANCE }}
          SALESFORCE_USERNAME_ACCOUNT2: ${{ secrets.SALESFORCE_USERNAME_ACCOUNT2 }}
          SALESFORCE_PASSWORD_ACCOUNT2: ${{ secrets.SALESFORCE_PASSWORD_ACCOUNT2 }}
          SALESFORCE_SECURITY_TOKEN_ACCOUNT2: ${{ secrets.SALESFORCE_SECURITY_TOKEN_ACCOUNT2 }}
          SALESFORCE_CONSUMER_KEY_ACCOUNT2: ${{ secrets.SALESFORCE_CONSUMER_KEY_ACCOUNT2 }}
          SALESFORCE_CONSUMER_PASSWORD_ACCOUNT2: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD_ACCOUNT2 }}
          SALESFORCE_CONSUMER_KEY_WITH_JWT: ${{ secrets.SALESFORCE_CONSUMER_KEY_WITH_JWT }}
          SALESFORCE_CONSUMER_PASSWORD_WITH_JWT: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD_WITH_JWT }}
          DD_API_KEY: ${{ secrets.DD_API_KEY }}
          DD_APP_KEY: ${{ secrets.DD_APP_KEY }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
          JIRA_URL: ${{ secrets.JIRA_URL }}
          JIRA_USERNAME: ${{ secrets.JIRA_USERNAME }}
          JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
          MARKETO_ENDPOINT_URL: ${{ secrets.MARKETO_ENDPOINT_URL }}
          MARKETO_CLIENT_ID: ${{ secrets.MARKETO_CLIENT_ID }}
          MARKETO_CLIENT_SECRET: ${{ secrets.MARKETO_CLIENT_SECRET }}
          PAGERDUTY_USER_EMAIL: ${{ secrets.PAGERDUTY_USER_EMAIL }}
          PAGERDUTY_API_KEY: ${{ secrets.PAGERDUTY_API_KEY }}
          PAGERDUTY_SERVICE_ID: ${{ secrets.PAGERDUTY_SERVICE_ID }}
          CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_KEY: ${{ secrets.CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_KEY }}
          CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_SECRET: ${{ secrets.CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_SECRET }}
          SERVICENOW_URL: ${{ secrets.SERVICENOW_URL }}
          SERVICENOW_PASSWORD: ${{ secrets.SERVICENOW_PASSWORD }}
          SERVICENOW_DEVELOPER_USERNAME: ${{ secrets.SERVICENOW_DEVELOPER_USERNAME }}
          SERVICENOW_DEVELOPER_PASSWORD: ${{ secrets.SERVICENOW_DEVELOPER_PASSWORD }}
          SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_ACCOUNT_NAME }}
          SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          ZENDESK_URL: ${{ secrets.ZENDESK_URL }}
          ZENDESK_USERNAME: ${{ secrets.ZENDESK_USERNAME }}
          ZENDESK_PASSWORD: ${{ secrets.ZENDESK_PASSWORD }}
          CONNECTOR_GITHUB_ACCESS_TOKEN: ${{ secrets.CONNECTOR_GITHUB_ACCESS_TOKEN }}
          CI_GITHUB_TOKEN: ${{ secrets.CI_GITHUB_TOKEN }}
          AUDIT_LOG_CLUSTER_BOOTSTRAP_SERVERS: ${{ secrets.AUDIT_LOG_CLUSTER_BOOTSTRAP_SERVERS }}
          AUDIT_LOG_CLUSTER_API_KEY: ${{ secrets.AUDIT_LOG_CLUSTER_API_KEY }}
          AUDIT_LOG_CLUSTER_API_SECRET: ${{ secrets.AUDIT_LOG_CLUSTER_API_SECRET }}
          NGROK_AUTH_TOKEN: ${{ secrets.NGROK_AUTH_TOKEN }}
          DATABRICKS_AWS_BUCKET_NAME: ${{ secrets.DATABRICKS_AWS_BUCKET_NAME }}
          DATABRICKS_AWS_BUCKET_REGION: ${{ secrets.DATABRICKS_AWS_BUCKET_REGION }}
          DATABRICKS_AWS_STAGING_S3_ACCESS_KEY_ID: ${{ secrets.DATABRICKS_AWS_STAGING_S3_ACCESS_KEY_ID }}
          DATABRICKS_AWS_STAGING_S3_SECRET_ACCESS_KEY: ${{ secrets.DATABRICKS_AWS_STAGING_S3_SECRET_ACCESS_KEY }}
          DATABRICKS_SERVER_HOSTNAME: ${{ secrets.DATABRICKS_SERVER_HOSTNAME }}
          DATABRICKS_HTTP_PATH: ${{ secrets.DATABRICKS_HTTP_PATH }}
          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
          ORACLE_CONTAINER_REGISTRY_USERNAME: ${{ secrets.ORACLE_CONTAINER_REGISTRY_USERNAME }}
          ORACLE_CONTAINER_REGISTRY_PASSWORD: ${{ secrets.ORACLE_CONTAINER_REGISTRY_PASSWORD }}
          GCP_KEYFILE_CONTENT: ${{ secrets.GCP_KEYFILE_CONTENT }}
          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
          HPE_MAPR_EMAIL: ${{ secrets.HPE_MAPR_EMAIL }}
          HPE_MAPR_TOKEN: ${{ secrets.HPE_MAPR_TOKEN }}
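  # execute_one_test: same steps and environment as build, but the matrix contains only
  # the test(s) supplied via the workflow_dispatch input.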
  execute_one_test:
    if: ${{ github.event.inputs.test_name != '' }}
    runs-on: ubuntu-latest
    name: ${{ matrix.tag }} ${{ matrix.test_list }}
    strategy:
      fail-fast: false
      matrix:
        tag: ["7.1.11","7.2.9","7.3.7","7.4.4","7.5.2"]
        test_list: [
          "🚀 ${{ github.event.inputs.test_name }}"
        ]
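    # the manually supplied test list still runs against every CP version tag above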
    steps:
      # - name: Maximize build space
      #   uses: easimon/maximize-build-space@master
      #   with:
      #     root-reserve-mb: 512
      #     swap-size-mb: 1024
      #     remove-dotnet: 'true'
      #     remove-android: 'true'
      #     remove-haskell: 'true'
      #     remove-codeql: 'true'
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          repository: vdesabou/kafka-docker-playground
          fetch-depth: 0
- name: "Free up disk space"
run: |
df -h
sudo apt-get -qq purge build-essential ghc*
sudo apt-get clean
sudo apt-get install expect fzf coreutils -y
docker system prune -af
sudo rm -rf /usr/share/dotnet
sudo rm -rf /opt/ghc
sudo rm -rf "/usr/local/share/boost"
sudo rm -rf "$AGENT_TOOLSDIRECTORY"
sudo docker rmi $(docker image ls -aq) >/dev/null 2>&1 || true
sudo rm -rf \
/usr/share/dotnet /usr/local/lib/android /opt/ghc \
/usr/local/share/powershell /usr/share/swift /usr/local/.ghcup \
/usr/lib/jvm || true
echo "some directories deleted"
sudo apt install aptitude -y >/dev/null 2>&1
sudo aptitude purge aria2 ansible azure-cli shellcheck rpm xorriso zsync \
esl-erlang firefox gfortran-8 gfortran-9 google-chrome-stable \
google-cloud-sdk imagemagick \
libmagickcore-dev libmagickwand-dev libmagic-dev ant ant-optional kubectl \
mercurial apt-transport-https mono-complete libmysqlclient \
unixodbc-dev yarn chrpath libssl-dev libxft-dev \
libfreetype6 libfreetype6-dev libfontconfig1 libfontconfig1-dev \
snmp pollinate libpq-dev postgresql-client powershell ruby-full \
sphinxsearch subversion mongodb-org azure-cli microsoft-edge-stable \
-y -f >/dev/null 2>&1
sudo aptitude purge google-cloud-sdk -f -y >/dev/null 2>&1
sudo aptitude purge microsoft-edge-stable -f -y >/dev/null 2>&1 || true
sudo apt purge microsoft-edge-stable -f -y >/dev/null 2>&1 || true
sudo aptitude purge '~n ^mysql' -f -y >/dev/null 2>&1
sudo aptitude purge '~n ^php' -f -y >/dev/null 2>&1
sudo aptitude purge '~n ^dotnet' -f -y >/dev/null 2>&1
sudo apt-get autoremove -y >/dev/null 2>&1
sudo apt-get autoclean -y >/dev/null 2>&1
echo "some packages purged"
df -h
      - name: Decrypt secrets.tar
        run: |
          ./.github/scripts/decrypt_secret.sh
          tar xvf secrets.tar
          rm secrets.tar
          mkdir -p $HOME/.aws
          mv aws_credentials_with_assuming_iam_role $HOME/.aws/credentials-with-assuming-iam-role
          mv aws_credentials_aws_account_with_assume_role $HOME/.aws/credentials_aws_account_with_assume_role
          chmod -R a+rw $HOME/.aws
          mkdir -p $HOME/.confluent
          echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
        env:
          SECRETS_ENCRYPTION_PASSWORD: ${{ secrets.SECRETS_ENCRYPTION_PASSWORD }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
      - name: Build and Test
        run: bash scripts/run-tests.sh "${{ matrix.test_list }}" "${{ matrix.tag }}"
        env:
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
          AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_ACCESS_KEY_ID }}
          AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_ACCOUNT_WITH_ASSUME_ROLE_AWS_SECRET_ACCESS_KEY }}
          AWS_STS_ROLE_ARN: ${{ secrets.AWS_STS_ROLE_ARN }}
          AZ_USER: ${{ secrets.AZ_USER }}
          AZ_PASS: ${{ secrets.AZ_PASS }}
          AZURE_TENANT_NAME: ${{ secrets.AZURE_TENANT_NAME }}
          CONFLUENT_CLOUD_EMAIL: ${{ secrets.CONFLUENT_CLOUD_EMAIL }}
          CONFLUENT_CLOUD_PASSWORD: ${{ secrets.CONFLUENT_CLOUD_PASSWORD }}
          ENVIRONMENT: ${{ secrets.ENVIRONMENT }}
          CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
          CLUSTER_REGION: ${{ secrets.CLUSTER_REGION }}
          CLUSTER_CLOUD: ${{ secrets.CLUSTER_CLOUD }}
          CLUSTER_CREDS: ${{ secrets.CLUSTER_CREDS }}
          SCHEMA_REGISTRY_CREDS: ${{ secrets.SCHEMA_REGISTRY_CREDS }}
          CONFLUENT_LICENSE: ${{ secrets.CONFLUENT_LICENSE }}
          SALESFORCE_USERNAME: ${{ secrets.SALESFORCE_USERNAME }}
          SALESFORCE_PASSWORD: ${{ secrets.SALESFORCE_PASSWORD }}
          SALESFORCE_CONSUMER_KEY: ${{ secrets.SALESFORCE_CONSUMER_KEY }}
          SALESFORCE_CONSUMER_PASSWORD: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD }}
          SALESFORCE_SECURITY_TOKEN: ${{ secrets.SALESFORCE_SECURITY_TOKEN }}
          SALESFORCE_INSTANCE: ${{ secrets.SALESFORCE_INSTANCE }}
          SALESFORCE_USERNAME_ACCOUNT2: ${{ secrets.SALESFORCE_USERNAME_ACCOUNT2 }}
          SALESFORCE_PASSWORD_ACCOUNT2: ${{ secrets.SALESFORCE_PASSWORD_ACCOUNT2 }}
          SALESFORCE_SECURITY_TOKEN_ACCOUNT2: ${{ secrets.SALESFORCE_SECURITY_TOKEN_ACCOUNT2 }}
          SALESFORCE_CONSUMER_KEY_ACCOUNT2: ${{ secrets.SALESFORCE_CONSUMER_KEY_ACCOUNT2 }}
          SALESFORCE_CONSUMER_PASSWORD_ACCOUNT2: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD_ACCOUNT2 }}
          SALESFORCE_CONSUMER_KEY_WITH_JWT: ${{ secrets.SALESFORCE_CONSUMER_KEY_WITH_JWT }}
          SALESFORCE_CONSUMER_PASSWORD_WITH_JWT: ${{ secrets.SALESFORCE_CONSUMER_PASSWORD_WITH_JWT }}
          DD_API_KEY: ${{ secrets.DD_API_KEY }}
          DD_APP_KEY: ${{ secrets.DD_APP_KEY }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
          JIRA_URL: ${{ secrets.JIRA_URL }}
          JIRA_USERNAME: ${{ secrets.JIRA_USERNAME }}
          JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }}
          MARKETO_ENDPOINT_URL: ${{ secrets.MARKETO_ENDPOINT_URL }}
          MARKETO_CLIENT_ID: ${{ secrets.MARKETO_CLIENT_ID }}
          MARKETO_CLIENT_SECRET: ${{ secrets.MARKETO_CLIENT_SECRET }}
          PAGERDUTY_USER_EMAIL: ${{ secrets.PAGERDUTY_USER_EMAIL }}
          PAGERDUTY_API_KEY: ${{ secrets.PAGERDUTY_API_KEY }}
          PAGERDUTY_SERVICE_ID: ${{ secrets.PAGERDUTY_SERVICE_ID }}
          CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_KEY: ${{ secrets.CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_KEY }}
          CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_SECRET: ${{ secrets.CCLOUD_REST_PROXY_SECURITY_PLUGIN_API_SECRET }}
          SERVICENOW_URL: ${{ secrets.SERVICENOW_URL }}
          SERVICENOW_PASSWORD: ${{ secrets.SERVICENOW_PASSWORD }}
          SERVICENOW_DEVELOPER_USERNAME: ${{ secrets.SERVICENOW_DEVELOPER_USERNAME }}
          SERVICENOW_DEVELOPER_PASSWORD: ${{ secrets.SERVICENOW_DEVELOPER_PASSWORD }}
          SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_ACCOUNT_NAME }}
          SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          ZENDESK_URL: ${{ secrets.ZENDESK_URL }}
          ZENDESK_USERNAME: ${{ secrets.ZENDESK_USERNAME }}
          ZENDESK_PASSWORD: ${{ secrets.ZENDESK_PASSWORD }}
          CONNECTOR_GITHUB_ACCESS_TOKEN: ${{ secrets.CONNECTOR_GITHUB_ACCESS_TOKEN }}
          CI_GITHUB_TOKEN: ${{ secrets.CI_GITHUB_TOKEN }}
          AUDIT_LOG_CLUSTER_BOOTSTRAP_SERVERS: ${{ secrets.AUDIT_LOG_CLUSTER_BOOTSTRAP_SERVERS }}
          AUDIT_LOG_CLUSTER_API_KEY: ${{ secrets.AUDIT_LOG_CLUSTER_API_KEY }}
          AUDIT_LOG_CLUSTER_API_SECRET: ${{ secrets.AUDIT_LOG_CLUSTER_API_SECRET }}
          NGROK_AUTH_TOKEN: ${{ secrets.NGROK_AUTH_TOKEN }}
          DATABRICKS_AWS_BUCKET_NAME: ${{ secrets.DATABRICKS_AWS_BUCKET_NAME }}
          DATABRICKS_AWS_BUCKET_REGION: ${{ secrets.DATABRICKS_AWS_BUCKET_REGION }}
          DATABRICKS_AWS_STAGING_S3_ACCESS_KEY_ID: ${{ secrets.DATABRICKS_AWS_STAGING_S3_ACCESS_KEY_ID }}
          DATABRICKS_AWS_STAGING_S3_SECRET_ACCESS_KEY: ${{ secrets.DATABRICKS_AWS_STAGING_S3_SECRET_ACCESS_KEY }}
          DATABRICKS_SERVER_HOSTNAME: ${{ secrets.DATABRICKS_SERVER_HOSTNAME }}
          DATABRICKS_HTTP_PATH: ${{ secrets.DATABRICKS_HTTP_PATH }}
          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
          ORACLE_CONTAINER_REGISTRY_USERNAME: ${{ secrets.ORACLE_CONTAINER_REGISTRY_USERNAME }}
          ORACLE_CONTAINER_REGISTRY_PASSWORD: ${{ secrets.ORACLE_CONTAINER_REGISTRY_PASSWORD }}
          GCP_KEYFILE_CONTENT: ${{ secrets.GCP_KEYFILE_CONTENT }}
          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
          HPE_MAPR_EMAIL: ${{ secrets.HPE_MAPR_EMAIL }}
          HPE_MAPR_TOKEN: ${{ secrets.HPE_MAPR_TOKEN }}
  post-build:
    name: Cleanup resources and Update README
    runs-on: ubuntu-latest
    if: always()
    needs: [pre-build, build, execute_one_test]
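    # if: always() ensures cleanup and the README update run even when the build jobs fail or are skipped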
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          repository: vdesabou/kafka-docker-playground
          fetch-depth: 0
          # submodules: recursive
          ssh-key: ${{ secrets.GH_SSH_KEY_FILE }}
          ssh-strict: 'false'
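      # checked out with an SSH deploy key, presumably so that changes (e.g. the updated README)
      # can be pushed back to the repository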
      - name: Decrypt secrets.tar
        run: |
          ./.github/scripts/decrypt_secret.sh
          tar xvf secrets.tar
          rm secrets.tar
          mkdir -p $HOME/.aws
          chmod -R a+rw $HOME/.aws
          mkdir -p $HOME/.confluent
          echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin
        env:
          SECRETS_ENCRYPTION_PASSWORD: ${{ secrets.SECRETS_ENCRYPTION_PASSWORD }}
          DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
          DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}
      - name: Cleanup resources
        run: |
          ./scripts/cleanup-resources.sh "--no-wait"
        env:
          AZ_USER: ${{ secrets.AZ_USER }}
          AZ_PASS: ${{ secrets.AZ_PASS }}
          CONFLUENT_CLOUD_EMAIL: ${{ secrets.CONFLUENT_CLOUD_EMAIL }}
          CONFLUENT_CLOUD_PASSWORD: ${{ secrets.CONFLUENT_CLOUD_PASSWORD }}
          ENVIRONMENT: ${{ secrets.ENVIRONMENT }}
          CLUSTER_NAME: ${{ secrets.CLUSTER_NAME }}
          CLUSTER_REGION: ${{ secrets.CLUSTER_REGION }}
          CLUSTER_CLOUD: ${{ secrets.CLUSTER_CLOUD }}
          CLUSTER_CREDS: ${{ secrets.CLUSTER_CREDS }}
          SCHEMA_REGISTRY_CREDS: ${{ secrets.SCHEMA_REGISTRY_CREDS }}
          SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_ACCOUNT_NAME }}
          SNOWFLAKE_USERNAME: ${{ secrets.SNOWFLAKE_USERNAME }}
          SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
          GCP_KEYFILE_CONTENT: ${{ secrets.GCP_KEYFILE_CONTENT }}
          GCP_PROJECT: ${{ secrets.GCP_PROJECT }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
      - name: Update README
        run: |
          ./scripts/update-readme.sh "7.1.11 7.2.9 7.3.7 7.4.4 7.5.2"
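        # keep this version list in sync with the matrix `tag` lists above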
        env:
          GH_TOKEN: ${{ secrets.CI_GITHUB_TOKEN }}
          AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          AWS_REGION: ${{ secrets.AWS_REGION }}
      - name: Pushes content.md
        uses: dmnemec/copy_file_to_another_repo_action@main
        env:
          API_TOKEN_GITHUB: ${{ secrets.CI_GITHUB_TOKEN }}
        with:
          source_file: './docs/content.md'
          destination_repo: 'vdesabou/kafka-docker-playground-docs'
          destination_folder: 'docs'
          user_email: 'vincent.desaboulin@gmail.com'
          user_name: 'vdesabou'
          commit_message: 'updating with latest versions'
      - name: Pushes introduction.md
        uses: dmnemec/copy_file_to_another_repo_action@main
        env:
          API_TOKEN_GITHUB: ${{ secrets.CI_GITHUB_TOKEN }}
        with:
          source_file: './docs/introduction.md'
          destination_repo: 'vdesabou/kafka-docker-playground-docs'
          destination_folder: 'docs'
          user_email: 'vincent.desaboulin@gmail.com'
          user_name: 'vdesabou'
          commit_message: 'updating with latest versions'