@@ -8,7 +8,7 @@ spec:
spec:
containers:
- name: start-pyspark-job
-          image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/tools:1.0.0-stackable25.3.0
# N.B. it is possible for the scheduler to report that a DAG exists, only for the worker task to fail if a pod is unexpectedly
# restarted. Additionally, the db-init job takes a few minutes to complete before the cluster is deployed. The wait/watch steps
# below are not "water-tight" but add a layer of stability by at least ensuring that the db is initialized and ready and that
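For orientation, here is a minimal sketch of the kind of wait/watch step the comment above describes. The actual steps are outside the visible part of this hunk; the job name, webserver address, credentials and DAG id below are illustrative assumptions, not values taken from this PR.

# wait until the Airflow metadata database has been initialised (job name assumed)
kubectl wait --for=condition=complete --timeout=30m job/airflow-db-init
# then poll the Airflow REST API until the scheduler reports the DAG before triggering it
until curl -fs -u admin:adminadmin http://airflow-webserver:8080/api/v1/dags/sparkapp_dag > /dev/null; do
  echo "Waiting for DAG to be registered"; sleep 5
done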
@@ -8,7 +8,7 @@ spec:
spec:
containers:
- name: start-date-job
-          image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/tools:1.0.0-stackable25.3.0
# N.B. it is possible for the scheduler to report that a DAG exists, only for the worker task to fail if a pod is unexpectedly
# restarted. Additionally, the db-init job takes a few minutes to complete before the cluster is deployed. The wait/watch steps
# below are not "water-tight" but add a layer of stability by at least ensuring that the db is initialized and ready and that
@@ -9,11 +9,11 @@ spec:
serviceAccountName: demo-serviceaccount
initContainers:
- name: wait-for-kafka
-          image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/tools:1.0.0-stackable25.3.0
command: ["bash", "-c", "echo 'Waiting for all kafka brokers to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/instance=kafka -l app.kubernetes.io/name=kafka"]
containers:
- name: create-nifi-ingestion-job
-          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable25.3.0
command:
- bash
- -euo
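As an aside, the label selection that this wait-for-kafka init container relies on can be sanity-checked up front with a plain kubectl get. A small usage sketch, assuming the demo runs in the current namespace:

# list the broker pods the wait command above blocks on
kubectl get pod -l app.kubernetes.io/instance=kafka -l app.kubernetes.io/name=kafka
# the init container then simply waits for the same selection to become Ready
kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/instance=kafka -l app.kubernetes.io/name=kafka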
@@ -12,11 +12,11 @@ spec:
serviceAccountName: demo-serviceaccount
initContainers:
- name: wait-for-kafka
-          image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/tools:1.0.0-stackable25.3.0
command: ["bash", "-c", "echo 'Waiting for all kafka brokers to be ready' && kubectl wait --for=condition=ready --timeout=30m pod -l app.kubernetes.io/name=kafka -l app.kubernetes.io/instance=kafka"]
containers:
- name: create-spark-ingestion-job
-          image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/tools:1.0.0-stackable25.3.0
command: ["bash", "-c", "echo 'Submitting Spark job' && kubectl apply -f /tmp/manifest/spark-ingestion-job.yaml"]
volumeMounts:
- name: manifest
@@ -9,11 +9,11 @@ spec:
serviceAccountName: demo-serviceaccount
initContainers:
- name: wait-for-testdata
-          image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/tools:1.0.0-stackable25.3.0
command: ["bash", "-c", "echo 'Waiting for job load-test-data to finish' && kubectl wait --for=condition=complete --timeout=30m job/load-test-data"]
containers:
- name: create-tables-in-trino
-          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable25.3.0
command: ["bash", "-c", "python -u /tmp/script/script.py"]
volumeMounts:
- name: script
4 changes: 2 additions & 2 deletions demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml
@@ -8,8 +8,8 @@ spec:
spec:
containers:
- name: setup-superset
-          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
-          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"]
+          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable25.3.0
+          command: ["bash", "-c", "curl -o superset-assets.zip https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip && python -u /tmp/script/script.py"]
volumeMounts:
- name: script
mountPath: /tmp/script
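Because the Superset asset bundle is now pulled from the release-25.3 branch instead of main, a quick pre-flight check of the pinned URL can catch a bad pin before the job runs. A hedged sketch:

# expect HTTP 200 from the pinned asset location (HEAD request only)
curl -fsI https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/data-lakehouse-iceberg-trino-spark/superset-assets.zip > /dev/null \
  && echo "superset-assets.zip reachable on release-25.3" \
  || echo "superset-assets.zip missing on release-25.3"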
72 changes: 36 additions & 36 deletions demos/demos-v2.yaml
@@ -7,10 +7,10 @@ demos:
- airflow
- job-scheduling
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/01-airflow-spark-clusterrole.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/02-airflow-spark-clusterrolebinding.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/airflow-scheduled-job/01-airflow-spark-clusterrole.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/airflow-scheduled-job/02-airflow-spark-clusterrolebinding.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/airflow-scheduled-job/03-enable-and-run-spark-dag.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/airflow-scheduled-job/04-enable-and-run-date-dag.yaml
supportedNamespaces: []
resourceRequests:
cpu: 2401m
@@ -24,8 +24,8 @@ demos:
- hdfs
- cycling-tripdata
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/hbase-hdfs-load-cycling-data/distcp-cycling-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/hbase-hdfs-load-cycling-data/create-hfile-and-import-to-hbase.yaml
supportedNamespaces: []
resourceRequests:
cpu: "3"
@@ -43,9 +43,9 @@ demos:
- opa
- keycloak
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/end-to-end-security/create-trino-tables.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/end-to-end-security/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/end-to-end-security/create-spark-report.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/end-to-end-security/create-trino-tables.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/end-to-end-security/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/end-to-end-security/create-spark-report.yaml
supportedNamespaces: []
resourceRequests:
cpu: 9000m
@@ -64,10 +64,10 @@ demos:
- s3
- earthquakes
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/nifi-kafka-druid-earthquake-data/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/nifi-kafka-druid-earthquake-data/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/nifi-kafka-druid-earthquake-data/create-druid-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/nifi-kafka-druid-earthquake-data/setup-superset.yaml
supportedNamespaces: ["default"]
resourceRequests:
cpu: 8700m
@@ -86,10 +86,10 @@ demos:
- s3
- water-levels
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/nifi-kafka-druid-water-level-data/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/nifi-kafka-druid-water-level-data/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/nifi-kafka-druid-water-level-data/create-druid-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/nifi-kafka-druid-water-level-data/setup-superset.yaml
supportedNamespaces: ["default"]
resourceRequests:
cpu: 8900m
@@ -106,10 +106,10 @@ demos:
- s3
- ny-taxi-data
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/spark-k8s-anomaly-detection-taxi-data/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/spark-k8s-anomaly-detection-taxi-data/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/spark-k8s-anomaly-detection-taxi-data/create-spark-anomaly-detection-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/spark-k8s-anomaly-detection-taxi-data/setup-superset.yaml
supportedNamespaces: []
resourceRequests:
cpu: 6400m
@@ -141,9 +141,9 @@ demos:
- s3
- ny-taxi-data
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/load-test-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/create-table-in-trino.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/trino-taxi-data/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/trino-taxi-data/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/trino-taxi-data/create-table-in-trino.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/trino-taxi-data/setup-superset.yaml
supportedNamespaces: []
resourceRequests:
cpu: 6800m
@@ -166,12 +166,12 @@ demos:
- water-levels
- earthquakes
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/load-test-data.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/data-lakehouse-iceberg-trino-spark/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/data-lakehouse-iceberg-trino-spark/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/data-lakehouse-iceberg-trino-spark/create-trino-tables.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/data-lakehouse-iceberg-trino-spark/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/data-lakehouse-iceberg-trino-spark/create-spark-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/data-lakehouse-iceberg-trino-spark/setup-superset.yaml
supportedNamespaces: ["default"]
resourceRequests:
cpu: "80"
@@ -187,7 +187,7 @@ demos:
- pyspark
- ny-taxi-data
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/jupyterhub-pyspark-hdfs-anomaly-detection-taxi-data/load-test-data.yaml
supportedNamespaces: []
resourceRequests:
cpu: 3350m
@@ -204,7 +204,7 @@ demos:
- vector
- zookeeper
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/logging/zookeeper.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/logging/zookeeper.yaml
supportedNamespaces: []
resourceRequests:
cpu: 6500m
@@ -220,9 +220,9 @@ demos:
- grafana-dashboards
- zookeeper
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/serviceaccount.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/create-timescale-tables.yaml
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/signal-processing/create-nifi-ingestion-job.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/signal-processing/serviceaccount.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/signal-processing/create-timescale-tables.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/signal-processing/create-nifi-ingestion-job.yaml
supportedNamespaces: []
resourceRequests:
cpu: "3"
@@ -238,7 +238,7 @@ demos:
- spark
- S3
manifests:
-      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/main/demos/jupyterhub-keycloak/load-gas-data.yaml
+      - plainYaml: https://raw.githubusercontent.com/stackabletech/demos/release-25.3/demos/jupyterhub-keycloak/load-gas-data.yaml
supportedNamespaces: []
resourceRequests:
cpu: 6400m
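With every plainYaml reference in demos-v2.yaml now pinned to release-25.3 instead of main, a small loop can confirm that each pinned manifest actually exists on that branch. A sketch, assuming curl is available and the command is run from a checkout of this repository:

# check every pinned manifest URL referenced in demos/demos-v2.yaml
grep -oE 'https://raw\.githubusercontent\.com/stackabletech/demos/release-25\.3/[^ ]+' demos/demos-v2.yaml \
  | while read -r url; do
      curl -fsI "$url" > /dev/null || echo "missing: $url"
    done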
4 changes: 2 additions & 2 deletions demos/end-to-end-security/create-spark-report.yaml
@@ -12,7 +12,7 @@ spec:
serviceAccountName: demo-serviceaccount
initContainers:
- name: wait-for-trino-tables
-          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable25.3.0
command:
- bash
- -euo
@@ -23,7 +23,7 @@ spec:
kubectl wait --timeout=30m --for=condition=complete job/create-tables-in-trino
containers:
- name: create-spark-report
-          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable25.3.0
command:
- bash
- -euo
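If the wait-for-trino-tables init container ever runs into its 30-minute timeout, the quickest way to see why is to inspect the job it waits on. A hedged example, assuming the demo's default namespace:

kubectl get job create-tables-in-trino              # shows completions and failures
kubectl logs job/create-tables-in-trino --tail=50   # last lines of the table-creation script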
2 changes: 1 addition & 1 deletion demos/end-to-end-security/create-trino-tables.yaml
@@ -8,7 +8,7 @@ spec:
spec:
containers:
- name: create-tables-in-trino
-          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable25.3.0
command: ["bash", "-c", "python -u /tmp/script/script.py"]
volumeMounts:
- name: script
@@ -9,7 +9,7 @@ spec:
spec:
containers:
- name: create-hfile-and-import-to-hbase
-          image: oci.stackable.tech/sdp/hbase:2.6.1-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/hbase:2.6.1-stackable25.3.0
env:
- name: HADOOP_USER_NAME
value: stackable
@@ -8,7 +8,7 @@ spec:
spec:
containers:
- name: load-ny-taxi-data
-          image: oci.stackable.tech/sdp/hadoop:3.4.1-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/hadoop:3.4.1-stackable25.3.0
# yamllint disable rule:line-length
command: ["bash", "-c", "/stackable/hadoop/bin/hdfs dfs -mkdir -p /ny-taxi-data/raw \
&& cd /tmp \
@@ -9,7 +9,7 @@ spec:
serviceAccountName: demo-serviceaccount
initContainers:
- name: wait-for-druid-coordinator
-          image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/tools:1.0.0-stackable25.3.0
command:
- bash
- -euo
@@ -22,7 +22,7 @@ spec:
kubectl wait --for=condition=Ready pod/druid-coordinator-default-0 --timeout=30m
containers:
- name: create-druid-ingestion-job
-          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable25.3.0
command:
- bash
- -euo
@@ -9,7 +9,7 @@ spec:
serviceAccountName: demo-serviceaccount
initContainers:
- name: wait-for-nifi
-          image: oci.stackable.tech/sdp/tools:1.0.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/tools:1.0.0-stackable25.3.0
command:
- bash
- -euo
@@ -22,7 +22,7 @@ spec:
kubectl wait --for=condition=Ready pod/nifi-node-default-0 --timeout=30m
containers:
- name: create-nifi-ingestion-job
-          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable0.0.0-dev
+          image: oci.stackable.tech/sdp/testing-tools:0.2.0-stackable25.3.0
command:
- bash
- -euo