
Commit

Merge branch 'devel'
maystery committed Nov 10, 2020
2 parents 961722b + b819dcb commit 4b49791
Showing 10 changed files with 69 additions and 59 deletions.
Binary file modified tutorials/spark-cluster-with-python.tar.gz
9 changes: 8 additions & 1 deletion tutorials/spark-cluster-with-python/infra-spark-cluster.yaml
@@ -9,9 +9,16 @@ nodes:
name: spark-worker
type: spark_worker_node
scaling:
- min: 1
+ min: 2
max: 10

+ variables:
+ HADOOP_VERSION: 2.10.1
+ SPARK_VERSION: 2.4.7
+ SPARK_HADOOP_VERSION: 2.7
+ CONSUL_VERSION: 1.8.5
+ CONSUL_TEMPLATE_VERSION: 0.25.1

dependencies:
-
connection: [ *W, *M ]
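
The new variables block pins each component version in one place; the cloud-init templates changed below pick the values up through {{variables.NAME}} placeholders, so a future version bump only touches the infrastructure description. As an optional sanity check (not part of this commit), the pinned releases can be confirmed to still exist on the Apache archive before deploying; the URLs mirror the wget calls used in the cloud-init scripts:

#!/bin/bash
# Optional pre-deployment check; assumes only that wget is installed locally.
# --spider verifies that the files exist without downloading them.
HADOOP_VERSION=2.10.1
SPARK_VERSION=2.4.7
SPARK_HADOOP_VERSION=2.7
wget --spider -q "https://archive.apache.org/dist/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz" \
  && echo "Hadoop $HADOOP_VERSION is available"
wget --spider -q "https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION.tgz" \
  && echo "Spark $SPARK_VERSION (Hadoop $SPARK_HADOOP_VERSION build) is available"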
@@ -9,11 +9,11 @@ write_files:
#!/bin/bash
set -ex
- HADOOP_VERSION=2.10.0
- SPARK_VERSION=2.4.6
- SPAR_HADOOP_VERSION=2.7
- CONSUL_VERSION=1.8.0
- CONSUL_TEMPLATE_VERSION=0.25.0
+ HADOOP_VERSION={{variables.HADOOP_VERSION}}
+ SPARK_VERSION={{variables.SPARK_VERSION}}
+ SPARK_HADOOP_VERSION={{variables.SPARK_HADOOP_VERSION}}
+ CONSUL_VERSION={{variables.CONSUL_VERSION}}
+ CONSUL_TEMPLATE_VERSION={{variables.CONSUL_TEMPLATE_VERSION}}
echo "Creating SPARKUSER starts."
adduser --disabled-password --gecos "" sparkuser
Expand Down Expand Up @@ -43,7 +43,7 @@ write_files:
echo "Install HADOOP starts."
- wget -nc https://downloads.apache.org/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz -O /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz
+ wget -nc https://archive.apache.org/dist/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz -O /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz
tar -xzf /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz --directory /home/sparkuser
mkdir /home/sparkuser/hadoop
mv /home/sparkuser/hadoop-$HADOOP_VERSION/* /home/sparkuser/hadoop
Expand All @@ -52,11 +52,11 @@ write_files:
echo "Install SPARK starts."
- wget -nc https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION.tgz -O /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION.tgz
- tar -zxf /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION.tgz --directory /home/sparkuser
+ wget -nc https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION.tgz -O /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION.tgz
+ tar -zxf /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION.tgz --directory /home/sparkuser
mkdir /home/sparkuser/spark
- mv /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION/* /home/sparkuser/spark
- rm -r /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION.tgz /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION
+ mv /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION/* /home/sparkuser/spark
+ rm -r /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION.tgz /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION
echo "Install SPARK finished."
@@ -9,11 +9,11 @@ write_files:
#!/bin/bash
set -ex
- HADOOP_VERSION=2.10.0
- SPARK_VERSION=2.4.6
- SPAR_HADOOP_VERSION=2.7
- CONSUL_VERSION=1.8.0
- CONSUL_TEMPLATE_VERSION=0.25.0
+ HADOOP_VERSION={{variables.HADOOP_VERSION}}
+ SPARK_VERSION={{variables.SPARK_VERSION}}
+ SPARK_HADOOP_VERSION={{variables.SPARK_HADOOP_VERSION}}
+ CONSUL_VERSION={{variables.CONSUL_VERSION}}
+ CONSUL_TEMPLATE_VERSION={{variables.CONSUL_TEMPLATE_VERSION}}
echo "Creating SPARKUSER starts."
adduser --disabled-password --gecos "" sparkuser
@@ -39,20 +39,19 @@ write_files:
echo "Install HADOOP starts."
- wget -nc https://downloads.apache.org/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz -O /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz
- tar -xzf /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz --directory /home/sparkuser
+ wget -nc https://archive.apache.org/dist/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz -O /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz tar -xzf /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz --directory /home/sparkuser
mkdir /home/sparkuser/hadoop
mv /home/sparkuser/hadoop-$HADOOP_VERSION/* /home/sparkuser/hadoop
rm -r /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz /home/sparkuser/hadoop-$HADOOP_VERSION
echo "Install HADOOP finished."
echo "Install SPARK starts."
- wget -nc https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION.tgz -O /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION.tgz
- tar -zxf /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION.tgz --directory /home/sparkuser
+ wget -nc https://archive.apache.org/dist/spark/spark-$SPARK_VERSION/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION.tgz -O /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION.tgz
+ tar -zxf /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION.tgz --directory /home/sparkuser
mkdir /home/sparkuser/spark
- mv /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION/* /home/sparkuser/spark
- rm -r /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION.tgz /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPAR_HADOOP_VERSION
+ mv /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION/* /home/sparkuser/spark
+ rm -r /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION.tgz /home/sparkuser/spark-$SPARK_VERSION-bin-hadoop$SPARK_HADOOP_VERSION
echo "Install SPARK finished."
42 changes: 20 additions & 22 deletions tutorials/spark-cluster-with-python/nodes/node_definitions.yaml
@@ -2,44 +2,42 @@
-
resource:
type: nova
- endpoint:
- project_id:
- user_domain_name:
- image_id:
- network_id:
- flavor_name:
- key_name:
+ endpoint: replace_with_endpoint_of_nova_interface_of_your_cloud
+ project_id: replace_with_projectid_to_use
+ user_domain_name: Default
+ image_id: replace_with_id_of_your_image_on_your_target_cloud
+ network_id: replace_with_id_of_network_on_your_target_cloud
+ flavor_name: replace_with_id_of_the_flavor_on_your_target_cloud
+ key_name: replace_with_name_of_keypair_or_remove
security_groups:
-

- floating_ip: yes
+ replace_with_security_group_to_add_or_remove_section
+ floating_ip: add_yes_if_you_need_floating_ip_or_remove
+ floating_ip_pool: replace_with_name_of_floating_ip_pool_or_remove
contextualisation:
type: cloudinit
context_template: !yaml_import
url: file://cloud_init_spark_master.yaml
health_check:
ports:
- 8080
- timeout: 6000
+ timeout: 2000

'node_def:spark_worker_node':
-
resource:
type: nova
- endpoint:
- project_id:
- user_domain_name:
- image_id:
- network_id:
- flavor_name:
- key_name:
+ endpoint: replace_with_endpoint_of_nova_interface_of_your_cloud
+ project_id: replace_with_projectid_to_use
+ user_domain_name: Default
+ image_id: replace_with_id_of_your_image_on_your_target_cloud
+ network_id: replace_with_id_of_network_on_your_target_cloud
+ flavor_name: replace_with_id_of_the_flavor_on_your_target_cloud
+ key_name: replace_with_name_of_keypair_or_remove
security_groups:
-

- # floating_ip: yes # If needed
+ replace_with_security_group_to_add_or_remove_section
contextualisation:
type: cloudinit
context_template: !yaml_import
- url: file://cloud_init_spark_worker.yaml
- health_check:
- ping: False
+ url: file://cloud_init_spark_worker.yaml
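
The master node definition keeps a port-based health check on 8080, the Spark master web UI, and after this commit the timeout is 2000 in both tutorials. Once an infrastructure is up, the same check can be reproduced by hand; the address below is a hypothetical placeholder, not something from the commit:

#!/bin/bash
# Manual equivalent of the port-8080 health check.
MASTER_IP=192.0.2.10   # hypothetical example; use the address reported for the spark-master node
curl -fsS -o /dev/null "http://$MASTER_IP:8080" && echo "Spark master UI is reachable"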
Binary file modified tutorials/spark-cluster-with-r.tar.gz
9 changes: 8 additions & 1 deletion tutorials/spark-cluster-with-r/infra-spark-cluster.yaml
@@ -8,10 +8,17 @@ nodes:
- &W
name: spark-worker
type: spark_worker_node
- scaling:
+ scaling:
min: 2
max: 10

+ variables:
+ HADOOP_VERSION: 2.10.1
+ SPARK_VERSION: 2.4.7
+ SPARK_HADOOP_VERSION: 2.7
+ CONSUL_VERSION: 1.8.5
+ CONSUL_TEMPLATE_VERSION: 0.25.1

dependencies:
-
connection: [ *W, *M ]
@@ -9,11 +9,11 @@ write_files:
#!/bin/bash
set -ex
- HADOOP_VERSION=2.10.0
- SPARK_VERSION=2.4.6
- SPAR_HADOOP_VERSION=2.7
- CONSUL_VERSION=1.8.0
- CONSUL_TEMPLATE_VERSION=0.25.0
+ HADOOP_VERSION={{variables.HADOOP_VERSION}}
+ SPARK_VERSION={{variables.SPARK_VERSION}}
+ SPARK_HADOOP_VERSION={{variables.SPARK_HADOOP_VERSION}}
+ CONSUL_VERSION={{variables.CONSUL_VERSION}}
+ CONSUL_TEMPLATE_VERSION={{variables.CONSUL_TEMPLATE_VERSION}}
RSTUDIO_VERSION=1.3.1073-amd64
@@ -45,7 +45,7 @@ write_files:
echo "Install HADOOP starts."
- wget -nc https://downloads.apache.org/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz -O /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz
+ wget -nc https://archive.apache.org/dist/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz -O /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz
tar -xzf /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz --directory /home/sparkuser
mkdir /home/sparkuser/hadoop
mv /home/sparkuser/hadoop-$HADOOP_VERSION/* /home/sparkuser/hadoop
@@ -9,11 +9,11 @@ write_files:
#!/bin/bash
set -ex
- HADOOP_VERSION=2.10.0
- SPARK_VERSION=2.4.6
- SPAR_HADOOP_VERSION=2.7
- CONSUL_VERSION=1.8.0
- CONSUL_TEMPLATE_VERSION=0.25.0
+ HADOOP_VERSION={{variables.HADOOP_VERSION}}
+ SPARK_VERSION={{variables.SPARK_VERSION}}
+ SPARK_HADOOP_VERSION={{variables.SPARK_HADOOP_VERSION}}
+ CONSUL_VERSION={{variables.CONSUL_VERSION}}
+ CONSUL_TEMPLATE_VERSION={{variables.CONSUL_TEMPLATE_VERSION}}
echo "Creating SPARKUSER starts."
adduser --disabled-password --gecos "" sparkuser
@@ -39,8 +39,7 @@ write_files:
echo "Install HADOOP starts."
- wget -nc https://downloads.apache.org/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz -O /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz
- tar -xzf /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz --directory /home/sparkuser
+ wget -nc https://archive.apache.org/dist/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz -O /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz tar -xzf /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz --directory /home/sparkuser
mkdir /home/sparkuser/hadoop
mv /home/sparkuser/hadoop-$HADOOP_VERSION/* /home/sparkuser/hadoop
rm -r /home/sparkuser/hadoop-$HADOOP_VERSION.tar.gz /home/sparkuser/hadoop-$HADOOP_VERSION
2 changes: 1 addition & 1 deletion tutorials/spark-cluster-with-r/nodes/node_definitions.yaml
@@ -21,7 +21,7 @@
health_check:
ports:
- 8080
- timeout: 600
+ timeout: 2000

'node_def:spark_worker_node':
-
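
For context, these tutorial files are normally consumed through the standard Occopus workflow. The commands below reflect the usual tutorial steps from memory and should be checked against the Occopus documentation; the infrastructure ID is an illustrative placeholder:

#!/bin/bash
# Typical deployment flow for these tutorials (assumes Occopus is installed and
# the node_definitions.yaml placeholders have been replaced with real cloud data).
occopus-import nodes/node_definitions.yaml   # register the node definitions
occopus-build infra-spark-cluster.yaml       # build the cluster; prints the infrastructure ID
# ... use the cluster ...
occopus-destroy -i <infrastructure_id>       # tear it down when finished (placeholder ID)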
