This repository has been archived by the owner on Mar 4, 2024. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 10
/
actions.yaml
76 lines (76 loc) · 2.69 KB
/
actions.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
# Basic health check: runs a small Pi calculation to confirm Spark is functional.
smoke-test:
  description: Verify that Spark is working by calculating pi.
# Run the SparkPi example job with a configurable number of partitions.
sparkpi:
  description: Calculate Pi
  params:
    partitions:
      description: Number of partitions to use for the SparkPi job
      # Kept as a string (with a quoted default) so the value is passed
      # through to the job verbatim rather than coerced to an integer.
      type: string
      default: "10"
# Spark Bench benchmark suite: each action below runs one named benchmark
# workload. None of them take parameters.
logisticregression:
  description: Run the Spark Bench LogisticRegression benchmark.
matrixfactorization:
  description: Run the Spark Bench MatrixFactorization benchmark.
pagerank:
  description: Run the Spark Bench PageRank benchmark.
sql:
  description: Run the Spark Bench SQL benchmark.
streaming:
  description: Run the Spark Bench Streaming benchmark.
svdplusplus:
  description: Run the Spark Bench SVDPlusPlus benchmark.
svm:
  description: Run the Spark Bench SVM benchmark.
trianglecount:
  description: Run the Spark Bench TriangleCount benchmark.
# Lifecycle controls for the Spark job history server process.
restart-spark-job-history-server:
  description: Restart the Spark job history server.
start-spark-job-history-server:
  description: Start the Spark job history server.
stop-spark-job-history-server:
  description: Stop the Spark job history server.
# Report the Spark versions that can be installed/selected on this unit.
list-spark-versions:
  # Fixed typo ("availbale") and capitalized "Spark" to match the other
  # action descriptions in this file.
  description: Get a list of available Spark versions.
# Submit an arbitrary Spark job (JAR or Python file) via spark-submit,
# optionally on a recurring cron schedule.
submit:
  description: Submit a job to Spark.
  # 'job' is the only mandatory parameter; all others are optional.
  required: ['job']
  params:
    job:
      description: >
        URL to a JAR or Python file. This can be any URL supported by
        spark-submit, such as a remote URL, an hdfs:// path (if
        connected to HDFS), etc.
      type: string
    class:
      description: >
        If a JAR is given, this should be the name of the class within
        the JAR to run.
      type: string
    # NOTE(review): 'job-args' declares no type, unlike every other param
    # here — presumably intentional (free-form value); confirm against the
    # action handler before adding a constraint.
    job-args:
      description: Arguments for the job.
    packages:
      description: Comma-separated list of packages to include.
      type: string
    py-files:
      description: Comma-separated list of Python packages to include.
      type: string
    extra-params:
      description: >
        Additional params to pass to spark-submit.
        For example: "--executor-memory 1000M --supervise"
      type: string
    cron:
      description: >
        Schedule the job to be run periodically, according to the
        given cron rule. For example: "*/5 * * * *" will run the
        job every 5 minutes.
      type: string
# Inspect and manage jobs scheduled for periodic execution (see the 'cron'
# parameter of the 'submit' action).
list-jobs:
  description: List scheduled periodic jobs.
remove-job:
  description: Remove a job previously scheduled for repeated execution.
  # The ID to remove is the one returned when the job was scheduled.
  required: ['action-id']
  params:
    action-id:
      type: string
      description: The ID returned by the action that scheduled the job.