-
Notifications
You must be signed in to change notification settings - Fork 232
/
azure.synapse.spark.aio.operations.SparkBatchOperations.yml
155 lines (148 loc) · 5.19 KB
/
azure.synapse.spark.aio.operations.SparkBatchOperations.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
### YamlMime:PythonClass
uid: azure.synapse.spark.aio.operations.SparkBatchOperations
name: SparkBatchOperations
fullName: azure.synapse.spark.aio.operations.SparkBatchOperations
module: azure.synapse.spark.aio.operations
inheritances:
- builtins.object
summary: 'SparkBatchOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client
instance that
instantiates it for you and attaches it as an attribute.'
constructor:
syntax: SparkBatchOperations(client, config, serializer, deserializer)
parameters:
- name: client
description: Client for service requests.
isRequired: true
- name: config
description: Configuration of service client.
isRequired: true
- name: serializer
description: An object model serializer.
isRequired: true
- name: deserializer
description: An object model deserializer.
isRequired: true
variables:
- description: Alias to model classes used in this operation group.
name: models
methods:
- uid: azure.synapse.spark.aio.operations.SparkBatchOperations.cancel_spark_batch_job
name: cancel_spark_batch_job
summary: Cancels a running spark batch job.
signature: 'async cancel_spark_batch_job(batch_id: int, **kwargs: Any) -> None'
parameters:
- name: batch_id
description: Identifier for the batch job.
isRequired: true
types:
- <xref:int>
keywordOnlyParameters:
- name: cls
description: A custom type or function that will be passed the direct response
types:
- <xref:callable>
return:
description: None, or the result of cls(response)
types:
- <xref:None>
exceptions:
- type: azure.core.exceptions.HttpResponseError
- uid: azure.synapse.spark.aio.operations.SparkBatchOperations.create_spark_batch_job
name: create_spark_batch_job
summary: Creates a new spark batch job.
signature: 'async create_spark_batch_job(spark_batch_job_options: SparkBatchJobOptions,
detailed: bool | None = None, **kwargs: Any) -> SparkBatchJob'
parameters:
- name: spark_batch_job_options
description: Livy-compatible batch job request payload.
isRequired: true
types:
- <xref:azure.synapse.spark.models.SparkBatchJobOptions>
- name: detailed
description: 'Optional query param specifying whether a detailed response is returned
beyond plain Livy.'
defaultValue: None
types:
- <xref:bool>
keywordOnlyParameters:
- name: cls
description: A custom type or function that will be passed the direct response
types:
- <xref:callable>
return:
description: SparkBatchJob, or the result of cls(response)
types:
- <xref:azure.synapse.spark.models.SparkBatchJob>
exceptions:
- type: azure.core.exceptions.HttpResponseError
- uid: azure.synapse.spark.aio.operations.SparkBatchOperations.get_spark_batch_job
name: get_spark_batch_job
summary: Gets a single spark batch job.
signature: 'async get_spark_batch_job(batch_id: int, detailed: bool | None = None,
**kwargs: Any) -> SparkBatchJob'
parameters:
- name: batch_id
description: Identifier for the batch job.
isRequired: true
types:
- <xref:int>
- name: detailed
description: 'Optional query param specifying whether a detailed response is returned
beyond plain Livy.'
defaultValue: None
types:
- <xref:bool>
keywordOnlyParameters:
- name: cls
description: A custom type or function that will be passed the direct response
types:
- <xref:callable>
return:
description: SparkBatchJob, or the result of cls(response)
types:
- <xref:azure.synapse.spark.models.SparkBatchJob>
exceptions:
- type: azure.core.exceptions.HttpResponseError
- uid: azure.synapse.spark.aio.operations.SparkBatchOperations.get_spark_batch_jobs
name: get_spark_batch_jobs
summary: Lists all spark batch jobs running under a particular spark pool.
signature: 'async get_spark_batch_jobs(from_parameter: int | None = None, size:
int | None = None, detailed: bool | None = None, **kwargs: Any) -> SparkBatchJobCollection'
parameters:
- name: from_parameter
description: Optional param specifying which index the list should begin from.
defaultValue: None
types:
- <xref:int>
- name: size
description: 'Optional param specifying the size of the returned list.
By default it is 20 and that is the maximum.'
defaultValue: None
types:
- <xref:int>
- name: detailed
description: 'Optional query param specifying whether a detailed response is returned
beyond plain Livy.'
defaultValue: None
types:
- <xref:bool>
keywordOnlyParameters:
- name: cls
description: A custom type or function that will be passed the direct response
types:
- <xref:callable>
return:
description: SparkBatchJobCollection, or the result of cls(response)
types:
- <xref:azure.synapse.spark.models.SparkBatchJobCollection>
exceptions:
- type: azure.core.exceptions.HttpResponseError
attributes:
- uid: azure.synapse.spark.aio.operations.SparkBatchOperations.models
name: models
signature: models = <module 'azure.synapse.spark.models' from 'C:\\hostedtoolcache\\windows\\Python\\3.11.9\\x64\\Lib\\site-packages\\azure\\synapse\\spark\\models\\__init__.py'>