/
SparkBatchClient.cs
195 lines (181 loc) · 9.14 KB
/
SparkBatchClient.cs
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>
#nullable disable
using System;
using System.Threading;
using System.Threading.Tasks;
using Azure;
using Azure.Analytics.Synapse.Spark.Models;
using Azure.Core;
using Azure.Core.Pipeline;
namespace Azure.Analytics.Synapse.Spark
{
/// <summary> The SparkBatch service client. </summary>
/// <summary> Client for interacting with Spark batch jobs on an Azure Synapse Spark pool. </summary>
public partial class SparkBatchClient
{
    private readonly ClientDiagnostics _clientDiagnostics;
    private readonly HttpPipeline _pipeline;

    /// <summary> The underlying REST client that performs the wire-level operations. </summary>
    internal SparkBatchRestClient RestClient { get; }

    /// <summary> Initializes a new instance of SparkBatchClient for mocking. </summary>
    protected SparkBatchClient()
    {
    }

    /// <summary> Initializes a new instance of SparkBatchClient. </summary>
    /// <param name="endpoint"> The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. </param>
    /// <param name="sparkPoolName"> Name of the spark pool. </param>
    /// <param name="credential"> A credential used to authenticate to an Azure Service. </param>
    /// <param name="livyApiVersion"> Valid api-version for the request. </param>
    /// <param name="options"> The options for configuring the client. </param>
    public SparkBatchClient(Uri endpoint, string sparkPoolName, TokenCredential credential, string livyApiVersion = "2019-11-01-preview", SparkClientOptions options = null)
    {
        // Validate required arguments up front, in declaration order, before building any pipeline state.
        if (endpoint is null)
        {
            throw new ArgumentNullException(nameof(endpoint));
        }
        if (sparkPoolName is null)
        {
            throw new ArgumentNullException(nameof(sparkPoolName));
        }
        if (credential is null)
        {
            throw new ArgumentNullException(nameof(credential));
        }
        if (livyApiVersion is null)
        {
            throw new ArgumentNullException(nameof(livyApiVersion));
        }

        options ??= new SparkClientOptions();
        _clientDiagnostics = new ClientDiagnostics(options);
        // The Synapse data-plane token audience; the bearer-token policy attaches credentials to every request.
        string[] authenticationScopes = { "https://dev.azuresynapse.net/.default" };
        _pipeline = HttpPipelineBuilder.Build(options, new BearerTokenAuthenticationPolicy(credential, authenticationScopes));
        RestClient = new SparkBatchRestClient(_clientDiagnostics, _pipeline, endpoint, sparkPoolName, livyApiVersion);
    }

    /// <summary> Initializes a new instance of SparkBatchClient. </summary>
    /// <param name="clientDiagnostics"> The handler for diagnostic messaging in the client. </param>
    /// <param name="pipeline"> The HTTP pipeline for sending and receiving REST requests and responses. </param>
    /// <param name="endpoint"> The workspace development endpoint, for example https://myworkspace.dev.azuresynapse.net. </param>
    /// <param name="sparkPoolName"> Name of the spark pool. </param>
    /// <param name="livyApiVersion"> Valid api-version for the request. </param>
    internal SparkBatchClient(ClientDiagnostics clientDiagnostics, HttpPipeline pipeline, Uri endpoint, string sparkPoolName, string livyApiVersion = "2019-11-01-preview")
    {
        _clientDiagnostics = clientDiagnostics;
        _pipeline = pipeline;
        RestClient = new SparkBatchRestClient(clientDiagnostics, pipeline, endpoint, sparkPoolName, livyApiVersion);
    }

    /// <summary> List all spark batch jobs which are running under a particular spark pool. </summary>
    /// <param name="from"> Optional param specifying which index the list should begin from. </param>
    /// <param name="size">
    /// Optional param specifying the size of the returned list.
    /// By default it is 20 and that is the maximum.
    /// </param>
    /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
    /// <param name="cancellationToken"> The cancellation token to use. </param>
    public virtual async Task<Response<SparkBatchJobCollection>> GetSparkBatchJobsAsync(int? @from = null, int? size = null, bool? detailed = null, CancellationToken cancellationToken = default)
    {
        using DiagnosticScope scope = _clientDiagnostics.CreateScope("SparkBatchClient.GetSparkBatchJobs");
        scope.Start();
        try
        {
            Response<SparkBatchJobCollection> response = await RestClient.GetSparkBatchJobsAsync(@from, size, detailed, cancellationToken).ConfigureAwait(false);
            return response;
        }
        catch (Exception ex)
        {
            // Record the failure on the diagnostic scope, then rethrow preserving the stack trace.
            scope.Failed(ex);
            throw;
        }
    }

    /// <summary> List all spark batch jobs which are running under a particular spark pool. </summary>
    /// <param name="from"> Optional param specifying which index the list should begin from. </param>
    /// <param name="size">
    /// Optional param specifying the size of the returned list.
    /// By default it is 20 and that is the maximum.
    /// </param>
    /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
    /// <param name="cancellationToken"> The cancellation token to use. </param>
    public virtual Response<SparkBatchJobCollection> GetSparkBatchJobs(int? @from = null, int? size = null, bool? detailed = null, CancellationToken cancellationToken = default)
    {
        using DiagnosticScope scope = _clientDiagnostics.CreateScope("SparkBatchClient.GetSparkBatchJobs");
        scope.Start();
        try
        {
            Response<SparkBatchJobCollection> response = RestClient.GetSparkBatchJobs(@from, size, detailed, cancellationToken);
            return response;
        }
        catch (Exception ex)
        {
            scope.Failed(ex);
            throw;
        }
    }

    /// <summary> Gets a single spark batch job. </summary>
    /// <param name="batchId"> Identifier for the batch job. </param>
    /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
    /// <param name="cancellationToken"> The cancellation token to use. </param>
    public virtual async Task<Response<SparkBatchJob>> GetSparkBatchJobAsync(int batchId, bool? detailed = null, CancellationToken cancellationToken = default)
    {
        using DiagnosticScope scope = _clientDiagnostics.CreateScope("SparkBatchClient.GetSparkBatchJob");
        scope.Start();
        try
        {
            Response<SparkBatchJob> response = await RestClient.GetSparkBatchJobAsync(batchId, detailed, cancellationToken).ConfigureAwait(false);
            return response;
        }
        catch (Exception ex)
        {
            scope.Failed(ex);
            throw;
        }
    }

    /// <summary> Gets a single spark batch job. </summary>
    /// <param name="batchId"> Identifier for the batch job. </param>
    /// <param name="detailed"> Optional query param specifying whether detailed response is returned beyond plain livy. </param>
    /// <param name="cancellationToken"> The cancellation token to use. </param>
    public virtual Response<SparkBatchJob> GetSparkBatchJob(int batchId, bool? detailed = null, CancellationToken cancellationToken = default)
    {
        using DiagnosticScope scope = _clientDiagnostics.CreateScope("SparkBatchClient.GetSparkBatchJob");
        scope.Start();
        try
        {
            Response<SparkBatchJob> response = RestClient.GetSparkBatchJob(batchId, detailed, cancellationToken);
            return response;
        }
        catch (Exception ex)
        {
            scope.Failed(ex);
            throw;
        }
    }

    /// <summary> Cancels a running spark batch job. </summary>
    /// <param name="batchId"> Identifier for the batch job. </param>
    /// <param name="cancellationToken"> The cancellation token to use. </param>
    public virtual async Task<Response> CancelSparkBatchJobAsync(int batchId, CancellationToken cancellationToken = default)
    {
        using DiagnosticScope scope = _clientDiagnostics.CreateScope("SparkBatchClient.CancelSparkBatchJob");
        scope.Start();
        try
        {
            Response response = await RestClient.CancelSparkBatchJobAsync(batchId, cancellationToken).ConfigureAwait(false);
            return response;
        }
        catch (Exception ex)
        {
            scope.Failed(ex);
            throw;
        }
    }

    /// <summary> Cancels a running spark batch job. </summary>
    /// <param name="batchId"> Identifier for the batch job. </param>
    /// <param name="cancellationToken"> The cancellation token to use. </param>
    public virtual Response CancelSparkBatchJob(int batchId, CancellationToken cancellationToken = default)
    {
        using DiagnosticScope scope = _clientDiagnostics.CreateScope("SparkBatchClient.CancelSparkBatchJob");
        scope.Start();
        try
        {
            Response response = RestClient.CancelSparkBatchJob(batchId, cancellationToken);
            return response;
        }
        catch (Exception ex)
        {
            scope.Failed(ex);
            throw;
        }
    }
}
}