NIFI-10152 Storage client caching in Azure ADLS processors #6158

Closed · wants to merge 6 commits
Changes from 4 commits
File: NOTICE
@@ -195,6 +195,11 @@ The following binary components are provided under the Apache Software License v
   Reactive Streams Netty Driver
   Copyright 2020, Project Reactor

+  (ASLv2) Caffeine (com.github.ben-manes.caffeine:caffeine:jar:2.9.2 - https://github.com/ben-manes/caffeine)
+    The following NOTICE information applies:
+      Caffeine (caching library)
+      Copyright Ben Manes
+
 ************************
 Common Development and Distribution License 1.0
 ************************
File: pom.xml
@@ -135,6 +135,12 @@
             <groupId>commons-io</groupId>
             <artifactId>commons-io</artifactId>
         </dependency>
+        <dependency>
+            <groupId>com.github.ben-manes.caffeine</groupId>
+            <artifactId>caffeine</artifactId>
+            <version>2.9.2</version>
+        </dependency>
+
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-mock</artifactId>
@@ -159,7 +165,6 @@
             <version>1.18.0-SNAPSHOT</version>
             <scope>test</scope>
         </dependency>
-
         <dependency>
             <groupId>org.apache.nifi</groupId>
             <artifactId>nifi-schema-registry-service-api</artifactId>
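For context on the new dependency: the part of the Caffeine API this PR relies on is small. A bounded cache is built once and populated on demand through a mapping function that runs only on a cache miss. A minimal, self-contained sketch (class name and the String key/value types are illustrative only, not from the PR):

import java.util.concurrent.atomic.AtomicInteger;

import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;

public class CaffeineCacheSketch {

    private static final AtomicInteger CREATIONS = new AtomicInteger();

    public static void main(String[] args) {
        // Bounded cache: once more than 10 entries exist, least-recently-used
        // entries become eligible for eviction.
        final Cache<String, String> cache = Caffeine.newBuilder()
                .maximumSize(10)
                .build();

        // get() runs the mapping function only on a miss; a lookup with an equal
        // key afterwards returns the cached value without invoking it again.
        cache.get("account-1", CaffeineCacheSketch::expensiveCreate);
        cache.get("account-1", CaffeineCacheSketch::expensiveCreate);

        System.out.println(CREATIONS.get());        // 1
        System.out.println(cache.estimatedSize());  // 1
    }

    private static String expensiveCreate(String key) {
        CREATIONS.incrementAndGet(); // stands in for building a DataLakeServiceClient
        return "client-for-" + key;
    }
}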
File: AbstractAzureDataLakeStorageProcessor.java
@@ -16,18 +16,10 @@
  */
 package org.apache.nifi.processors.azure;

-import com.azure.core.credential.AccessToken;
-import com.azure.core.credential.TokenCredential;
-import com.azure.core.http.HttpClient;
-import com.azure.core.http.netty.NettyAsyncHttpClientBuilder;
-import com.azure.identity.ClientSecretCredential;
-import com.azure.identity.ClientSecretCredentialBuilder;
-import com.azure.identity.ManagedIdentityCredential;
-import com.azure.identity.ManagedIdentityCredentialBuilder;
-import com.azure.storage.common.StorageSharedKeyCredential;
 import com.azure.storage.file.datalake.DataLakeServiceClient;
-import com.azure.storage.file.datalake.DataLakeServiceClientBuilder;
 import org.apache.commons.lang3.StringUtils;
+import org.apache.nifi.annotation.lifecycle.OnScheduled;
+import org.apache.nifi.annotation.lifecycle.OnStopped;
 import org.apache.nifi.components.PropertyDescriptor;
 import org.apache.nifi.components.ValidationContext;
 import org.apache.nifi.components.ValidationResult;
@@ -40,9 +32,10 @@
 import org.apache.nifi.processor.Relationship;
 import org.apache.nifi.processor.exception.ProcessException;
 import org.apache.nifi.processor.util.StandardValidators;
+import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;
+import org.apache.nifi.processors.azure.storage.utils.DataLakeServiceClientFactory;
 import org.apache.nifi.services.azure.storage.ADLSCredentialsDetails;
 import org.apache.nifi.services.azure.storage.ADLSCredentialsService;
-import reactor.core.publisher.Mono;

 import java.util.Arrays;
 import java.util.Collections;
@@ -51,7 +44,6 @@
 import java.util.Set;

 import static org.apache.nifi.processors.azure.storage.utils.ADLSAttributes.ATTR_NAME_FILENAME;
-import static org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils.getProxyOptions;

 public abstract class AbstractAzureDataLakeStorageProcessor extends AbstractProcessor {

@@ -65,7 +57,7 @@ public abstract class AbstractAzureDataLakeStorageProc

     public static final PropertyDescriptor FILESYSTEM = new PropertyDescriptor.Builder()
             .name("filesystem-name").displayName("Filesystem Name")
-            .description("Name of the Azure Storage File System. It is assumed to be already existing.")
+            .description("Name of the Azure Storage File System (also called Container). It is assumed to be already existing.")
             .addValidator(StandardValidators.NON_BLANK_VALIDATOR)
             .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
             .required(true)
@@ -103,65 +95,35 @@ public abstract class AbstractAzureDataLakeStorageProc

     public static final String TEMP_FILE_DIRECTORY = "_nifitempdirectory";

+    private DataLakeServiceClientFactory clientFactory;
+
     @Override
     public Set<Relationship> getRelationships() {
         return RELATIONSHIPS;
     }

-    public static DataLakeServiceClient getStorageClient(PropertyContext context, FlowFile flowFile) {
-        final Map<String, String> attributes = flowFile != null ? flowFile.getAttributes() : Collections.emptyMap();
-
-        final ADLSCredentialsService credentialsService = context.getProperty(ADLS_CREDENTIALS_SERVICE).asControllerService(ADLSCredentialsService.class);
-
-        final ADLSCredentialsDetails credentialsDetails = credentialsService.getCredentialsDetails(attributes);
-
-        final String accountName = credentialsDetails.getAccountName();
-        final String accountKey = credentialsDetails.getAccountKey();
-        final String sasToken = credentialsDetails.getSasToken();
-        final AccessToken accessToken = credentialsDetails.getAccessToken();
-        final String endpointSuffix = credentialsDetails.getEndpointSuffix();
-        final boolean useManagedIdentity = credentialsDetails.getUseManagedIdentity();
-        final String managedIdentityClientId = credentialsDetails.getManagedIdentityClientId();
-        final String servicePrincipalTenantId = credentialsDetails.getServicePrincipalTenantId();
-        final String servicePrincipalClientId = credentialsDetails.getServicePrincipalClientId();
-        final String servicePrincipalClientSecret = credentialsDetails.getServicePrincipalClientSecret();
-
-        final String endpoint = String.format("https://%s.%s", accountName, endpointSuffix);
-
-        final DataLakeServiceClientBuilder dataLakeServiceClientBuilder = new DataLakeServiceClientBuilder();
-        dataLakeServiceClientBuilder.endpoint(endpoint);
-
-        if (StringUtils.isNotBlank(accountKey)) {
-            final StorageSharedKeyCredential credential = new StorageSharedKeyCredential(accountName, accountKey);
-            dataLakeServiceClientBuilder.credential(credential);
-        } else if (StringUtils.isNotBlank(sasToken)) {
-            dataLakeServiceClientBuilder.sasToken(sasToken);
-        } else if (accessToken != null) {
-            final TokenCredential credential = tokenRequestContext -> Mono.just(accessToken);
-            dataLakeServiceClientBuilder.credential(credential);
-        } else if (useManagedIdentity) {
-            final ManagedIdentityCredential misCredential = new ManagedIdentityCredentialBuilder()
-                    .clientId(managedIdentityClientId)
-                    .build();
-            dataLakeServiceClientBuilder.credential(misCredential);
-        } else if (StringUtils.isNoneBlank(servicePrincipalTenantId, servicePrincipalClientId, servicePrincipalClientSecret)) {
-            final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
-                    .tenantId(servicePrincipalTenantId)
-                    .clientId(servicePrincipalClientId)
-                    .clientSecret(servicePrincipalClientSecret)
-                    .build();
-            dataLakeServiceClientBuilder.credential(credential);
-        } else {
-            throw new IllegalArgumentException("No valid credentials were provided");
-        }
-
-        final NettyAsyncHttpClientBuilder nettyClientBuilder = new NettyAsyncHttpClientBuilder();
-        nettyClientBuilder.proxy(getProxyOptions(context));
-
-        final HttpClient nettyClient = nettyClientBuilder.build();
-        dataLakeServiceClientBuilder.httpClient(nettyClient);
-
-        final DataLakeServiceClient storageClient = dataLakeServiceClientBuilder.buildClient();
+    @OnScheduled
+    public void onScheduled() {
+        clientFactory = new DataLakeServiceClientFactory(getLogger());
+    }
+
+    @OnStopped
+    public void onStopped() {
+        clientFactory = null;
+    }
+
+    public DataLakeServiceClientFactory getStorageClientFactory() {
+        return clientFactory;
+    }
+
+    public DataLakeServiceClient getStorageClient(PropertyContext context, FlowFile flowFile) {
+        final Map<String, String> attributes = flowFile != null ? flowFile.getAttributes() : Collections.emptyMap();
+
+        final ADLSCredentialsService credentialsService = context.getProperty(ADLS_CREDENTIALS_SERVICE).asControllerService(ADLSCredentialsService.class);
+
+        final ADLSCredentialsDetails credentialsDetails = credentialsService.getCredentialsDetails(attributes);
+
+        final DataLakeServiceClient storageClient = getStorageClientFactory().getStorageClient(credentialsDetails, AzureStorageUtils.getProxyOptions(context));

         return storageClient;
     }
File: ListAzureDataLakeStorage.java
@@ -43,7 +43,10 @@
 import org.apache.nifi.processor.util.StandardValidators;
 import org.apache.nifi.processors.azure.storage.utils.ADLSFileInfo;
 import org.apache.nifi.processors.azure.storage.utils.AzureStorageUtils;
+import org.apache.nifi.processors.azure.storage.utils.DataLakeServiceClientFactory;
 import org.apache.nifi.serialization.record.RecordSchema;
+import org.apache.nifi.services.azure.storage.ADLSCredentialsDetails;
+import org.apache.nifi.services.azure.storage.ADLSCredentialsService;

 import java.io.IOException;
 import java.util.Arrays;
@@ -66,7 +69,6 @@
 import static org.apache.nifi.processors.azure.AbstractAzureDataLakeStorageProcessor.TEMP_FILE_DIRECTORY;
 import static org.apache.nifi.processors.azure.AbstractAzureDataLakeStorageProcessor.evaluateDirectoryProperty;
 import static org.apache.nifi.processors.azure.AbstractAzureDataLakeStorageProcessor.evaluateFileSystemProperty;
-import static org.apache.nifi.processors.azure.AbstractAzureDataLakeStorageProcessor.getStorageClient;
 import static org.apache.nifi.processors.azure.storage.utils.ADLSAttributes.ATTR_DESCRIPTION_DIRECTORY;
 import static org.apache.nifi.processors.azure.storage.utils.ADLSAttributes.ATTR_DESCRIPTION_ETAG;
 import static org.apache.nifi.processors.azure.storage.utils.ADLSAttributes.ATTR_DESCRIPTION_FILENAME;
@@ -170,6 +172,8 @@ public class ListAzureDataLakeStorage extends AbstractListAzureProcessor<ADLSFil
     private volatile Pattern filePattern;
     private volatile Pattern pathPattern;

+    private DataLakeServiceClientFactory clientFactory;
+
     @Override
     protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
         return PROPERTIES;
@@ -179,12 +183,14 @@ protected List<PropertyDescriptor> getSupportedPropertyDescriptors() {
     public void onScheduled(final ProcessContext context) {
         filePattern = getPattern(context, FILE_FILTER);
         pathPattern = getPattern(context, PATH_FILTER);
+        clientFactory = new DataLakeServiceClientFactory(getLogger());
     }

     @OnStopped
     public void onStopped() {
         filePattern = null;
         pathPattern = null;
+        clientFactory = null;
     }

     @Override
@@ -254,6 +260,10 @@ protected Map<String, String> createAttributes(final ADLSFileInfo fileInfo, fina
         return attributes;
     }

+    public DataLakeServiceClientFactory getStorageClientFactory() {
+        return clientFactory;
+    }
+
     private List<ADLSFileInfo> performListing(final ProcessContext context, final Long minTimestamp, final ListingMode listingMode,
                                               final boolean applyFilters) throws IOException {
         try {
@@ -264,7 +274,11 @@ private List<ADLSFileInfo> performListing(final ProcessContext context, final Lo
             final Pattern filePattern = listingMode == ListingMode.EXECUTION ? this.filePattern : getPattern(context, FILE_FILTER);
             final Pattern pathPattern = listingMode == ListingMode.EXECUTION ? this.pathPattern : getPattern(context, PATH_FILTER);

-            final DataLakeServiceClient storageClient = getStorageClient(context, null);
+            final ADLSCredentialsService credentialsService = context.getProperty(ADLS_CREDENTIALS_SERVICE).asControllerService(ADLSCredentialsService.class);
+
+            final ADLSCredentialsDetails credentialsDetails = credentialsService.getCredentialsDetails(Collections.emptyMap());
+
+            final DataLakeServiceClient storageClient = clientFactory.getStorageClient(credentialsDetails, AzureStorageUtils.getProxyOptions(context));
             final DataLakeFileSystemClient fileSystemClient = storageClient.getFileSystemClient(fileSystem);

             final ListPathsOptions options = new ListPathsOptions();
File: DataLakeServiceClientFactory.java (new file)
@@ -0,0 +1,124 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.processors.azure.storage.utils;

import com.azure.core.credential.AccessToken;
import com.azure.core.credential.TokenCredential;
import com.azure.core.http.HttpClient;
import com.azure.core.http.ProxyOptions;
import com.azure.core.http.netty.NettyAsyncHttpClientBuilder;
import com.azure.identity.ClientSecretCredential;
import com.azure.identity.ClientSecretCredentialBuilder;
import com.azure.identity.ManagedIdentityCredential;
import com.azure.identity.ManagedIdentityCredentialBuilder;
import com.azure.storage.common.StorageSharedKeyCredential;
import com.azure.storage.file.datalake.DataLakeServiceClient;
import com.azure.storage.file.datalake.DataLakeServiceClientBuilder;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.commons.lang3.StringUtils;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.services.azure.storage.ADLSCredentialsDetails;
import reactor.core.publisher.Mono;

public class DataLakeServiceClientFactory {

    private static final long STORAGE_CLIENT_CACHE_SIZE = 10;

    private final ComponentLog logger;

    private final Cache<ADLSCredentialsDetails, DataLakeServiceClient> clientCache;

    public DataLakeServiceClientFactory(ComponentLog logger) {
        this.logger = logger;
        this.clientCache = createCache();
    }

    private Cache<ADLSCredentialsDetails, DataLakeServiceClient> createCache() {
        return Caffeine.newBuilder()
                .maximumSize(STORAGE_CLIENT_CACHE_SIZE)
                .build();
    }

    public Cache<ADLSCredentialsDetails, DataLakeServiceClient> getCache() {
        return clientCache;
    }
[Review comment from a Contributor]
@nandorsoma I think it is a bit overkill to expose internal fields publicly for integration tests only. Furthermore, the IT does not really test what we should check: it asserts the cache size at the end (1 client in the cache), but it is more important to check how many client instance creations have happened (only 1). It could be tested with a unit test, and in that case the public getter would not be needed.


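A unit test along the lines the reviewer suggests could look like the following. This is a minimal sketch, not part of the PR: it assumes JUnit 5 and Mockito on the test classpath, that ADLSCredentialsDetails is mockable (otherwise an instance could be built through its builder), and it stubs only the getters the shared-key branch of createStorageClient reads. Getting the identical instance back from the second lookup implies the client was created exactly once:

import static org.junit.jupiter.api.Assertions.assertSame;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.azure.storage.file.datalake.DataLakeServiceClient;
import org.apache.nifi.logging.ComponentLog;
import org.apache.nifi.services.azure.storage.ADLSCredentialsDetails;
import org.junit.jupiter.api.Test;

class DataLakeServiceClientFactoryTest {

    @Test
    void secondLookupWithSameCredentialsReturnsCachedClient() {
        final DataLakeServiceClientFactory factory = new DataLakeServiceClientFactory(mock(ComponentLog.class));

        // Stub just enough for the shared-key branch; the account key must be valid Base64.
        final ADLSCredentialsDetails credentials = mock(ADLSCredentialsDetails.class);
        when(credentials.getAccountName()).thenReturn("testaccount");
        when(credentials.getAccountKey()).thenReturn("dGVzdC1hY2NvdW50LWtleQ==");
        when(credentials.getEndpointSuffix()).thenReturn("dfs.core.windows.net");

        final DataLakeServiceClient first = factory.getStorageClient(credentials, null);
        final DataLakeServiceClient second = factory.getStorageClient(credentials, null);

        // A cache hit returns the very same instance; a second construction would fail this.
        assertSame(first, second);
    }
}

Asserting instance identity sidesteps the need to make client construction pluggable just to count invocations.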
    /**
     * Retrieves a {@link DataLakeServiceClient}, creating and caching it on first use.
     *
     * @param credentialsDetails used as the cache key because it can contain properties that are the results of expression evaluation
     * @param proxyOptions not part of the cache key, because proxy parameters are set in the ProxyConfiguration service
     * @return DataLakeServiceClient
     */
    public DataLakeServiceClient getStorageClient(ADLSCredentialsDetails credentialsDetails, ProxyOptions proxyOptions) {
        return clientCache.get(credentialsDetails, __ -> {
            logger.debug("DataLakeServiceClient was not found in the cache for the given credentials. Creating it.");
            return createStorageClient(credentialsDetails, proxyOptions);
        });
    }

    private static DataLakeServiceClient createStorageClient(ADLSCredentialsDetails credentialsDetails, ProxyOptions proxyOptions) {
        final String accountName = credentialsDetails.getAccountName();
        final String accountKey = credentialsDetails.getAccountKey();
        final String sasToken = credentialsDetails.getSasToken();
        final AccessToken accessToken = credentialsDetails.getAccessToken();
        final String endpointSuffix = credentialsDetails.getEndpointSuffix();
        final boolean useManagedIdentity = credentialsDetails.getUseManagedIdentity();
        final String managedIdentityClientId = credentialsDetails.getManagedIdentityClientId();
        final String servicePrincipalTenantId = credentialsDetails.getServicePrincipalTenantId();
        final String servicePrincipalClientId = credentialsDetails.getServicePrincipalClientId();
        final String servicePrincipalClientSecret = credentialsDetails.getServicePrincipalClientSecret();

        final String endpoint = String.format("https://%s.%s", accountName, endpointSuffix);

        final DataLakeServiceClientBuilder dataLakeServiceClientBuilder = new DataLakeServiceClientBuilder();
        dataLakeServiceClientBuilder.endpoint(endpoint);

        if (StringUtils.isNotBlank(accountKey)) {
            final StorageSharedKeyCredential credential = new StorageSharedKeyCredential(accountName, accountKey);
            dataLakeServiceClientBuilder.credential(credential);
        } else if (StringUtils.isNotBlank(sasToken)) {
            dataLakeServiceClientBuilder.sasToken(sasToken);
        } else if (accessToken != null) {
            final TokenCredential credential = tokenRequestContext -> Mono.just(accessToken);
            dataLakeServiceClientBuilder.credential(credential);
        } else if (useManagedIdentity) {
            final ManagedIdentityCredential misCredential = new ManagedIdentityCredentialBuilder()
                    .clientId(managedIdentityClientId)
                    .build();
            dataLakeServiceClientBuilder.credential(misCredential);
        } else if (StringUtils.isNoneBlank(servicePrincipalTenantId, servicePrincipalClientId, servicePrincipalClientSecret)) {
            final ClientSecretCredential credential = new ClientSecretCredentialBuilder()
                    .tenantId(servicePrincipalTenantId)
                    .clientId(servicePrincipalClientId)
                    .clientSecret(servicePrincipalClientSecret)
                    .build();
            dataLakeServiceClientBuilder.credential(credential);
        } else {
            throw new IllegalArgumentException("No valid credentials were provided");
        }

        final NettyAsyncHttpClientBuilder nettyClientBuilder = new NettyAsyncHttpClientBuilder();
        nettyClientBuilder.proxy(proxyOptions);

        final HttpClient nettyClient = nettyClientBuilder.build();
        dataLakeServiceClientBuilder.httpClient(nettyClient);

        return dataLakeServiceClientBuilder.buildClient();
    }
}
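One assumption worth making explicit: Caffeine looks cache keys up via equals()/hashCode(), so this factory only deduplicates clients if ADLSCredentialsDetails implements value equality over its fields. If the credentials service returned a fresh, identity-equal instance on every call, every lookup would miss and the bounded cache would simply churn through new clients. A sketch of the contract the key class needs (illustrative only, field list abbreviated, assuming java.util.Objects):

// Illustrative only: the real ADLSCredentialsDetails has more fields, and all of
// them need to participate in equals()/hashCode() for correct cache behavior.
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    if (!(o instanceof ADLSCredentialsDetails)) {
        return false;
    }
    final ADLSCredentialsDetails other = (ADLSCredentialsDetails) o;
    return Objects.equals(accountName, other.accountName)
            && Objects.equals(accountKey, other.accountKey)
            && Objects.equals(sasToken, other.sasToken)
            && Objects.equals(endpointSuffix, other.endpointSuffix);
}

@Override
public int hashCode() {
    return Objects.hash(accountName, accountKey, sasToken, endpointSuffix);
}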