Commit

perf(docker): Fix for ConcurrentHashMap cast error. Added unit tests (#3942)

* perf(docker): Use parallel streams for caching docker images

* chore(docker): Using java syntax for map updates

* perf(docker): Only parallelize loop of tags

* chore(docker): Use ConcurrentMap instead of ConcurrentHashMap

* perf(docker): Fix for ConcurrentHashMap cast error. Added unit tests

* perf(docker): Use parallel streams for caching docker images
german-muzquiz authored and ezimanyi committed Aug 15, 2019
1 parent 09e0c13 commit 011da56
Showing 4 changed files with 140 additions and 19 deletions.
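
Taken together, the commits above move the per-tag caching work onto a parallel stream and switch the shared builder maps to ConcurrentHashMap. A minimal sketch of why the two changes belong together (hypothetical names, plain Groovy/JDK only, not the Spinnaker code itself): a default Groovy map literal, including the [:].withDefault { ... } wrapper the old code used, is not safe to populate from a parallel stream, while a ConcurrentHashMap is.

import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.ConcurrentMap

def tags = (1..1000).collect { "tag-$it".toString() }

// Unsafe under parallel writes: [:] (and [:].withDefault { ... }) is a plain
// LinkedHashMap underneath, so concurrent puts can drop entries or throw.
// def cache = [:]

// Safe: ConcurrentHashMap is designed for concurrent writers.
ConcurrentMap<String, Integer> cache = new ConcurrentHashMap<>()

tags.parallelStream().forEach { tag ->
  cache.put(tag, tag.length())   // each worker thread writes its own key
}

assert cache.size() == tags.size()
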
DefaultCacheDataBuilder.groovy
@@ -17,6 +17,8 @@
package com.netflix.spinnaker.clouddriver.docker.registry.cache

import com.netflix.spinnaker.cats.cache.DefaultCacheData
+import java.util.concurrent.ConcurrentHashMap
+import java.util.concurrent.ConcurrentMap

class DefaultCacheDataBuilder {
String id = ''
@@ -28,7 +30,7 @@ class DefaultCacheDataBuilder {
new DefaultCacheData(id, ttlSeconds, attributes, relationships)
}

-  public static Map<String, DefaultCacheDataBuilder> defaultCacheDataBuilderMap() {
-    return [:].withDefault { String id -> new DefaultCacheDataBuilder(id: id) }
+  public static ConcurrentMap<String, DefaultCacheDataBuilder> defaultCacheDataBuilderMap() {
+    return new ConcurrentHashMap<String, DefaultCacheDataBuilder>()
}
}
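
For context on the change above: Groovy's [:].withDefault { ... } returns a MapWithDefault wrapper, which is a java.util.Map but not a java.util.concurrent.ConcurrentMap, so it cannot be used where the new ConcurrentMap return type is declared (presumably the "ConcurrentHashMap cast error" the commit title refers to), and it is not thread-safe either. With a bare ConcurrentHashMap, the old "create a builder on first access" behaviour has to be made explicit; computeIfAbsent is the usual thread-safe equivalent. A short sketch assuming only standard Groovy/JDK behaviour, not code from this repository:

import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.ConcurrentMap

// The old factory's return value is a Map, but not a ConcurrentMap:
def lazyMap = [:].withDefault { String id -> "builder-for-" + id }
assert lazyMap instanceof Map
assert !(lazyMap instanceof ConcurrentMap)

// Thread-safe "create on first access" against the new return type:
ConcurrentMap<String, String> builders = new ConcurrentHashMap<>()
def created = builders.computeIfAbsent("repo-1:tag-1") { String id -> "builder-for-" + id }
assert created == "builder-for-repo-1:tag-1"
assert builders.size() == 1

The agent change below takes the other route: it builds each DefaultCacheDataBuilder explicitly and put()s it into the map, which is equally safe, since put() on a ConcurrentHashMap is atomic, concurrent tag threads cannot corrupt the shared maps.
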
DockerRegistryImageCachingAgent.groovy
@@ -28,6 +28,7 @@ import com.netflix.spinnaker.clouddriver.docker.registry.security.DockerRegistryCredentials
import groovy.util.logging.Slf4j
import retrofit.RetrofitError

+import java.util.concurrent.ConcurrentMap
import java.util.concurrent.TimeUnit

import static java.util.Collections.unmodifiableSet
@@ -128,11 +129,11 @@ class DockerRegistryImageCachingAgent implements CachingAgent, AccountAware, Age
private CacheResult buildCacheResult(Map<String, Set<String>> tagMap) {
log.info("Describing items in ${agentType}")

-    Map<String, DefaultCacheDataBuilder> cachedTags = DefaultCacheDataBuilder.defaultCacheDataBuilderMap()
-    Map<String, DefaultCacheDataBuilder> cachedIds = DefaultCacheDataBuilder.defaultCacheDataBuilderMap()
+    ConcurrentMap<String, DefaultCacheDataBuilder> cachedTags = DefaultCacheDataBuilder.defaultCacheDataBuilderMap()
+    ConcurrentMap<String, DefaultCacheDataBuilder> cachedIds = DefaultCacheDataBuilder.defaultCacheDataBuilderMap()

tagMap.forEach { repository, tags ->
-      tags.forEach { tag ->
+      tags.parallelStream().forEach { tag ->
if (!tag) {
log.warn("Empty tag encountered for $accountName/$repository, not caching")
return
@@ -158,23 +159,26 @@
}
}
}
-        try {
-          creationDate = credentials.client.getCreationDate(repository, tag)
-        } catch (Exception e) {
-          log.warn("Unable to fetch tag creation date, reason: {} (tag: {}, repository: {})", e.message, tag, repository)
-        }

-        cachedTags[tagKey].with {
-          attributes.name = "${repository}:${tag}".toString()
-          attributes.account = accountName
-          attributes.digest = digest
-          attributes.date = creationDate
+        if (credentials.sortTagsByDate) {
+          try {
+            creationDate = credentials.client.getCreationDate(repository, tag)
+          } catch (Exception e) {
+            log.warn("Unable to fetch tag creation date, reason: {} (tag: {}, repository: {})", e.message, tag, repository)
+          }
        }

-        cachedIds[imageIdKey].with {
-          attributes.tagKey = tagKey
-          attributes.account = accountName
-        }
+        def tagData = new DefaultCacheDataBuilder()
+        tagData.attributes.put("name", "${repository}:${tag}".toString())
+        tagData.attributes.put("account", accountName)
+        tagData.attributes.put("digest", digest)
+        tagData.attributes.put("date", creationDate)
+        cachedTags.put(tagKey, tagData)
+
+        def idData = new DefaultCacheDataBuilder()
+        idData.attributes.put("tagKey", tagKey)
+        idData.attributes.put("account", accountName)
+        cachedIds.put(imageIdKey, idData)
}

null
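
Two details of the rewritten loop above are easy to miss. Only the inner loop over a repository's tags runs on a parallel stream (per the "Only parallelize loop of tags" commit); the outer tagMap.forEach stays sequential, and all writes from the worker threads land in the ConcurrentMaps created earlier. The creation-date lookup is also now guarded by credentials.sortTagsByDate, so the extra registry call per tag is only paid when date-sorted tags were requested. A reduced sketch of that shape, using hypothetical stand-ins (sortTagsByDate, fetchCreationDate) rather than the real client:

import java.time.Instant
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.ConcurrentMap

def sortTagsByDate = false
def fetchCreationDate = { String repo, String tag -> Instant.EPOCH }  // stand-in for the registry HTTP call

ConcurrentMap<String, Map> cachedTags = new ConcurrentHashMap<>()
Map<String, Set<String>> tagMap = ["repo-1": ["tag-1", "tag-2"] as Set]

tagMap.forEach { repository, tags ->        // outer loop: sequential
  tags.parallelStream().forEach { tag ->    // inner loop: parallel per tag
    def creationDate = null
    if (sortTagsByDate) {                   // skip the extra call unless sorting by date
      creationDate = fetchCreationDate(repository, tag)
    }
    cachedTags.put("${repository}:${tag}".toString(),
                   [name: "${repository}:${tag}".toString(), date: creationDate])
  }
}

assert cachedTags.size() == 2
assert cachedTags["repo-1:tag-1"].date == null

One caveat worth keeping in mind (not addressed by this commit): parallelStream() runs on the common ForkJoinPool, so long blocking registry calls there compete with every other user of that pool in the JVM.
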
DockerRegistryCredentials.groovy
@@ -52,6 +52,10 @@ class DockerRegistryCredentials {
return trackDigests
}

+  boolean getSortTagsByDate() {
+    return sortTagsByDate
+  }
+
List<String> getSkip(){
return skip
}
DockerRegistryImageCachingAgentTest.groovy (new file)
@@ -0,0 +1,111 @@
/*
* Copyright 2019 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.netflix.spinnaker.clouddriver.docker.registry.provider.agent


import com.netflix.spinnaker.clouddriver.docker.registry.DockerRegistryCloudProvider
import com.netflix.spinnaker.clouddriver.docker.registry.api.v2.client.DockerRegistryClient
import com.netflix.spinnaker.clouddriver.docker.registry.api.v2.client.DockerRegistryTags
import com.netflix.spinnaker.clouddriver.docker.registry.security.DockerRegistryCredentials
import spock.lang.Specification

import java.time.Instant

class DockerRegistryImageCachingAgentTest extends Specification {

DockerRegistryImageCachingAgent agent
def credentials = Mock(DockerRegistryCredentials)
def provider = Mock(DockerRegistryCloudProvider)
def client = Mock(DockerRegistryClient)

def setup() {
credentials.client >> client
agent = new DockerRegistryImageCachingAgent(provider, "test-docker", credentials, 0, 1, 1, "test-registry")
}

def "tags loaded from docker registry should be cached"() {
given:
credentials.repositories >> repositories
def total = 0
for (def i = 0; i < repositories.size(); i++) {
client.getTags(repositories[i]) >> new DockerRegistryTags().tap {
name=repositories[i]
tags=repoTags[i]
}
total += repoTags[i].size()
}

when:
def cacheResult = agent.loadData(null)

then:
cacheResult.cacheResults.get("taggedImage").size() == total
cacheResult.cacheResults.get("imageId").size() == total
for (def i = 0; i < total; i++) {
def repoAndTag = cacheResult.cacheResults.get("taggedImage")[i].attributes.get("name").split(":")
repoTags[repositories.indexOf(repoAndTag[0])].contains(repoAndTag[1])
cacheResult.cacheResults.get("taggedImage")[i].attributes.get("digest") == null
cacheResult.cacheResults.get("taggedImage")[i].attributes.get("date") == null
}

where:
repositories | repoTags
["repo-1"] | [["tag-1", "tag-2"]]
["repo-1", "repo-2" ] | [["tag-1-1"], ["tag-2-1", "tag-2-2"]]
}

def "cached tags should include creation date"() {
given:
credentials.sortTagsByDate >> true
credentials.repositories >> ["repo-1"]
client.getTags("repo-1") >> new DockerRegistryTags().tap {
name="repo-1"
tags=["tag-1", "tag-2"]
}
client.getCreationDate(*_) >> Instant.EPOCH

when:
def cacheResult = agent.loadData(null)

then:
cacheResult.cacheResults.get("taggedImage").size() == 2
cacheResult.cacheResults.get("imageId").size() == 2
cacheResult.cacheResults.get("taggedImage")[0].attributes.get("date") == Instant.EPOCH
cacheResult.cacheResults.get("taggedImage")[1].attributes.get("date") == Instant.EPOCH
}

def "cached tags should include digest"() {
given:
credentials.trackDigests >> true
credentials.repositories >> ["repo-1"]
client.getTags("repo-1") >> new DockerRegistryTags().tap {
name="repo-1"
tags=["tag-1", "tag-2"]
}
client.getDigest(*_) >> "123"

when:
def cacheResult = agent.loadData(null)

then:
cacheResult.cacheResults.get("taggedImage").size() == 2
cacheResult.cacheResults.get("imageId").size() == 2
cacheResult.cacheResults.get("taggedImage")[0].attributes.get("digest") == "123"
cacheResult.cacheResults.get("taggedImage")[1].attributes.get("digest") == "123"
}

}
