Adds some tests to DruidFilterInfo.
--Also fixes a few edge cases that the tests turned up: we weren't
handling a `null` filter correctly, and `NotFilter` is not in fact a
`MultiClauseFilter` despite having a child, so its child filter was never
being counted.

--Also fixes a test in `AsyncInvalidApiRequest` that had started failing.
It was using `GroovyTestUtils::compareJson` rather than
`GroovyTestUtils::compareErrorPayload`, which does something very
similar except that it ignores the `requestId`. I don't know why this test
wasn't failing earlier.
archolewa committed Dec 8, 2016
1 parent e846399 commit 8d5c155
Showing 3 changed files with 46 additions and 1 deletion.
DruidFilterInfo.java
@@ -3,13 +3,15 @@
 package com.yahoo.bard.webservice.logging.blocks;
 
 import com.yahoo.bard.webservice.druid.model.filter.MultiClauseFilter;
+import com.yahoo.bard.webservice.druid.model.filter.NotFilter;
 import com.yahoo.bard.webservice.logging.LogInfo;
 
 import com.fasterxml.jackson.annotation.JsonAutoDetect;
 
 import com.yahoo.bard.webservice.druid.model.filter.Filter;
 
 import java.util.ArrayDeque;
+import java.util.Collections;
 import java.util.Deque;
 import java.util.LinkedHashMap;
 import java.util.Map;
@@ -41,6 +43,9 @@ public DruidFilterInfo(Filter filter) {
      * @return A map containing a count of each type of filter
      */
     private Map<String, Long> buildFilterCount(Filter filter) {
+        if (filter == null) {
+            return Collections.emptyMap();
+        }
         Map<String, Long> filterTypeCounter = new LinkedHashMap<>();
         Deque<Filter> filterStack = new ArrayDeque<>();
         filterStack.add(filter);
@@ -52,6 +57,8 @@ private Map<String, Long> buildFilterCount(Filter filter) {
             );
             if (currentFilter instanceof MultiClauseFilter) {
                 filterStack.addAll(((MultiClauseFilter) currentFilter).getFields());
+            } else if (currentFilter instanceof NotFilter) {
+                filterStack.add(((NotFilter) currentFilter).getField());
             }
         }
         return filterTypeCounter;
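
Pieced together, the counting logic after this change amounts to roughly the following self-contained sketch. The pieces elided between the hunks (the loop itself and the key passed to merge) are assumptions: the key is taken to be the filter's simple class name, since that is what the new DruidFilterInfoSpec below asserts, and the wrapper class FilterCountSketch is invented purely for illustration.

// A hedged, self-contained sketch of DruidFilterInfo#buildFilterCount after this
// commit. The while-loop and the merge key are assumptions; the rest is taken
// directly from the diff above.
import com.yahoo.bard.webservice.druid.model.filter.Filter;
import com.yahoo.bard.webservice.druid.model.filter.MultiClauseFilter;
import com.yahoo.bard.webservice.druid.model.filter.NotFilter;

import java.util.ArrayDeque;
import java.util.Collections;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.Map;

public class FilterCountSketch {

    /** Counts how many filters of each type appear in the (possibly nested) filter tree. */
    public static Map<String, Long> buildFilterCount(Filter filter) {
        if (filter == null) {
            // A request with no filter now yields an empty count instead of an NPE.
            return Collections.emptyMap();
        }
        Map<String, Long> filterTypeCounter = new LinkedHashMap<>();
        Deque<Filter> filterStack = new ArrayDeque<>();
        filterStack.add(filter);
        while (!filterStack.isEmpty()) {
            Filter currentFilter = filterStack.pop();
            // Assumed key: the filter's simple class name, matching DruidFilterInfoSpec.
            filterTypeCounter.merge(currentFilter.getClass().getSimpleName(), 1L, Long::sum);
            if (currentFilter instanceof MultiClauseFilter) {
                // AND/OR filters expose all of their children at once.
                filterStack.addAll(((MultiClauseFilter) currentFilter).getFields());
            } else if (currentFilter instanceof NotFilter) {
                // NotFilter is not a MultiClauseFilter; it wraps exactly one child.
                filterStack.add(((NotFilter) currentFilter).getField());
            }
        }
        return filterTypeCounter;
    }
}

Keeping the traversal iterative over a Deque means deeply nested filter trees cannot overflow the call stack, and the LinkedHashMap keeps the counts in first-seen order.
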
AsyncInvalidApiRequest.groovy
@@ -37,7 +37,8 @@ class AsyncInvalidApiRequest extends AsyncFunctionalSpec {

     @Override
     Map<String, Closure<Void>> getResultAssertions() {
-        [ data: { assert GroovyTestUtils.compareJson(it.readEntity(String), EXPECTED_ERROR_MESSAGE) } ]
+        [ data: { assert GroovyTestUtils.compareErrorPayload(it.readEntity(String), EXPECTED_ERROR_MESSAGE) } ]
+
     }
 
     @Override
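
Per the commit message, compareErrorPayload behaves like compareJson except that it ignores the requestId field, which presumably differs on every run and would otherwise make a strict comparison against a canned expected error message fail. Below is a minimal, hypothetical Java sketch of that idea using Jackson; it is not the actual GroovyTestUtils implementation, and the class and method names are invented for illustration.

// Hypothetical sketch of "compare JSON, ignoring requestId"; not the real
// GroovyTestUtils::compareErrorPayload, just an illustration of the idea.
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

import java.io.IOException;

public class ErrorPayloadComparisonSketch {

    private static final ObjectMapper MAPPER = new ObjectMapper();

    /** True if the two payloads are equal once requestId is removed from both. */
    public static boolean compareIgnoringRequestId(String expected, String actual) throws IOException {
        return stripRequestId(MAPPER.readTree(expected)).equals(stripRequestId(MAPPER.readTree(actual)));
    }

    private static JsonNode stripRequestId(JsonNode node) {
        if (node instanceof ObjectNode) {
            // Drop the per-request identifier before comparing the parsed trees.
            ((ObjectNode) node).remove("requestId");
        }
        return node;
    }
}

The real helper lives in the Groovy test utilities; the sketch only illustrates the "strip the volatile field, then compare the parsed trees" idea.
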
DruidFilterInfoSpec.groovy (new file)
@@ -0,0 +1,37 @@
+package com.yahoo.bard.webservice.logging.blocks
+
+import com.yahoo.bard.webservice.data.dimension.Dimension
+import com.yahoo.bard.webservice.druid.model.filter.AndFilter
+import com.yahoo.bard.webservice.druid.model.filter.Filter
+import com.yahoo.bard.webservice.druid.model.filter.NotFilter
+import com.yahoo.bard.webservice.druid.model.filter.OrFilter
+import com.yahoo.bard.webservice.druid.model.filter.SelectorFilter
+
+import spock.lang.Shared
+import spock.lang.Specification
+import spock.lang.Unroll
+
+
+class DruidFilterInfoSpec extends Specification {
+
+    @Shared Filter selector = new SelectorFilter(Stub(Dimension), "value")
+
+    static final String SELECTOR_NAME = SelectorFilter.class.simpleName
+    static final String AND_NAME = AndFilter.class.simpleName
+    static final String OR_NAME = OrFilter.class.simpleName
+    static final String NOT_NAME = NotFilter.class.simpleName
+
+    @Unroll
+    def "The DruidFilterInfo correctly analyzes #filter"() {
+        expect:
+        new DruidFilterInfo(filter).numEachFilterType == expectedMap
+
+        where:
+        filter | expectedMap
+        null | [:]
+        selector | [(SELECTOR_NAME): 1L]
+        new AndFilter([selector, selector]) | [(AND_NAME): 1L, (SELECTOR_NAME): 2L]
+        new NotFilter(new AndFilter([selector, selector])) | [(NOT_NAME): 1L, (AND_NAME): 1L, (SELECTOR_NAME): 2L]
+        new OrFilter([new AndFilter([selector, selector]), new NotFilter(new AndFilter([selector, selector]))]) | [(OR_NAME): 1L, (AND_NAME): 2L, (NOT_NAME): 1L, (SELECTOR_NAME): 4L]
+    }
+}
