[Bugfix] Better exception handling in search pipelines (#7735)
* [Bugfix] Better exception handling in search pipelines

Thanks to @noCharger for reporting a failing negative test case.

Since we were rethrowing exceptions when resolving search pipelines and processing
search requests, those exceptions could end up killing the listener thread.

Also, we want to make sure that any exception thrown from a search pipeline
is wrapped in SearchPipelineProcessingException.

Signed-off-by: Michael Froh <froh@amazon.com>

* Add changelog entry and unit tests

Signed-off-by: Michael Froh <froh@amazon.com>

* Add check on error message for negative test

Signed-off-by: Michael Froh <froh@amazon.com>

* Fix misleading comment in test

Signed-off-by: Michael Froh <froh@amazon.com>

---------

Signed-off-by: Michael Froh <froh@amazon.com>
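
The first paragraph of the commit message above describes a standard async-listener rule: once a failure has been reported through the listener, the method should return rather than rethrow, so the exception never escapes onto the calling (transport) thread. Below is a minimal, self-contained sketch of that pattern; the Listener interface and execute method are generic stand-ins, not the actual OpenSearch ActionListener or search code.

import java.util.function.Function;

public class ListenerPatternSketch {
    // Simplified stand-in for an async result listener (shaped like ActionListener).
    interface Listener<T> {
        void onResponse(T response);
        void onFailure(Exception e);
    }

    // Transform the request; report failures only through the listener.
    static void execute(String request, Function<String, String> pipeline, Listener<String> listener) {
        String transformed;
        try {
            transformed = pipeline.apply(request);
        } catch (Exception e) {
            listener.onFailure(e);
            return; // do not rethrow: the caller has already been notified via the listener
        }
        listener.onResponse(transformed);
    }

    public static void main(String[] args) {
        Listener<String> listener = new Listener<>() {
            @Override public void onResponse(String r) { System.out.println("ok: " + r); }
            @Override public void onFailure(Exception e) { System.out.println("failed: " + e.getMessage()); }
        };
        // A "processor" that always fails; the failure is delivered to onFailure, not thrown upward.
        execute("query", r -> { throw new RuntimeException("bad processor"); }, listener);
    }
}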
msfroh committed May 25, 2023
1 parent 89edd55 commit cf02b96
Showing 5 changed files with 97 additions and 5 deletions.
CHANGELOG.md: 1 addition, 0 deletions
@@ -143,6 +143,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
- Add more index blocks check for resize APIs ([#6774](https://github.com/opensearch-project/OpenSearch/pull/6774))
- Replaces ZipInputStream with ZipFile to fix Zip Slip vulnerability ([#7230](https://github.com/opensearch-project/OpenSearch/pull/7230))
- Add missing validation/parsing of SearchBackpressureMode of SearchBackpressureSettings ([#7541](https://github.com/opensearch-project/OpenSearch/pull/7541))
+- [Search Pipelines] Better exception handling in search pipelines ([#7735](https://github.com/opensearch-project/OpenSearch/pull/7735))

### Security

@@ -122,3 +122,27 @@ teardown:
        index: test
        body: { }
  - match: { hits.total.value: 2 }
+---
+"Test invalid inline query":
+  - do:
+      catch: bad_request
+      search:
+        index: test
+        body: {
+          search_pipeline: {
+            "request_processors": [
+              {
+                "filter_query": {
+                  "query": {
+                    "woozlewuzzle": {
+                      "field": "foo"
+                    }
+                  }
+                }
+              }
+            ]
+          }
+        }
+  - match: { status: 400 }
+  - match: { error.type: "parsing_exception"}
+  - match: { error.reason: "unknown query [woozlewuzzle]"}
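
Note what these assertions imply: even though pipeline failures are wrapped in SearchPipelineProcessingException on the server side (per the commit message and the SearchPipelineService change below), the client still sees the underlying parsing_exception and a 400 status, so the wrapper preserves its cause for error reporting. A small, self-contained sketch of that cause-surfacing idea; the rootCause helper and the exception chain are illustrative only, not the OpenSearch error-rendering code.

public class CauseReportingSketch {
    // Walk to the deepest cause so error reporting shows the original failure,
    // not the pipeline-level wrapper it was rethrown as.
    static Throwable rootCause(Throwable t) {
        while (t.getCause() != null) {
            t = t.getCause();
        }
        return t;
    }

    public static void main(String[] args) {
        // Hypothetical chain: a generic wrapper around the original parsing failure.
        Exception wrapped = new RuntimeException(
            new IllegalArgumentException("unknown query [woozlewuzzle]")
        );
        Throwable root = rootCause(wrapped);
        System.out.println(root.getClass().getSimpleName() + ": " + root.getMessage());
        // prints: IllegalArgumentException: unknown query [woozlewuzzle]
    }
}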
@@ -401,7 +401,7 @@ private void executeRequest(
            );
        } catch (Exception e) {
            originalListener.onFailure(e);
-            throw new RuntimeException(e);
+            return;
        }

        ActionListener<SearchSourceBuilder> rewriteListener = ActionListener.wrap(source -> {
@@ -400,8 +400,12 @@ public PipelinedRequest resolvePipeline(SearchRequest searchRequest) throws Exception
                pipeline = pipelineHolder.pipeline;
            }
        }
-        SearchRequest transformedRequest = pipeline.transformRequest(searchRequest);
-        return new PipelinedRequest(pipeline, transformedRequest);
+        try {
+            SearchRequest transformedRequest = pipeline.transformRequest(searchRequest);
+            return new PipelinedRequest(pipeline, transformedRequest);
+        } catch (Exception e) {
+            throw new SearchPipelineProcessingException(e);
+        }
    }

    Map<String, Processor.Factory<SearchRequestProcessor>> getRequestProcessorFactories() {
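
The change above sets one boundary rule: whatever a request processor throws, resolvePipeline surfaces it as a SearchPipelineProcessingException, so callers (including the new unit tests below) only have to handle a single exception type while the original cause stays attached. A self-contained sketch of that boundary-wrapping pattern with made-up names, not the actual OpenSearch classes:

public class BoundaryWrappingSketch {
    // Stand-in for the single exception type exposed at the pipeline boundary.
    static class PipelineProcessingException extends RuntimeException {
        PipelineProcessingException(Exception cause) { super(cause); }
    }

    interface RequestProcessor {
        String processRequest(String request) throws Exception;
    }

    // Whatever the processor throws, callers only ever see PipelineProcessingException.
    static String transformRequest(RequestProcessor processor, String request) {
        try {
            return processor.processRequest(request);
        } catch (Exception e) {
            throw new PipelineProcessingException(e);
        }
    }

    public static void main(String[] args) {
        try {
            transformRequest(r -> { throw new IllegalStateException("processor blew up"); }, "query");
        } catch (PipelineProcessingException e) {
            System.out.println("pipeline failed, cause = " + e.getCause());
        }
    }
}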
@@ -680,12 +680,12 @@ public void testInlinePipeline() throws Exception {
        requestProcessorConfig.put("scale", 2);
        Map<String, Object> requestProcessorObject = new HashMap<>();
        requestProcessorObject.put("scale_request_size", requestProcessorConfig);
-        pipelineSourceMap.put("request_processors", List.of(requestProcessorObject));
+        pipelineSourceMap.put(Pipeline.REQUEST_PROCESSORS_KEY, List.of(requestProcessorObject));
        Map<String, Object> responseProcessorConfig = new HashMap<>();
        responseProcessorConfig.put("score", 2);
        Map<String, Object> responseProcessorObject = new HashMap<>();
        responseProcessorObject.put("fixed_score", responseProcessorConfig);
-        pipelineSourceMap.put("response_processors", List.of(responseProcessorObject));
+        pipelineSourceMap.put(Pipeline.RESPONSE_PROCESSORS_KEY, List.of(responseProcessorObject));

        SearchSourceBuilder sourceBuilder = SearchSourceBuilder.searchSource().size(100).searchPipelineSource(pipelineSourceMap);
        SearchRequest searchRequest = new SearchRequest().source(sourceBuilder);
@@ -723,4 +723,67 @@ public void testInfo() {
        assertTrue(info.containsProcessor(Pipeline.REQUEST_PROCESSORS_KEY, "scale_request_size"));
        assertTrue(info.containsProcessor(Pipeline.RESPONSE_PROCESSORS_KEY, "fixed_score"));
    }
+
+    public void testExceptionOnPipelineCreation() {
+        Map<String, Processor.Factory<SearchRequestProcessor>> badFactory = Map.of(
+            "bad_factory",
+            (pf, t, f, c) -> { throw new RuntimeException(); }
+        );
+        SearchPipelineService searchPipelineService = createWithProcessors(badFactory, Collections.emptyMap());
+
+        Map<String, Object> pipelineSourceMap = new HashMap<>();
+        pipelineSourceMap.put(Pipeline.REQUEST_PROCESSORS_KEY, List.of(Map.of("bad_factory", Collections.emptyMap())));
+
+        SearchSourceBuilder sourceBuilder = SearchSourceBuilder.searchSource().searchPipelineSource(pipelineSourceMap);
+        SearchRequest searchRequest = new SearchRequest().source(sourceBuilder);
+
+        // Exception thrown when creating the pipeline
+        expectThrows(SearchPipelineProcessingException.class, () -> searchPipelineService.resolvePipeline(searchRequest));
+
+    }
+
+    public void testExceptionOnRequestProcessing() {
+        SearchRequestProcessor throwingRequestProcessor = new FakeRequestProcessor("throwing_request", null, null, r -> {
+            throw new RuntimeException();
+        });
+        Map<String, Processor.Factory<SearchRequestProcessor>> throwingRequestProcessorFactory = Map.of(
+            "throwing_request",
+            (pf, t, f, c) -> throwingRequestProcessor
+        );
+
+        SearchPipelineService searchPipelineService = createWithProcessors(throwingRequestProcessorFactory, Collections.emptyMap());
+
+        Map<String, Object> pipelineSourceMap = new HashMap<>();
+        pipelineSourceMap.put(Pipeline.REQUEST_PROCESSORS_KEY, List.of(Map.of("throwing_request", Collections.emptyMap())));
+
+        SearchSourceBuilder sourceBuilder = SearchSourceBuilder.searchSource().searchPipelineSource(pipelineSourceMap);
+        SearchRequest searchRequest = new SearchRequest().source(sourceBuilder);
+
+        // Exception thrown when processing the request
+        expectThrows(SearchPipelineProcessingException.class, () -> searchPipelineService.resolvePipeline(searchRequest));
+    }
+
+    public void testExceptionOnResponseProcessing() throws Exception {
+        SearchResponseProcessor throwingResponseProcessor = new FakeResponseProcessor("throwing_response", null, null, r -> {
+            throw new RuntimeException();
+        });
+        Map<String, Processor.Factory<SearchResponseProcessor>> throwingResponseProcessorFactory = Map.of(
+            "throwing_response",
+            (pf, t, f, c) -> throwingResponseProcessor
+        );
+
+        SearchPipelineService searchPipelineService = createWithProcessors(Collections.emptyMap(), throwingResponseProcessorFactory);
+
+        Map<String, Object> pipelineSourceMap = new HashMap<>();
+        pipelineSourceMap.put(Pipeline.RESPONSE_PROCESSORS_KEY, List.of(Map.of("throwing_response", Collections.emptyMap())));
+
+        SearchSourceBuilder sourceBuilder = SearchSourceBuilder.searchSource().size(100).searchPipelineSource(pipelineSourceMap);
+        SearchRequest searchRequest = new SearchRequest().source(sourceBuilder);
+
+        PipelinedRequest pipelinedRequest = searchPipelineService.resolvePipeline(searchRequest);
+
+        SearchResponse response = new SearchResponse(null, null, 0, 0, 0, 0, null, null);
+        // Exception thrown when processing response
+        expectThrows(SearchPipelineProcessingException.class, () -> pipelinedRequest.transformResponse(response));
+    }
 }
