
Commit 3b0b8f0

Make search pipelines asynchronous (#10598) (#11010)
* Make search pipelines asynchronous

If a search processor needs to make a call out to another service, we should not risk blocking on the transport thread. We should support async execution.

Signed-off-by: Michael Froh <froh@amazon.com>

* Compute pipelineStart before building request callback chain

Also, IntelliJ suggested refactoring creation of the terminal request callback into a separate method since the existing method was really big. I liked that suggestion.

Signed-off-by: Michael Froh <froh@amazon.com>

* Rename async methods (put async at end)

Signed-off-by: Michael Froh <froh@amazon.com>

---------

Signed-off-by: Michael Froh <froh@amazon.com>
(cherry picked from commit da011ba)
1 parent 53b3505 commit 3b0b8f0

8 files changed: +291 additions, -124 deletions

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -35,6 +35,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Force merge with `only_expunge_deletes` honors max segment size ([#10036](https://github.com/opensearch-project/OpenSearch/pull/10036))
 - Add the means to extract the contextual properties from HttpChannel, TcpCChannel and TrasportChannel without excessive typecasting ([#10562](https://github.com/opensearch-project/OpenSearch/pull/10562))
 - Backport the PR #9107 for updating CONCURRENT_SEGMENT_SEARCH_TARGET_MAX_SLICE_COUNT_KEY setting to a dynamic setting ([#10606](https://github.com/opensearch-project/OpenSearch/pull/10606))
+- Search pipelines now support asynchronous request and response processors to avoid blocking on a transport thread ([#10598](https://github.com/opensearch-project/OpenSearch/pull/10598))
 - [Remote Store] Add Remote Store backpressure rejection stats to `_nodes/stats` ([#10524](https://github.com/opensearch-project/OpenSearch/pull/10524))
 - [BUG] Fix java.lang.SecurityException in repository-gcs plugin ([#10642](https://github.com/opensearch-project/OpenSearch/pull/10642))
 - Add telemetry tracer/metric enable flag and integ test. ([#10395](https://github.com/opensearch-project/OpenSearch/pull/10395))

server/src/main/java/org/opensearch/action/search/TransportSearchAction.java

Lines changed: 38 additions & 20 deletions
@@ -506,24 +506,51 @@ private void executeRequest(
         ActionListener<SearchResponse> listener;
         try {
             searchRequest = searchPipelineService.resolvePipeline(originalSearchRequest);
-            listener = ActionListener.wrap(
-                r -> originalListener.onResponse(searchRequest.transformResponse(r)),
-                originalListener::onFailure
-            );
+            listener = searchRequest.transformResponseListener(originalListener);
         } catch (Exception e) {
             originalListener.onFailure(e);
             return;
         }
 
-        if (searchQueryMetricsEnabled) {
-            try {
-                searchQueryCategorizer.categorize(searchRequest.source());
-            } catch (Exception e) {
-                logger.error("Error while trying to categorize the query.", e);
-            }
-        }
-
-        ActionListener<SearchSourceBuilder> rewriteListener = ActionListener.wrap(source -> {
+        ActionListener<SearchRequest> requestTransformListener = ActionListener.wrap(sr -> {
+            if (searchQueryMetricsEnabled) {
+                try {
+                    searchQueryCategorizer.categorize(sr.source());
+                } catch (Exception e) {
+                    logger.error("Error while trying to categorize the query.", e);
+                }
+            }
+
+            ActionListener<SearchSourceBuilder> rewriteListener = buildRewriteListener(
+                sr,
+                task,
+                timeProvider,
+                searchAsyncActionProvider,
+                listener,
+                searchRequestOperationsListener
+            );
+            if (sr.source() == null) {
+                rewriteListener.onResponse(sr.source());
+            } else {
+                Rewriteable.rewriteAndFetch(
+                    sr.source(),
+                    searchService.getRewriteContext(timeProvider::getAbsoluteStartMillis),
+                    rewriteListener
+                );
+            }
+        }, listener::onFailure);
+        searchRequest.transformRequest(requestTransformListener);
+    }
+
+    private ActionListener<SearchSourceBuilder> buildRewriteListener(
+        SearchRequest searchRequest,
+        Task task,
+        SearchTimeProvider timeProvider,
+        SearchAsyncActionProvider searchAsyncActionProvider,
+        ActionListener<SearchResponse> listener,
+        SearchRequestOperationsListener searchRequestOperationsListener
+    ) {
+        return ActionListener.wrap(source -> {
             if (source != searchRequest.source()) {
                 // only set it if it changed - we don't allow null values to be set but it might be already null. this way we catch
                 // situations when source is rewritten to null due to a bug
@@ -634,15 +661,6 @@ private void executeRequest(
                 }
             }
         }, listener::onFailure);
-        if (searchRequest.source() == null) {
-            rewriteListener.onResponse(searchRequest.source());
-        } else {
-            Rewriteable.rewriteAndFetch(
-                searchRequest.source(),
-                searchService.getRewriteContext(timeProvider::getAbsoluteStartMillis),
-                rewriteListener
-            );
-        }
     }
 
     static boolean shouldMinimizeRoundtrips(SearchRequest searchRequest) {
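Stripped of the surrounding coordinator logic, the caller-side shape of this change is roughly the following. This is an illustrative sketch only: the `AsyncPipelineCallSketch` class and `executeSearch` helper are hypothetical, not code from this commit. `resolvePipeline` now only attaches the pipeline, response processors are applied by wrapping the response listener up front, and request processors run through a callback that eventually starts the search.

```java
import org.opensearch.action.search.SearchRequest;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.core.action.ActionListener;
import org.opensearch.search.pipeline.PipelinedRequest;
import org.opensearch.search.pipeline.SearchPipelineService;

// Illustrative only: shows the caller-side shape of the change, not the real coordinator code.
final class AsyncPipelineCallSketch {

    private final SearchPipelineService searchPipelineService;

    AsyncPipelineCallSketch(SearchPipelineService searchPipelineService) {
        this.searchPipelineService = searchPipelineService;
    }

    void search(SearchRequest original, ActionListener<SearchResponse> listener) {
        // resolvePipeline() no longer runs request processors inline; it just attaches the pipeline.
        PipelinedRequest pipelined = searchPipelineService.resolvePipeline(original);

        // Response processors are applied by wrapping the response listener up front...
        ActionListener<SearchResponse> wrapped = pipelined.transformResponseListener(listener);

        // ...and request processors run asynchronously; the search starts from this callback.
        pipelined.transformRequest(ActionListener.wrap(
            transformed -> executeSearch(transformed, wrapped),
            wrapped::onFailure
        ));
    }

    private void executeSearch(SearchRequest request, ActionListener<SearchResponse> listener) {
        // Hypothetical stand-in for the rewrite-and-execute logic in TransportSearchAction.
        listener.onFailure(new UnsupportedOperationException("sketch only"));
    }
}
```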

server/src/main/java/org/opensearch/search/pipeline/Pipeline.java

Lines changed: 127 additions & 79 deletions
@@ -16,11 +16,13 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.common.Nullable;
 import org.opensearch.common.io.stream.BytesStreamOutput;
+import org.opensearch.core.action.ActionListener;
 import org.opensearch.core.common.io.stream.NamedWriteableAwareStreamInput;
 import org.opensearch.core.common.io.stream.NamedWriteableRegistry;
 import org.opensearch.core.common.io.stream.StreamInput;
 import org.opensearch.search.SearchPhaseResult;
 
+import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.TimeUnit;
@@ -117,92 +119,138 @@ protected void afterResponseProcessor(Processor processor, long timeInNanos) {}
 
     protected void onResponseProcessorFailed(Processor processor) {}
 
-    SearchRequest transformRequest(SearchRequest request) throws SearchPipelineProcessingException {
-        if (searchRequestProcessors.isEmpty() == false) {
-            long pipelineStart = relativeTimeSupplier.getAsLong();
-            beforeTransformRequest();
-            try {
-                try (BytesStreamOutput bytesStreamOutput = new BytesStreamOutput()) {
-                    request.writeTo(bytesStreamOutput);
-                    try (StreamInput in = bytesStreamOutput.bytes().streamInput()) {
-                        try (StreamInput input = new NamedWriteableAwareStreamInput(in, namedWriteableRegistry)) {
-                            request = new SearchRequest(input);
-                        }
-                    }
-                }
-                for (SearchRequestProcessor processor : searchRequestProcessors) {
-                    beforeRequestProcessor(processor);
-                    long start = relativeTimeSupplier.getAsLong();
-                    try {
-                        request = processor.processRequest(request);
-                    } catch (Exception e) {
-                        onRequestProcessorFailed(processor);
-                        if (processor.isIgnoreFailure()) {
-                            logger.warn(
-                                "The exception from request processor ["
-                                    + processor.getType()
-                                    + "] in the search pipeline ["
-                                    + id
-                                    + "] was ignored",
-                                e
-                            );
-                        } else {
-                            throw e;
-                        }
-                    } finally {
-                        long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - start);
-                        afterRequestProcessor(processor, took);
-                    }
-                }
-            } catch (Exception e) {
-                onTransformRequestFailure();
-                throw new SearchPipelineProcessingException(e);
-            } finally {
-                long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - pipelineStart);
-                afterTransformRequest(took);
-            }
-        }
-        return request;
+    void transformRequest(SearchRequest request, ActionListener<SearchRequest> requestListener) throws SearchPipelineProcessingException {
+        if (searchRequestProcessors.isEmpty()) {
+            requestListener.onResponse(request);
+            return;
+        }
+
+        try (BytesStreamOutput bytesStreamOutput = new BytesStreamOutput()) {
+            request.writeTo(bytesStreamOutput);
+            try (StreamInput in = bytesStreamOutput.bytes().streamInput()) {
+                try (StreamInput input = new NamedWriteableAwareStreamInput(in, namedWriteableRegistry)) {
+                    request = new SearchRequest(input);
+                }
+            }
+        } catch (IOException e) {
+            requestListener.onFailure(new SearchPipelineProcessingException(e));
+            return;
+        }
+
+        ActionListener<SearchRequest> finalListener = getTerminalSearchRequestActionListener(requestListener);
+
+        // Chain listeners back-to-front
+        ActionListener<SearchRequest> currentListener = finalListener;
+        for (int i = searchRequestProcessors.size() - 1; i >= 0; i--) {
+            final ActionListener<SearchRequest> nextListener = currentListener;
+            SearchRequestProcessor processor = searchRequestProcessors.get(i);
+            currentListener = ActionListener.wrap(r -> {
+                long start = relativeTimeSupplier.getAsLong();
+                beforeRequestProcessor(processor);
+                processor.processRequestAsync(r, ActionListener.wrap(rr -> {
+                    long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - start);
+                    afterRequestProcessor(processor, took);
+                    nextListener.onResponse(rr);
+                }, e -> {
+                    long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - start);
+                    afterRequestProcessor(processor, took);
+                    onRequestProcessorFailed(processor);
+                    if (processor.isIgnoreFailure()) {
+                        logger.warn(
+                            "The exception from request processor ["
+                                + processor.getType()
+                                + "] in the search pipeline ["
+                                + id
+                                + "] was ignored",
+                            e
+                        );
+                        nextListener.onResponse(r);
+                    } else {
+                        nextListener.onFailure(new SearchPipelineProcessingException(e));
+                    }
+                }));
+            }, finalListener::onFailure);
+        }
+
+        beforeTransformRequest();
+        currentListener.onResponse(request);
     }
 
-    SearchResponse transformResponse(SearchRequest request, SearchResponse response) throws SearchPipelineProcessingException {
-        if (searchResponseProcessors.isEmpty() == false) {
-            long pipelineStart = relativeTimeSupplier.getAsLong();
-            beforeTransformResponse();
-            try {
-                for (SearchResponseProcessor processor : searchResponseProcessors) {
-                    beforeResponseProcessor(processor);
-                    long start = relativeTimeSupplier.getAsLong();
-                    try {
-                        response = processor.processResponse(request, response);
-                    } catch (Exception e) {
-                        onResponseProcessorFailed(processor);
-                        if (processor.isIgnoreFailure()) {
-                            logger.warn(
-                                "The exception from response processor ["
-                                    + processor.getType()
-                                    + "] in the search pipeline ["
-                                    + id
-                                    + "] was ignored",
-                                e
-                            );
-                        } else {
-                            throw e;
-                        }
-                    } finally {
-                        long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - start);
-                        afterResponseProcessor(processor, took);
-                    }
-                }
-            } catch (Exception e) {
-                onTransformResponseFailure();
-                throw new SearchPipelineProcessingException(e);
-            } finally {
-                long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - pipelineStart);
-                afterTransformResponse(took);
-            }
-        }
-        return response;
+    private ActionListener<SearchRequest> getTerminalSearchRequestActionListener(ActionListener<SearchRequest> requestListener) {
+        final long pipelineStart = relativeTimeSupplier.getAsLong();
+
+        return ActionListener.wrap(r -> {
+            long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - pipelineStart);
+            afterTransformRequest(took);
+            requestListener.onResponse(new PipelinedRequest(this, r));
+        }, e -> {
+            long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - pipelineStart);
+            afterTransformRequest(took);
+            onTransformRequestFailure();
+            requestListener.onFailure(new SearchPipelineProcessingException(e));
+        });
+    }
+
+    ActionListener<SearchResponse> transformResponseListener(SearchRequest request, ActionListener<SearchResponse> responseListener) {
+        if (searchResponseProcessors.isEmpty()) {
+            // No response transformation necessary
+            return responseListener;
+        }
+
+        long[] pipelineStart = new long[1];
+
+        final ActionListener<SearchResponse> originalListener = responseListener;
+        responseListener = ActionListener.wrap(r -> {
+            long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - pipelineStart[0]);
+            afterTransformResponse(took);
+            originalListener.onResponse(r);
+        }, e -> {
+            long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - pipelineStart[0]);
+            afterTransformResponse(took);
+            onTransformResponseFailure();
+            originalListener.onFailure(e);
+        });
+        ActionListener<SearchResponse> finalListener = responseListener; // Jump directly to this one on exception.
+
+        for (int i = searchResponseProcessors.size() - 1; i >= 0; i--) {
+            final ActionListener<SearchResponse> currentFinalListener = responseListener;
+            final SearchResponseProcessor processor = searchResponseProcessors.get(i);
+
+            responseListener = ActionListener.wrap(r -> {
+                beforeResponseProcessor(processor);
+                final long start = relativeTimeSupplier.getAsLong();
+                processor.processResponseAsync(request, r, ActionListener.wrap(rr -> {
+                    long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - start);
+                    afterResponseProcessor(processor, took);
+                    currentFinalListener.onResponse(rr);
+                }, e -> {
+                    onResponseProcessorFailed(processor);
+                    long took = TimeUnit.NANOSECONDS.toMillis(relativeTimeSupplier.getAsLong() - start);
+                    afterResponseProcessor(processor, took);
+                    if (processor.isIgnoreFailure()) {
+                        logger.warn(
+                            "The exception from response processor ["
+                                + processor.getType()
+                                + "] in the search pipeline ["
+                                + id
+                                + "] was ignored",
+                            e
+                        );
+                        // Pass the previous response through to the next processor in the chain
+                        currentFinalListener.onResponse(r);
+                    } else {
+                        currentFinalListener.onFailure(new SearchPipelineProcessingException(e));
+                    }
+                }));
+            }, finalListener::onFailure);
+        }
+        final ActionListener<SearchResponse> chainListener = responseListener;
+        return ActionListener.wrap(r -> {
+            beforeTransformResponse();
+            pipelineStart[0] = relativeTimeSupplier.getAsLong();
+            chainListener.onResponse(r);
+        }, originalListener::onFailure);
     }
 
     <Result extends SearchPhaseResult> void runSearchPhaseResultsTransformer(
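The core trick in the new `transformRequest` and `transformResponseListener` is building the callback chain back-to-front: start from the terminal listener and wrap it once per processor, so that firing the head of the chain runs the processors in forward order without ever blocking a thread. Below is a minimal standalone sketch of that pattern; the `ChainSketch`, `Step`, and `run` names are hypothetical, and the per-processor metrics and ignore-failure handling from the real code are deliberately omitted.

```java
import org.opensearch.core.action.ActionListener;

import java.util.List;
import java.util.function.BiConsumer;

// Minimal sketch of back-to-front listener chaining (hypothetical names).
public final class ChainSketch {

    // Each step asynchronously transforms a String and reports the result through a listener.
    interface Step extends BiConsumer<String, ActionListener<String>> {}

    static void run(List<Step> steps, String input, ActionListener<String> terminal) {
        ActionListener<String> current = terminal;
        // Wrap from the last step to the first: the outermost wrapper invokes step 0,
        // whose listener invokes step 1, and so on, until the terminal listener fires.
        for (int i = steps.size() - 1; i >= 0; i--) {
            final Step step = steps.get(i);
            final ActionListener<String> next = current;
            current = ActionListener.wrap(value -> step.accept(value, next), terminal::onFailure);
        }
        current.onResponse(input); // Kick off the chain with the original input.
    }

    public static void main(String[] args) {
        List<Step> steps = List.of(
            (value, l) -> l.onResponse(value + " -> step1"), // could be an async client call in real life
            (value, l) -> l.onResponse(value + " -> step2")
        );
        run(steps, "input", ActionListener.wrap(System.out::println, Throwable::printStackTrace));
    }
}
```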

server/src/main/java/org/opensearch/search/pipeline/PipelinedRequest.java

Lines changed: 7 additions & 2 deletions
@@ -12,6 +12,7 @@
 import org.opensearch.action.search.SearchPhaseResults;
 import org.opensearch.action.search.SearchRequest;
 import org.opensearch.action.search.SearchResponse;
+import org.opensearch.core.action.ActionListener;
 import org.opensearch.search.SearchPhaseResult;
 
 /**
@@ -27,8 +28,12 @@ public final class PipelinedRequest extends SearchRequest {
         this.pipeline = pipeline;
     }
 
-    public SearchResponse transformResponse(SearchResponse response) {
-        return pipeline.transformResponse(this, response);
+    public void transformRequest(ActionListener<SearchRequest> requestListener) {
+        pipeline.transformRequest(this, requestListener);
+    }
+
+    public ActionListener<SearchResponse> transformResponseListener(ActionListener<SearchResponse> responseListener) {
+        return pipeline.transformResponseListener(this, responseListener);
     }
 
     public <Result extends SearchPhaseResult> void transformSearchPhaseResults(

server/src/main/java/org/opensearch/search/pipeline/SearchPipelineService.java

Lines changed: 1 addition & 2 deletions
@@ -408,8 +408,7 @@ public PipelinedRequest resolvePipeline(SearchRequest searchRequest) {
                 pipeline = pipelineHolder.pipeline;
             }
         }
-        SearchRequest transformedRequest = pipeline.transformRequest(searchRequest);
-        return new PipelinedRequest(pipeline, transformedRequest);
+        return new PipelinedRequest(pipeline, searchRequest);
     }
 
     Map<String, Processor.Factory<SearchRequestProcessor>> getRequestProcessorFactories() {

server/src/main/java/org/opensearch/search/pipeline/SearchRequestProcessor.java

Lines changed: 27 additions & 0 deletions
@@ -9,10 +9,37 @@
 package org.opensearch.search.pipeline;
 
 import org.opensearch.action.search.SearchRequest;
+import org.opensearch.core.action.ActionListener;
 
 /**
  * Interface for a search pipeline processor that modifies a search request.
  */
 public interface SearchRequestProcessor extends Processor {
+
+    /**
+     * Transform a {@link SearchRequest}. Executed on the coordinator node before any {@link org.opensearch.action.search.SearchPhase}
+     * executes.
+     * <p>
+     * Implement this method if the processor makes no asynchronous calls.
+     * @param request the executed {@link SearchRequest}
+     * @return a new {@link SearchRequest} (or the input {@link SearchRequest} if no changes)
+     * @throws Exception if an error occurs during processing
+     */
     SearchRequest processRequest(SearchRequest request) throws Exception;
+
+    /**
+     * Transform a {@link SearchRequest}. Executed on the coordinator node before any {@link org.opensearch.action.search.SearchPhase}
+     * executes.
+     * <p>
+     * Expert method: Implement this if the processor needs to make asynchronous calls. Otherwise, implement processRequest.
+     * @param request the executed {@link SearchRequest}
+     * @param requestListener callback to be invoked on successful processing or on failure
+     */
+    default void processRequestAsync(SearchRequest request, ActionListener<SearchRequest> requestListener) {
+        try {
+            requestListener.onResponse(processRequest(request));
+        } catch (Exception e) {
+            requestListener.onFailure(e);
+        }
+    }
 }
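For a processor that really does call out to another service, the new `processRequestAsync` default method is the override point. The sketch below shows what such a plugin-defined processor might look like; the class name, processor type, index, document id, and the request mutation are all hypothetical placeholders, not part of this commit. The client call runs asynchronously and the listener is completed from its callback, so the transport thread is never blocked.

```java
import org.opensearch.action.get.GetRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.client.Client;
import org.opensearch.core.action.ActionListener;
import org.opensearch.search.pipeline.SearchRequestProcessor;

/**
 * Hypothetical request processor that looks up a document in another index and
 * uses it to adjust the search request, completing via the listener instead of blocking.
 */
public class EnrichingRequestProcessor implements SearchRequestProcessor {

    private final Client client; // would be supplied by the plugin's processor factory

    public EnrichingRequestProcessor(Client client) {
        this.client = client;
    }

    @Override
    public String getType() {
        return "enriching_request"; // hypothetical processor type name
    }

    @Override
    public String getTag() {
        return null;
    }

    @Override
    public String getDescription() {
        return "Enriches the search request from another index (illustrative only)";
    }

    @Override
    public boolean isIgnoreFailure() {
        return false;
    }

    @Override
    public SearchRequest processRequest(SearchRequest request) {
        throw new UnsupportedOperationException("This processor only supports async execution");
    }

    @Override
    public void processRequestAsync(SearchRequest request, ActionListener<SearchRequest> requestListener) {
        // The GET runs asynchronously; the transport thread is released immediately.
        client.get(new GetRequest("enrichment-index", "doc-id"), ActionListener.wrap(getResponse -> {
            if (getResponse.isExists() && request.source() != null) {
                request.source().size(10); // placeholder for a real transformation based on the fetched doc
            }
            requestListener.onResponse(request);
        }, requestListener::onFailure));
    }
}
```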
