From 853efd6be34f981740dec99dd649754eb085de83 Mon Sep 17 00:00:00 2001
From: Mingshi Liu
Date: Fri, 13 Sep 2024 11:15:32 -0700
Subject: [PATCH] fix full_response false and no mapping exceptions (#2944)

Signed-off-by: Mingshi Liu
---
 .../MLInferenceSearchResponseProcessor.java   |   2 +-
 ...InferenceSearchResponseProcessorTests.java | 213 ++++++++++++++++--
 2 files changed, 194 insertions(+), 21 deletions(-)

diff --git a/plugin/src/main/java/org/opensearch/ml/processor/MLInferenceSearchResponseProcessor.java b/plugin/src/main/java/org/opensearch/ml/processor/MLInferenceSearchResponseProcessor.java
index 38e62528f3..2164877b9f 100644
--- a/plugin/src/main/java/org/opensearch/ml/processor/MLInferenceSearchResponseProcessor.java
+++ b/plugin/src/main/java/org/opensearch/ml/processor/MLInferenceSearchResponseProcessor.java
@@ -634,7 +634,7 @@ private static Map<String, String> getDefaultOutputMapping(Integer mappingIndex,
         Map<String, String> outputMapping;
         if (processOutputMap == null || processOutputMap.size() == 0) {
             outputMapping = new HashMap<>();
-            outputMapping.put(DEFAULT_OUTPUT_FIELD_NAME, "$." + DEFAULT_OUTPUT_FIELD_NAME);
+            outputMapping.put(DEFAULT_OUTPUT_FIELD_NAME, null);
         } else {
             outputMapping = processOutputMap.get(mappingIndex);
         }
diff --git a/plugin/src/test/java/org/opensearch/ml/processor/MLInferenceSearchResponseProcessorTests.java b/plugin/src/test/java/org/opensearch/ml/processor/MLInferenceSearchResponseProcessorTests.java
index 62b397f84b..8f04cab9d4 100644
--- a/plugin/src/test/java/org/opensearch/ml/processor/MLInferenceSearchResponseProcessorTests.java
+++ b/plugin/src/test/java/org/opensearch/ml/processor/MLInferenceSearchResponseProcessorTests.java
@@ -351,6 +351,158 @@ public void onFailure(Exception e) {
         verify(client, times(1)).execute(any(), any(), any());
     }
 
+    /**
+     * Tests create processor with one_to_one is false
+     * with custom prompt
+     * with many to one prediction, 5 documents in hits are calling 1 prediction tasks
+     * with full response path false and no output mapping is provided
+     * @throws Exception if an error occurs during the test
+     */
+    public void testProcessResponseManyToOneWithCustomPromptFullResponsePathFalse() throws Exception {
+
+        String documentField = "text";
+        String modelInputField = "context";
+        List<Map<String, String>> inputMap = new ArrayList<>();
+        Map<String, String> input = new HashMap<>();
+        input.put(modelInputField, documentField);
+        inputMap.add(input);
+
+        Map<String, String> modelConfig = new HashMap<>();
+        modelConfig
+            .put(
+                "prompt",
+                "\\n\\nHuman: You are a professional data analyst. You will always answer question based on the given context first. If the answer is not directly shown in the context, you will analyze the data and find the answer. If you don't know the answer, just say I don't know. Context: ${parameters.context}. \\n\\n Human: please summarize the documents \\n\\n Assistant:"
+            );
+        MLInferenceSearchResponseProcessor responseProcessor = new MLInferenceSearchResponseProcessor(
+            "model1",
+            inputMap,
+            null,
+            modelConfig,
+            DEFAULT_MAX_PREDICTION_TASKS,
+            PROCESSOR_TAG,
+            DESCRIPTION,
+            false,
+            "remote",
+            false,
+            false,
+            false,
+            "{ \"parameters\": ${ml_inference.parameters} }",
+            client,
+            TEST_XCONTENT_REGISTRY_FOR_QUERY,
+            false
+        );
+
+        SearchRequest request = getSearchRequest();
+        String fieldName = "text";
+        SearchResponse response = getSearchResponse(5, true, fieldName);
+        Map<String, String> predictionResult = ImmutableMap.of("response", "here is a summary of the documents");
+
+        ModelTensor modelTensor = ModelTensor.builder().dataAsMap(predictionResult).build();
+        ModelTensors modelTensors = ModelTensors.builder().mlModelTensors(Arrays.asList(modelTensor)).build();
+        ModelTensorOutput mlModelTensorOutput = ModelTensorOutput.builder().mlModelOutputs(Arrays.asList(modelTensors)).build();
+
+        doAnswer(invocation -> {
+            ActionListener<MLTaskResponse> actionListener = invocation.getArgument(2);
+            actionListener.onResponse(MLTaskResponse.builder().output(mlModelTensorOutput).build());
+            return null;
+        }).when(client).execute(any(), any(), any());
+
+        ActionListener<SearchResponse> listener = new ActionListener<>() {
+            @Override
+            public void onResponse(SearchResponse newSearchResponse) {
+                assertEquals(newSearchResponse.getHits().getHits().length, 5);
+                assertEquals(newSearchResponse.getHits().getHits()[0].getSourceAsMap().get("inference_results"), predictionResult);
+                assertEquals(newSearchResponse.getHits().getHits()[1].getSourceAsMap().get("inference_results"), predictionResult);
+                assertEquals(newSearchResponse.getHits().getHits()[2].getSourceAsMap().get("inference_results"), predictionResult);
+                assertEquals(newSearchResponse.getHits().getHits()[3].getSourceAsMap().get("inference_results"), predictionResult);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                throw new RuntimeException(e);
+            }
+
+        };
+        responseProcessor.processResponseAsync(request, response, responseContext, listener);
+        verify(client, times(1)).execute(any(), any(), any());
+    }
+
+    /**
+     * Tests create processor with one_to_one is false
+     * with custom prompt
+     * with many to one prediction, 5 documents in hits are calling 1 prediction tasks
+     * with full response path true and no output mapping is provided
+     * @throws Exception if an error occurs during the test
+     */
+    public void testProcessResponseManyToOneWithCustomPromptFullResponsePathTrue() throws Exception {
+
+        String documentField = "text";
+        String modelInputField = "context";
+        List<Map<String, String>> inputMap = new ArrayList<>();
+        Map<String, String> input = new HashMap<>();
+        input.put(modelInputField, documentField);
+        inputMap.add(input);
+
+        Map<String, String> modelConfig = new HashMap<>();
+        modelConfig
+            .put(
+                "prompt",
+                "\\n\\nHuman: You are a professional data analyst. You will always answer question based on the given context first. If the answer is not directly shown in the context, you will analyze the data and find the answer. If you don't know the answer, just say I don't know. Context: ${parameters.context}. \\n\\n Human: please summarize the documents \\n\\n Assistant:"
+            );
+        MLInferenceSearchResponseProcessor responseProcessor = new MLInferenceSearchResponseProcessor(
+            "model1",
+            inputMap,
+            null,
+            modelConfig,
+            DEFAULT_MAX_PREDICTION_TASKS,
+            PROCESSOR_TAG,
+            DESCRIPTION,
+            false,
+            "remote",
+            true,
+            false,
+            false,
+            "{ \"parameters\": ${ml_inference.parameters} }",
+            client,
+            TEST_XCONTENT_REGISTRY_FOR_QUERY,
+            false
+        );
+
+        SearchRequest request = getSearchRequest();
+        String fieldName = "text";
+        SearchResponse response = getSearchResponse(5, true, fieldName);
+        Map<String, String> predictionResult = ImmutableMap.of("response", "here is a summary of the documents");
+        ModelTensor modelTensor = ModelTensor.builder().dataAsMap(predictionResult).build();
+        ModelTensors modelTensors = ModelTensors.builder().mlModelTensors(Arrays.asList(modelTensor)).build();
+        ModelTensorOutput mlModelTensorOutput = ModelTensorOutput.builder().mlModelOutputs(Arrays.asList(modelTensors)).build();
+        Map<String, Object> fullPredictionResult = generateInferenceResult("here is a summary of the documents");
+
+        doAnswer(invocation -> {
+            ActionListener<MLTaskResponse> actionListener = invocation.getArgument(2);
+            actionListener.onResponse(MLTaskResponse.builder().output(mlModelTensorOutput).build());
+            return null;
+        }).when(client).execute(any(), any(), any());
+
+        ActionListener<SearchResponse> listener = new ActionListener<>() {
+            @Override
+            public void onResponse(SearchResponse newSearchResponse) {
+                assertEquals(newSearchResponse.getHits().getHits().length, 5);
+                assertEquals(newSearchResponse.getHits().getHits()[0].getSourceAsMap().get("inference_results"), fullPredictionResult);
+                assertEquals(newSearchResponse.getHits().getHits()[1].getSourceAsMap().get("inference_results"), fullPredictionResult);
+                assertEquals(newSearchResponse.getHits().getHits()[2].getSourceAsMap().get("inference_results"), fullPredictionResult);
+                assertEquals(newSearchResponse.getHits().getHits()[3].getSourceAsMap().get("inference_results"), fullPredictionResult);
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                throw new RuntimeException(e);
+            }
+
+        };
+        responseProcessor.processResponseAsync(request, response, responseContext, listener);
+        verify(client, times(1)).execute(any(), any(), any());
+    }
+
     /**
      * Tests create processor with one_to_one is true
      * with no mapping provided
@@ -401,23 +553,23 @@ public void onResponse(SearchResponse newSearchResponse) {
                 assertEquals(newSearchResponse.getHits().getHits().length, 5);
                 assertEquals(
                     newSearchResponse.getHits().getHits()[0].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[1].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[2].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[3].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[4].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
             }
 
@@ -482,23 +634,23 @@ public void onResponse(SearchResponse newSearchResponse) {
                 assertEquals(newSearchResponse.getHits().getHits().length, 5);
                 assertEquals(
                     newSearchResponse.getHits().getHits()[0].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[1].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[2].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[3].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[4].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
             }
 
@@ -1893,23 +2045,23 @@ public void onResponse(SearchResponse newSearchResponse) {
                 assertEquals(newSearchResponse.getHits().getHits().length, 5);
                 assertEquals(
                     newSearchResponse.getHits().getHits()[0].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[1].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[2].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[3].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[4].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
             }
 
@@ -1973,23 +2125,23 @@ public void onResponse(SearchResponse newSearchResponse) {
                 assertEquals(newSearchResponse.getHits().getHits().length, 5);
                 assertEquals(
                     newSearchResponse.getHits().getHits()[0].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[1].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[2].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[3].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
                 assertEquals(
                     newSearchResponse.getHits().getHits()[4].getSourceAsMap().get(DEFAULT_OUTPUT_FIELD_NAME).toString(),
-                    "[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]"
+                    "{inference_results=[{output=[{dataAsMap={response=[0.0, 1.0, 2.0, 3.0, 4.0]}}]}]}"
                 );
             }
 
@@ -3054,6 +3206,27 @@ private static SearchRequest getSearchRequest() {
         return request;
     }
 
+    private static Map<String, Object> generateInferenceResult(String response) {
+        Map<String, Object> inferenceResult = new HashMap<>();
+        List<Map<String, Object>> inferenceResults = new ArrayList<>();
+
+        Map<String, Object> outputMap = new HashMap<>();
+        List<Map<String, Object>> outputs = new ArrayList<>();
+
+        Map<String, Object> responseOutput = new HashMap<>();
+        Map<String, String> dataAsMap = new HashMap<>();
+        dataAsMap.put("response", response);
+        responseOutput.put("dataAsMap", dataAsMap);
+
+        outputs.add(responseOutput);
+        outputMap.put("output", outputs);
+
+        inferenceResults.add(outputMap);
+        inferenceResult.put("inference_results", inferenceResults);
+
+        return inferenceResult;
+    }
+
     /**
      * Helper method to create an instance of the MLInferenceSearchResponseProcessor with the specified parameters in
     * single pair of input and output mapping.