Add Hugging Face Rerank support #127966
@@ -171,6 +171,20 @@ static String mockDenseServiceModelConfig() {
            """;
    }

    static String mockRerankServiceModelConfig() {
        return """
            {
              "service": "test_reranking_service",
              "service_settings": {
                "model_id": "my_model",
                "api_key": "abc64"
              },
              "task_settings": {
              }
            }
            """;
    }

    static void deleteModel(String modelId) throws IOException {
        var request = new Request("DELETE", "_inference/" + modelId);
        var response = client().performRequest(request);

@@ -484,6 +498,10 @@ private String jsonBody(List<String> input, @Nullable String query) {
    @SuppressWarnings("unchecked")
    protected void assertNonEmptyInferenceResults(Map<String, Object> resultMap, int expectedNumberOfResults, TaskType taskType) {
        switch (taskType) {
            case RERANK -> {
Review comment: It looks like this method is never called with the TaskType.RERANK parameter anywhere, meaning the assertion isn't triggered. (A sketch of how this branch would be exercised follows this hunk.)
                var results = (List<Map<String, Object>>) resultMap.get(TaskType.RERANK.toString());
                assertThat(results, hasSize(expectedNumberOfResults));
            }
            case SPARSE_EMBEDDING -> {
                var results = (List<Map<String, Object>>) resultMap.get(TaskType.SPARSE_EMBEDDING.toString());
                assertThat(results, hasSize(expectedNumberOfResults));
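To make the review comment above concrete, here is a minimal, self-contained sketch of what exercising the RERANK branch looks like. The shape of the result map (a list of maps under the "rerank" key) is an assumption about the mock reranking service for illustration only, not taken from the PR.

// Editorial sketch (not part of the PR diff): shows the kind of result map the
// RERANK branch of assertNonEmptyInferenceResults would need to see. The
// "rerank" key and per-result fields are assumptions for illustration only.
import java.util.List;
import java.util.Map;

public class RerankAssertionSketch {
    public static void main(String[] args) {
        // Hypothetical inference response: one entry per reranked input.
        Map<String, Object> resultMap = Map.<String, Object>of(
            "rerank",
            List.of(
                Map.of("index", 0, "relevance_score", 0.97),
                Map.of("index", 1, "relevance_score", 0.12)
            )
        );

        // In the IT this would be assertNonEmptyInferenceResults(resultMap, 2, TaskType.RERANK),
        // which boils down to the following size check on the "rerank" entry.
        List<?> results = (List<?>) resultMap.get("rerank");
        if (results == null || results.size() != 2) {
            throw new AssertionError("expected 2 rerank results, got " + results);
        }
        System.out.println("RERANK assertion would pass for " + results.size() + " results");
    }
}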
@@ -0,0 +1,67 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */

package org.elasticsearch.xpack.inference;

import org.elasticsearch.inference.TaskType;

import java.io.IOException;
import java.util.List;
import java.util.Map;

public class MockRerankInferenceServiceIT extends InferenceBaseRestTest {

    @SuppressWarnings("unchecked")
    public void testMockService() throws IOException {
        String inferenceEntityId = "test-mock";
        var putModel = putModel(inferenceEntityId, mockRerankServiceModelConfig(), TaskType.RERANK);
        var model = getModels(inferenceEntityId, TaskType.RERANK).get(0);

        for (var modelMap : List.of(putModel, model)) {
            assertEquals(inferenceEntityId, modelMap.get("inference_id"));
            assertEquals(TaskType.RERANK, TaskType.fromString((String) modelMap.get("task_type")));
            assertEquals("test_reranking_service", modelMap.get("service"));
        }

        List<String> input = List.of(randomAlphaOfLength(10));
        var inference = infer(inferenceEntityId, input);
        assertNonEmptyInferenceResults(inference, 1, TaskType.RERANK);
        assertEquals(inference, infer(inferenceEntityId, input));
        assertNotEquals(inference, infer(inferenceEntityId, randomValueOtherThan(input, () -> List.of(randomAlphaOfLength(10)))));
    }

    public void testMockServiceWithMultipleInputs() throws IOException {
        String inferenceEntityId = "test-mock-with-multi-inputs";
        putModel(inferenceEntityId, mockRerankServiceModelConfig(), TaskType.RERANK);
        var queryParams = Map.of("timeout", "120s");

        var inference = infer(
            inferenceEntityId,
            TaskType.RERANK,
            List.of(randomAlphaOfLength(5), randomAlphaOfLength(10)),
            "What if?",
            queryParams
        );

        assertNonEmptyInferenceResults(inference, 2, TaskType.RERANK);
    }

    @SuppressWarnings("unchecked")
    public void testMockService_DoesNotReturnSecretsInGetResponse() throws IOException {
        String inferenceEntityId = "test-mock";
        var putModel = putModel(inferenceEntityId, mockRerankServiceModelConfig(), TaskType.RERANK);
        var model = getModels(inferenceEntityId, TaskType.RERANK).get(0);

        var serviceSettings = (Map<String, Object>) model.get("service_settings");
        assertNull(serviceSettings.get("api_key"));
        assertNotNull(serviceSettings.get("model_id"));

        var putServiceSettings = (Map<String, Object>) putModel.get("service_settings");
        assertNull(putServiceSettings.get("api_key"));
        assertNotNull(putServiceSettings.get("model_id"));
    }
}
@@ -26,6 +26,7 @@
import org.elasticsearch.xpack.inference.services.ServiceUtils;
import org.elasticsearch.xpack.inference.services.huggingface.action.HuggingFaceActionCreator;

import java.util.Collections;
import java.util.Map;

import static org.elasticsearch.xpack.inference.services.ServiceUtils.createInvalidModelException;

@@ -57,6 +58,11 @@ public void parseRequestConfig(
    ) {
        try {
            Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
            Map<String, Object> taskSettingsMap = Collections.emptyMap();

            if (TaskType.RERANK.equals(taskType)) {
                taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS);
Review comment: The task settings should be optional; I don't think we want to throw if the user does not specify any. In other services, like Cohere, we default to an empty map. Let's remove the if-block and use that approach instead. (A sketch of this pattern follows the if-block below.)
            }
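For illustration, here is a minimal, self-contained sketch of the "default to an empty map" behaviour the reviewer is describing. The helper below is written out locally and is not the reviewer's lost snippet; the actual ServiceUtils method used by services such as Cohere may be named differently.

// Editorial sketch (not the reviewer's snippet): optional task settings are removed
// from the request config when present, otherwise an empty map is used instead of
// throwing. The helper is a local stand-in for the ServiceUtils-style method other
// services rely on.
import java.util.HashMap;
import java.util.Map;

public class OptionalTaskSettingsSketch {

    @SuppressWarnings("unchecked")
    static Map<String, Object> removeFromMapOrDefaultEmpty(Map<String, Object> source, String field) {
        Map<String, Object> value = (Map<String, Object>) source.remove(field);
        return value != null ? value : new HashMap<>();
    }

    public static void main(String[] args) {
        Map<String, Object> config = new HashMap<>();
        config.put("service_settings", Map.of("model_id", "my_model"));

        // No "task_settings" key in the request: we get an empty map, not an exception.
        Map<String, Object> taskSettingsMap = removeFromMapOrDefaultEmpty(config, "task_settings");
        System.out.println("task_settings defaulted to: " + taskSettingsMap); // {}
    }
}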
            ChunkingSettings chunkingSettings = null;
            if (TaskType.TEXT_EMBEDDING.equals(taskType)) {

@@ -66,17 +72,21 @@ public void parseRequestConfig(
            }

            var model = createModel(
                inferenceEntityId,
                taskType,
                serviceSettingsMap,
                chunkingSettings,
                serviceSettingsMap,
                TaskType.unsupportedTaskTypeErrorMsg(taskType, name()),
                ConfigurationParseContext.REQUEST
                new HuggingFaceModelParameters(
                    inferenceEntityId,
                    taskType,
                    serviceSettingsMap,
                    taskSettingsMap,
                    chunkingSettings,
                    serviceSettingsMap,
                    TaskType.unsupportedTaskTypeErrorMsg(taskType, name()),
                    ConfigurationParseContext.REQUEST
                )
            );

            throwIfNotEmptyMap(config, name());
            throwIfNotEmptyMap(serviceSettingsMap, name());
            throwIfNotEmptyMap(taskSettingsMap, name());

            parsedModelListener.onResponse(model);
        } catch (Exception e) {
@@ -93,52 +103,60 @@ public HuggingFaceModel parsePersistedConfigWithSecrets(
    ) {
        Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
        Map<String, Object> secretSettingsMap = removeFromMapOrThrowIfNull(secrets, ModelSecrets.SECRET_SETTINGS);
        Map<String, Object> taskSettingsMap = Collections.emptyMap();

        if (TaskType.RERANK.equals(taskType)) {
Review comment: Same comment as above; let's use the default-to-empty-map approach here too.
            taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS);
        }

        ChunkingSettings chunkingSettings = null;
        if (TaskType.TEXT_EMBEDDING.equals(taskType)) {
            chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS));
        }

        return createModel(
            inferenceEntityId,
            taskType,
            serviceSettingsMap,
            chunkingSettings,
            secretSettingsMap,
            parsePersistedConfigErrorMsg(inferenceEntityId, name()),
            ConfigurationParseContext.PERSISTENT
            new HuggingFaceModelParameters(
                inferenceEntityId,
                taskType,
                serviceSettingsMap,
                taskSettingsMap,
                chunkingSettings,
                secretSettingsMap,
                parsePersistedConfigErrorMsg(inferenceEntityId, name()),
                ConfigurationParseContext.PERSISTENT
            )
        );
    }

    @Override
    public HuggingFaceModel parsePersistedConfig(String inferenceEntityId, TaskType taskType, Map<String, Object> config) {
        Map<String, Object> serviceSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.SERVICE_SETTINGS);
        Map<String, Object> taskSettingsMap = Collections.emptyMap();

        if (TaskType.RERANK.equals(taskType)) {
Review comment: Same comment as above; the same default-to-empty-map approach applies here.
            taskSettingsMap = removeFromMapOrThrowIfNull(config, ModelConfigurations.TASK_SETTINGS);
        }

        ChunkingSettings chunkingSettings = null;
        if (TaskType.TEXT_EMBEDDING.equals(taskType)) {
            chunkingSettings = ChunkingSettingsBuilder.fromMap(removeFromMap(config, ModelConfigurations.CHUNKING_SETTINGS));
        }

        return createModel(
            inferenceEntityId,
            taskType,
            serviceSettingsMap,
            chunkingSettings,
            null,
            parsePersistedConfigErrorMsg(inferenceEntityId, name()),
            ConfigurationParseContext.PERSISTENT
            new HuggingFaceModelParameters(
                inferenceEntityId,
                taskType,
                serviceSettingsMap,
                taskSettingsMap,
                chunkingSettings,
                null,
                parsePersistedConfigErrorMsg(inferenceEntityId, name()),
                ConfigurationParseContext.PERSISTENT
            )
        );
    }

    protected abstract HuggingFaceModel createModel(
        String inferenceEntityId,
        TaskType taskType,
        Map<String, Object> serviceSettings,
        ChunkingSettings chunkingSettings,
        Map<String, Object> secretSettings,
        String failureMessage,
        ConfigurationParseContext context
    );
    protected abstract HuggingFaceModel createModel(HuggingFaceModelParameters input);

    @Override
    public void doInfer(
Review comment: I'm wondering if the methods you've added to this class are actually used somewhere. The methods you've taken for reference are being called; the ones you've added are not.

Reply: Thanks for noticing. It's used now.
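For context on the exchange above, here is a small, self-contained sketch of the parameter-object refactor being discussed: the multi-argument createModel is replaced by a single carrier object that concrete services consume. The record below is a simplified stand-in; the fields and accessors of the real HuggingFaceModelParameters class in the PR may differ.

// Editorial sketch of the parameter-object refactor: a single carrier object replaces
// the long createModel(...) argument list. The record is a stand-in, not the PR's class.
import java.util.Map;

public class ParameterObjectSketch {

    record ModelParameters(
        String inferenceEntityId,
        String taskType,
        Map<String, Object> serviceSettings,
        Map<String, Object> taskSettings,
        Map<String, Object> secretSettings,
        String failureMessage
    ) {}

    // A concrete service would implement the abstract single-argument factory along these lines.
    static String createModel(ModelParameters params) {
        return params.taskType() + " model for " + params.inferenceEntityId()
            + " (model_id=" + params.serviceSettings().get("model_id") + ")";
    }

    public static void main(String[] args) {
        var params = new ModelParameters(
            "hf-rerank-endpoint",
            "rerank",
            Map.of("model_id", "my_model"),
            Map.of(),
            Map.of(),
            "unsupported task type"
        );
        System.out.println(createModel(params));
    }
}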