Skip to content

Commit ec7efe7

Browse files
committed
add feature flag for offline batch inference
Signed-off-by: Xun Zhang <xunzh@amazon.com>
1 parent 655be06 commit ec7efe7

11 files changed: +117 −11 lines

plugin/src/main/java/org/opensearch/ml/action/tasks/CancelBatchJobTransportAction.java

+10-3
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
import static org.opensearch.ml.common.CommonValue.ML_CONNECTOR_INDEX;
1010
import static org.opensearch.ml.common.CommonValue.ML_TASK_INDEX;
1111
import static org.opensearch.ml.common.connector.ConnectorAction.ActionType.CANCEL_BATCH_PREDICT;
12+
import static org.opensearch.ml.utils.MLExceptionUtils.BATCH_INFERENCE_DISABLED_ERR_MSG;
1213
import static org.opensearch.ml.utils.MLNodeUtils.createXContentParserFromRegistry;
1314

1415
import java.util.HashMap;
@@ -51,8 +52,8 @@
5152
import org.opensearch.ml.engine.algorithms.remote.RemoteConnectorExecutor;
5253
import org.opensearch.ml.engine.encryptor.EncryptorImpl;
5354
import org.opensearch.ml.helper.ConnectorAccessControlHelper;
54-
import org.opensearch.ml.model.MLModelCacheHelper;
5555
import org.opensearch.ml.model.MLModelManager;
56+
import org.opensearch.ml.settings.MLFeatureEnabledSetting;
5657
import org.opensearch.ml.task.MLTaskManager;
5758
import org.opensearch.script.ScriptService;
5859
import org.opensearch.tasks.Task;
@@ -74,7 +75,7 @@ public class CancelBatchJobTransportAction extends HandledTransportAction<Action
7475
MLModelManager mlModelManager;
7576

7677
MLTaskManager mlTaskManager;
77-
MLModelCacheHelper modelCacheHelper;
78+
private MLFeatureEnabledSetting mlFeatureEnabledSetting;
7879

7980
@Inject
8081
public CancelBatchJobTransportAction(
@@ -87,7 +88,8 @@ public CancelBatchJobTransportAction(
8788
ConnectorAccessControlHelper connectorAccessControlHelper,
8889
EncryptorImpl encryptor,
8990
MLTaskManager mlTaskManager,
90-
MLModelManager mlModelManager
91+
MLModelManager mlModelManager,
92+
MLFeatureEnabledSetting mlFeatureEnabledSetting
9193
) {
9294
super(MLCancelBatchJobAction.NAME, transportService, actionFilters, MLCancelBatchJobRequest::new);
9395
this.client = client;
@@ -98,6 +100,7 @@ public CancelBatchJobTransportAction(
98100
this.encryptor = encryptor;
99101
this.mlTaskManager = mlTaskManager;
100102
this.mlModelManager = mlModelManager;
103+
this.mlFeatureEnabledSetting = mlFeatureEnabledSetting;
101104
}
102105

103106
@Override
@@ -116,6 +119,10 @@ protected void doExecute(Task task, ActionRequest request, ActionListener<MLCanc
116119
MLTask mlTask = MLTask.parse(parser);
117120

118121
// check if function is remote and task is of type batch prediction
122+
if (mlTask.getTaskType() == MLTaskType.BATCH_PREDICTION
123+
&& !mlFeatureEnabledSetting.isOfflineBatchInferenceEnabled()) {
124+
throw new IllegalStateException(BATCH_INFERENCE_DISABLED_ERR_MSG);
125+
}
119126
if (mlTask.getTaskType() == MLTaskType.BATCH_PREDICTION && mlTask.getFunctionName() == FunctionName.REMOTE) {
120127
processRemoteBatchPrediction(mlTask, actionListener);
121128
} else {

plugin/src/main/java/org/opensearch/ml/action/tasks/GetTaskTransportAction.java

+9-2
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
import static org.opensearch.ml.settings.MLCommonsSettings.ML_COMMONS_REMOTE_JOB_STATUS_COMPLETED_REGEX;
2121
import static org.opensearch.ml.settings.MLCommonsSettings.ML_COMMONS_REMOTE_JOB_STATUS_EXPIRED_REGEX;
2222
import static org.opensearch.ml.settings.MLCommonsSettings.ML_COMMONS_REMOTE_JOB_STATUS_FIELD;
23+
import static org.opensearch.ml.utils.MLExceptionUtils.BATCH_INFERENCE_DISABLED_ERR_MSG;
2324
import static org.opensearch.ml.utils.MLExceptionUtils.logException;
2425
import static org.opensearch.ml.utils.MLNodeUtils.createXContentParserFromRegistry;
2526

@@ -68,8 +69,8 @@
6869
import org.opensearch.ml.engine.algorithms.remote.RemoteConnectorExecutor;
6970
import org.opensearch.ml.engine.encryptor.EncryptorImpl;
7071
import org.opensearch.ml.helper.ConnectorAccessControlHelper;
71-
import org.opensearch.ml.model.MLModelCacheHelper;
7272
import org.opensearch.ml.model.MLModelManager;
73+
import org.opensearch.ml.settings.MLFeatureEnabledSetting;
7374
import org.opensearch.ml.task.MLTaskManager;
7475
import org.opensearch.script.ScriptService;
7576
import org.opensearch.tasks.Task;
@@ -91,7 +92,7 @@ public class GetTaskTransportAction extends HandledTransportAction<ActionRequest
9192
MLModelManager mlModelManager;
9293

9394
MLTaskManager mlTaskManager;
94-
MLModelCacheHelper modelCacheHelper;
95+
private MLFeatureEnabledSetting mlFeatureEnabledSetting;
9596

9697
volatile List<String> remoteJobStatusFields;
9798
volatile Pattern remoteJobCompletedStatusRegexPattern;
@@ -111,6 +112,7 @@ public GetTaskTransportAction(
111112
EncryptorImpl encryptor,
112113
MLTaskManager mlTaskManager,
113114
MLModelManager mlModelManager,
115+
MLFeatureEnabledSetting mlFeatureEnabledSetting,
114116
Settings settings
115117
) {
116118
super(MLTaskGetAction.NAME, transportService, actionFilters, MLTaskGetRequest::new);
@@ -122,6 +124,7 @@ public GetTaskTransportAction(
122124
this.encryptor = encryptor;
123125
this.mlTaskManager = mlTaskManager;
124126
this.mlModelManager = mlModelManager;
127+
this.mlFeatureEnabledSetting = mlFeatureEnabledSetting;
125128

126129
remoteJobStatusFields = ML_COMMONS_REMOTE_JOB_STATUS_FIELD.get(settings);
127130
clusterService.getClusterSettings().addSettingsUpdateConsumer(ML_COMMONS_REMOTE_JOB_STATUS_FIELD, it -> remoteJobStatusFields = it);
@@ -178,6 +181,10 @@ protected void doExecute(Task task, ActionRequest request, ActionListener<MLTask
178181
MLTask mlTask = MLTask.parse(parser);
179182

180183
// check if function is remote and task is of type batch prediction
184+
if (mlTask.getTaskType() == MLTaskType.BATCH_PREDICTION
185+
&& !mlFeatureEnabledSetting.isOfflineBatchInferenceEnabled()) {
186+
throw new IllegalStateException(BATCH_INFERENCE_DISABLED_ERR_MSG);
187+
}
181188
if (mlTask.getTaskType() == MLTaskType.BATCH_PREDICTION && mlTask.getFunctionName() == FunctionName.REMOTE) {
182189
processRemoteBatchPrediction(mlTask, taskId, actionListener);
183190
} else {

plugin/src/main/java/org/opensearch/ml/plugin/MachineLearningPlugin.java

+2-1
Original file line numberDiff line numberDiff line change
@@ -972,7 +972,8 @@ public List<Setting<?>> getSettings() {
972972
MLCommonsSettings.ML_COMMONS_REMOTE_JOB_STATUS_CANCELLING_REGEX,
973973
MLCommonsSettings.ML_COMMONS_REMOTE_JOB_STATUS_EXPIRED_REGEX,
974974
MLCommonsSettings.ML_COMMONS_CONTROLLER_ENABLED,
975-
MLCommonsSettings.ML_COMMONS_OFFLINE_BATCH_INGESTION_ENABLED
975+
MLCommonsSettings.ML_COMMONS_OFFLINE_BATCH_INGESTION_ENABLED,
976+
MLCommonsSettings.ML_COMMONS_OFFLINE_BATCH_INFERENCE_ENABLED
976977
);
977978
return settings;
978979
}

plugin/src/main/java/org/opensearch/ml/rest/RestMLGetTaskAction.java

+2-2
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@
2424
import com.google.common.collect.ImmutableList;
2525

2626
public class RestMLGetTaskAction extends BaseRestHandler {
27-
private static final String ML_GET_Task_ACTION = "ml_get_task_action";
27+
private static final String ML_GET_TASK_ACTION = "ml_get_task_action";
2828

2929
/**
3030
* Constructor
@@ -33,7 +33,7 @@ public RestMLGetTaskAction() {}
3333

3434
@Override
3535
public String getName() {
36-
return ML_GET_Task_ACTION;
36+
return ML_GET_TASK_ACTION;
3737
}
3838

3939
@Override

plugin/src/main/java/org/opensearch/ml/rest/RestMLPredictionAction.java

+3
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@
77

88
import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken;
99
import static org.opensearch.ml.plugin.MachineLearningPlugin.ML_BASE_URI;
10+
import static org.opensearch.ml.utils.MLExceptionUtils.BATCH_INFERENCE_DISABLED_ERR_MSG;
1011
import static org.opensearch.ml.utils.MLExceptionUtils.LOCAL_MODEL_DISABLED_ERR_MSG;
1112
import static org.opensearch.ml.utils.MLExceptionUtils.REMOTE_INFERENCE_DISABLED_ERR_MSG;
1213
import static org.opensearch.ml.utils.RestActionUtils.PARAMETER_ALGORITHM;
@@ -131,6 +132,8 @@ MLPredictionTaskRequest getRequest(String modelId, String algorithm, RestRequest
131132
throw new IllegalStateException(REMOTE_INFERENCE_DISABLED_ERR_MSG);
132133
} else if (FunctionName.isDLModel(FunctionName.from(algorithm.toUpperCase())) && !mlFeatureEnabledSetting.isLocalModelEnabled()) {
133134
throw new IllegalStateException(LOCAL_MODEL_DISABLED_ERR_MSG);
135+
} else if (ActionType.BATCH_PREDICT == actionType && !mlFeatureEnabledSetting.isOfflineBatchInferenceEnabled()) {
136+
throw new IllegalStateException(BATCH_INFERENCE_DISABLED_ERR_MSG);
134137
} else if (!ActionType.isValidActionInModelPrediction(actionType)) {
135138
throw new IllegalArgumentException("Wrong action type in the rest request path!");
136139
}

plugin/src/main/java/org/opensearch/ml/settings/MLCommonsSettings.java

+3
Original file line numberDiff line numberDiff line change
@@ -139,6 +139,9 @@ private MLCommonsSettings() {}
139139
public static final Setting<Boolean> ML_COMMONS_OFFLINE_BATCH_INGESTION_ENABLED = Setting
140140
.boolSetting("plugins.ml_commons.offline_batch_ingestion_enabled", true, Setting.Property.NodeScope, Setting.Property.Dynamic);
141141

142+
public static final Setting<Boolean> ML_COMMONS_OFFLINE_BATCH_INFERENCE_ENABLED = Setting
143+
.boolSetting("plugins.ml_commons.offline_batch_inference_enabled", true, Setting.Property.NodeScope, Setting.Property.Dynamic);
144+
142145
public static final Setting<List<String>> ML_COMMONS_TRUSTED_CONNECTOR_ENDPOINTS_REGEX = Setting
143146
.listSetting(
144147
"plugins.ml_commons.trusted_connector_endpoints_regex",

plugin/src/main/java/org/opensearch/ml/settings/MLFeatureEnabledSetting.java

+13
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
import static org.opensearch.ml.settings.MLCommonsSettings.ML_COMMONS_CONNECTOR_PRIVATE_IP_ENABLED;
1212
import static org.opensearch.ml.settings.MLCommonsSettings.ML_COMMONS_CONTROLLER_ENABLED;
1313
import static org.opensearch.ml.settings.MLCommonsSettings.ML_COMMONS_LOCAL_MODEL_ENABLED;
14+
import static org.opensearch.ml.settings.MLCommonsSettings.ML_COMMONS_OFFLINE_BATCH_INFERENCE_ENABLED;
1415
import static org.opensearch.ml.settings.MLCommonsSettings.ML_COMMONS_OFFLINE_BATCH_INGESTION_ENABLED;
1516
import static org.opensearch.ml.settings.MLCommonsSettings.ML_COMMONS_REMOTE_INFERENCE_ENABLED;
1617

@@ -29,6 +30,7 @@ public class MLFeatureEnabledSetting {
2930

3031
private volatile Boolean isControllerEnabled;
3132
private volatile Boolean isBatchIngestionEnabled;
33+
private volatile Boolean isBatchInferenceEnabled;
3234

3335
public MLFeatureEnabledSetting(ClusterService clusterService, Settings settings) {
3436
isRemoteInferenceEnabled = ML_COMMONS_REMOTE_INFERENCE_ENABLED.get(settings);
@@ -37,6 +39,7 @@ public MLFeatureEnabledSetting(ClusterService clusterService, Settings settings)
3739
isConnectorPrivateIpEnabled = new AtomicBoolean(ML_COMMONS_CONNECTOR_PRIVATE_IP_ENABLED.get(settings));
3840
isControllerEnabled = ML_COMMONS_CONTROLLER_ENABLED.get(settings);
3941
isBatchIngestionEnabled = ML_COMMONS_OFFLINE_BATCH_INGESTION_ENABLED.get(settings);
42+
isBatchInferenceEnabled = ML_COMMONS_OFFLINE_BATCH_INFERENCE_ENABLED.get(settings);
4043

4144
clusterService
4245
.getClusterSettings()
@@ -52,6 +55,9 @@ public MLFeatureEnabledSetting(ClusterService clusterService, Settings settings)
5255
clusterService
5356
.getClusterSettings()
5457
.addSettingsUpdateConsumer(ML_COMMONS_OFFLINE_BATCH_INGESTION_ENABLED, it -> isBatchIngestionEnabled = it);
58+
clusterService
59+
.getClusterSettings()
60+
.addSettingsUpdateConsumer(ML_COMMONS_OFFLINE_BATCH_INFERENCE_ENABLED, it -> isBatchInferenceEnabled = it);
5561
}
5662

5763
/**
@@ -98,4 +104,11 @@ public Boolean isOfflineBatchIngestionEnabled() {
98104
return isBatchIngestionEnabled;
99105
}
100106

107+
/**
108+
* Whether the offline batch inference is enabled. If disabled, APIs in ml-commons will block offline batch inference.
109+
* @return whether the feature is enabled.
110+
*/
111+
public Boolean isOfflineBatchInferenceEnabled() {
112+
return isBatchInferenceEnabled;
113+
}
101114
}

plugin/src/main/java/org/opensearch/ml/utils/MLExceptionUtils.java

+2
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,8 @@ public class MLExceptionUtils {
2222
"Remote Inference is currently disabled. To enable it, update the setting \"plugins.ml_commons.remote_inference_enabled\" to true.";
2323
public static final String LOCAL_MODEL_DISABLED_ERR_MSG =
2424
"Local Model is currently disabled. To enable it, update the setting \"plugins.ml_commons.local_model.enabled\" to true.";
25+
public static final String BATCH_INFERENCE_DISABLED_ERR_MSG =
26+
"Offline Batch Inference is currently disabled. To enable it, update the setting \"plugins.ml_commons.offline_batch_inference_enabled\" to true.";
2527
public static final String AGENT_FRAMEWORK_DISABLED_ERR_MSG =
2628
"Agent Framework is currently disabled. To enable it, update the setting \"plugins.ml_commons.agent_framework_enabled\" to true.";
2729
public static final String CONTROLLER_DISABLED_ERR_MSG =

plugin/src/test/java/org/opensearch/ml/action/tasks/CancelBatchJobTransportActionTests.java

+29-2
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,7 @@
6161
import org.opensearch.ml.engine.encryptor.EncryptorImpl;
6262
import org.opensearch.ml.helper.ConnectorAccessControlHelper;
6363
import org.opensearch.ml.model.MLModelManager;
64+
import org.opensearch.ml.settings.MLFeatureEnabledSetting;
6465
import org.opensearch.ml.task.MLTaskManager;
6566
import org.opensearch.script.ScriptService;
6667
import org.opensearch.test.OpenSearchTestCase;
@@ -106,6 +107,9 @@ public class CancelBatchJobTransportActionTests extends OpenSearchTestCase {
106107
@Mock
107108
private MLTaskManager mlTaskManager;
108109

110+
@Mock
111+
private MLFeatureEnabledSetting mlFeatureEnabledSetting;
112+
109113
@Rule
110114
public ExpectedException exceptionRule = ExpectedException.none();
111115

@@ -139,7 +143,8 @@ public void setup() throws IOException {
139143
connectorAccessControlHelper,
140144
encryptor,
141145
mlTaskManager,
142-
mlModelManager
146+
mlModelManager,
147+
mlFeatureEnabledSetting
143148
)
144149
);
145150

@@ -182,7 +187,7 @@ public void setup() throws IOException {
182187
listener.onResponse(connector);
183188
return null;
184189
}).when(connectorAccessControlHelper).getConnector(eq(client), anyString(), any());
185-
190+
when(mlFeatureEnabledSetting.isOfflineBatchInferenceEnabled()).thenReturn(true);
186191
}
187192

188193
public void testGetTask_NullResponse() {
@@ -221,6 +226,28 @@ public void testGetTask_IndexNotFoundException() {
221226
assertEquals("Fail to find task", argumentCaptor.getValue().getMessage());
222227
}
223228

229+
public void testGetTask_FeatureFlagDisabled() throws IOException {
230+
Map<String, Object> remoteJob = new HashMap<>();
231+
remoteJob.put("Status", "IN PROGRESS");
232+
remoteJob.put("TransformJobName", "SM-offline-batch-transform13");
233+
234+
GetResponse getResponse = prepareMLTask(FunctionName.REMOTE, MLTaskType.BATCH_PREDICTION, remoteJob);
235+
236+
doAnswer(invocation -> {
237+
ActionListener<GetResponse> actionListener = invocation.getArgument(1);
238+
actionListener.onResponse(getResponse);
239+
return null;
240+
}).when(client).get(any(), any());
241+
when(mlFeatureEnabledSetting.isOfflineBatchInferenceEnabled()).thenReturn(false);
242+
cancelBatchJobTransportAction.doExecute(null, mlCancelBatchJobRequest, actionListener);
243+
ArgumentCaptor<IllegalStateException> argumentCaptor = ArgumentCaptor.forClass(IllegalStateException.class);
244+
verify(actionListener).onFailure(argumentCaptor.capture());
245+
assertEquals(
246+
"Offline Batch Inference is currently disabled. To enable it, update the setting \"plugins.ml_commons.offline_batch_inference_enabled\" to true.",
247+
argumentCaptor.getValue().getMessage()
248+
);
249+
}
250+
224251
@Ignore
225252
public void testGetTask_SuccessBatchPredictCancel() throws IOException {
226253
Map<String, Object> remoteJob = new HashMap<>();

plugin/src/test/java/org/opensearch/ml/action/tasks/GetTaskTransportActionTests.java

+31-1
Original file line numberDiff line numberDiff line change
@@ -71,6 +71,7 @@
7171
import org.opensearch.ml.engine.encryptor.EncryptorImpl;
7272
import org.opensearch.ml.helper.ConnectorAccessControlHelper;
7373
import org.opensearch.ml.model.MLModelManager;
74+
import org.opensearch.ml.settings.MLFeatureEnabledSetting;
7475
import org.opensearch.ml.task.MLTaskManager;
7576
import org.opensearch.script.ScriptService;
7677
import org.opensearch.test.OpenSearchTestCase;
@@ -116,6 +117,9 @@ public class GetTaskTransportActionTests extends OpenSearchTestCase {
116117
@Mock
117118
private MLTaskManager mlTaskManager;
118119

120+
@Mock
121+
private MLFeatureEnabledSetting mlFeatureEnabledSetting;
122+
119123
@Rule
120124
public ExpectedException exceptionRule = ExpectedException.none();
121125

@@ -172,6 +176,7 @@ public void setup() throws IOException {
172176
encryptor,
173177
mlTaskManager,
174178
mlModelManager,
179+
mlFeatureEnabledSetting,
175180
settings
176181
)
177182
);
@@ -215,7 +220,7 @@ public void setup() throws IOException {
215220
listener.onResponse(connector);
216221
return null;
217222
}).when(connectorAccessControlHelper).getConnector(eq(client), anyString(), any());
218-
223+
when(mlFeatureEnabledSetting.isOfflineBatchInferenceEnabled()).thenReturn(true);
219224
}
220225

221226
public void testGetTask_NullResponse() {
@@ -299,6 +304,31 @@ public void test_BatchPredictStatus_NoConnector() throws IOException {
299304
assertEquals("You don't have permission to access this connector", argumentCaptor.getValue().getMessage());
300305
}
301306

307+
public void test_BatchPredictStatus_FeatureFlagDisabled() throws IOException {
308+
Map<String, Object> remoteJob = new HashMap<>();
309+
remoteJob.put("Status", "IN PROGRESS");
310+
remoteJob.put("TransformJobName", "SM-offline-batch-transform13");
311+
312+
when(connectorAccessControlHelper.validateConnectorAccess(eq(client), any())).thenReturn(false);
313+
314+
GetResponse getResponse = prepareMLTask(FunctionName.REMOTE, MLTaskType.BATCH_PREDICTION, remoteJob);
315+
316+
doAnswer(invocation -> {
317+
ActionListener<GetResponse> actionListener = invocation.getArgument(1);
318+
actionListener.onResponse(getResponse);
319+
return null;
320+
}).when(client).get(any(), any());
321+
when(mlFeatureEnabledSetting.isOfflineBatchInferenceEnabled()).thenReturn(false);
322+
323+
getTaskTransportAction.doExecute(null, mlTaskGetRequest, actionListener);
324+
ArgumentCaptor<IllegalStateException> argumentCaptor = ArgumentCaptor.forClass(IllegalStateException.class);
325+
verify(actionListener).onFailure(argumentCaptor.capture());
326+
assertEquals(
327+
"Offline Batch Inference is currently disabled. To enable it, update the setting \"plugins.ml_commons.offline_batch_inference_enabled\" to true.",
328+
argumentCaptor.getValue().getMessage()
329+
);
330+
}
331+
302332
public void test_BatchPredictStatus_NoAccessToConnector() throws IOException {
303333
Map<String, Object> remoteJob = new HashMap<>();
304334
remoteJob.put("Status", "IN PROGRESS");

plugin/src/test/java/org/opensearch/ml/rest/RestMLPredictionActionTests.java

+13
Original file line numberDiff line numberDiff line change
@@ -157,13 +157,26 @@ public void testPrepareRequest() throws Exception {
157157

158158
public void testPrepareBatchRequest() throws Exception {
159159
RestRequest request = getBatchRestRequest();
160+
when(mlFeatureEnabledSetting.isOfflineBatchInferenceEnabled()).thenReturn(true);
160161
restMLPredictionAction.handleRequest(request, channel, client);
161162
ArgumentCaptor<MLPredictionTaskRequest> argumentCaptor = ArgumentCaptor.forClass(MLPredictionTaskRequest.class);
162163
verify(client, times(1)).execute(eq(MLPredictionTaskAction.INSTANCE), argumentCaptor.capture(), any());
163164
MLInput mlInput = argumentCaptor.getValue().getMlInput();
164165
verifyParsedBatchMLInput(mlInput);
165166
}
166167

168+
public void testPrepareBatchRequest_FeatureFlagDisabled() throws Exception {
169+
thrown.expect(IllegalStateException.class);
170+
thrown
171+
.expectMessage(
172+
"Offline Batch Inference is currently disabled. To enable it, update the setting \"plugins.ml_commons.offline_batch_inference_enabled\" to true."
173+
);
174+
175+
RestRequest request = getBatchRestRequest();
176+
when(mlFeatureEnabledSetting.isOfflineBatchInferenceEnabled()).thenReturn(false);
177+
restMLPredictionAction.handleRequest(request, channel, client);
178+
}
179+
167180
public void testPrepareBatchRequest_WrongActionType() throws Exception {
168181
thrown.expect(IllegalArgumentException.class);
169182
thrown.expectMessage("Wrong Action Type");

Commit comments: 0