 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
-import java.util.concurrent.TimeUnit;
 import java.util.function.Consumer;
 
 import org.apache.commons.lang3.exception.ExceptionUtils;
@@ -216,49 +215,49 @@ public void testDeployRemoteModel() throws IOException, InterruptedException {
         waitForTask(taskId, MLTaskState.COMPLETED);
     }
 
-// public void testPredictWithAutoDeployAndTTL_RemoteModel() throws IOException, InterruptedException {
-// // Skip test if key is null
-// if (OPENAI_KEY == null) {
-// System.out.println("OPENAI_KEY is null");
-// return;
-// }
-// Response updateCBSettingResponse = TestHelper
-// .makeRequest(
-// client(),
-// "PUT",
-// "_cluster/settings",
-// null,
-// "{\"persistent\":{\"plugins.ml_commons.jvm_heap_memory_threshold\":100}}",
-// ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, ""))
-// );
-// assertEquals(200, updateCBSettingResponse.getStatusLine().getStatusCode());
-//
-// Response response = createConnector(completionModelConnectorEntity);
-// Map responseMap = parseResponseToMap(response);
-// String connectorId = (String) responseMap.get("connector_id");
-// response = registerRemoteModelWithTTLAndSkipHeapMemCheck("openAI-GPT-3.5 completions", connectorId, 1);
-// responseMap = parseResponseToMap(response);
-// String modelId = (String) responseMap.get("model_id");
-// String predictInput = "{\n" + " \"parameters\": {\n" + " \"prompt\": \"Say this is a test\"\n" + " }\n" + "}";
-// response = predictRemoteModel(modelId, predictInput);
-// responseMap = parseResponseToMap(response);
-// List responseList = (List) responseMap.get("inference_results");
-// responseMap = (Map) responseList.get(0);
-// responseList = (List) responseMap.get("output");
-// responseMap = (Map) responseList.get(0);
-// responseMap = (Map) responseMap.get("dataAsMap");
-// responseList = (List) responseMap.get("choices");
-// if (responseList == null) {
-// assertTrue(checkThrottlingOpenAI(responseMap));
-// return;
-// }
-// responseMap = (Map) responseList.get(0);
-// assertFalse(((String) responseMap.get("text")).isEmpty());
-//
-// getModelProfile(modelId, verifyRemoteModelDeployed());
-// TimeUnit.SECONDS.sleep(71);
-// assertTrue(getModelProfile(modelId, verifyRemoteModelDeployed()).isEmpty());
-// }
+    // public void testPredictWithAutoDeployAndTTL_RemoteModel() throws IOException, InterruptedException {
+    // // Skip test if key is null
+    // if (OPENAI_KEY == null) {
+    // System.out.println("OPENAI_KEY is null");
+    // return;
+    // }
+    // Response updateCBSettingResponse = TestHelper
+    // .makeRequest(
+    // client(),
+    // "PUT",
+    // "_cluster/settings",
+    // null,
+    // "{\"persistent\":{\"plugins.ml_commons.jvm_heap_memory_threshold\":100}}",
+    // ImmutableList.of(new BasicHeader(HttpHeaders.USER_AGENT, ""))
+    // );
+    // assertEquals(200, updateCBSettingResponse.getStatusLine().getStatusCode());
+    //
+    // Response response = createConnector(completionModelConnectorEntity);
+    // Map responseMap = parseResponseToMap(response);
+    // String connectorId = (String) responseMap.get("connector_id");
+    // response = registerRemoteModelWithTTLAndSkipHeapMemCheck("openAI-GPT-3.5 completions", connectorId, 1);
+    // responseMap = parseResponseToMap(response);
+    // String modelId = (String) responseMap.get("model_id");
+    // String predictInput = "{\n" + " \"parameters\": {\n" + " \"prompt\": \"Say this is a test\"\n" + " }\n" + "}";
+    // response = predictRemoteModel(modelId, predictInput);
+    // responseMap = parseResponseToMap(response);
+    // List responseList = (List) responseMap.get("inference_results");
+    // responseMap = (Map) responseList.get(0);
+    // responseList = (List) responseMap.get("output");
+    // responseMap = (Map) responseList.get(0);
+    // responseMap = (Map) responseMap.get("dataAsMap");
+    // responseList = (List) responseMap.get("choices");
+    // if (responseList == null) {
+    // assertTrue(checkThrottlingOpenAI(responseMap));
+    // return;
+    // }
+    // responseMap = (Map) responseList.get(0);
+    // assertFalse(((String) responseMap.get("text")).isEmpty());
+    //
+    // getModelProfile(modelId, verifyRemoteModelDeployed());
+    // TimeUnit.SECONDS.sleep(71);
+    // assertTrue(getModelProfile(modelId, verifyRemoteModelDeployed()).isEmpty());
+    // }
 
     public void testPredictRemoteModelWithInterface(String testCase, Consumer<Map> verifyResponse, Consumer<Exception> verifyException)
         throws IOException,