@@ -170,21 +170,27 @@ public void setup() throws Exception {
             + "    \"properties\": {\n"
             + "      \"diary_embedding_size\": {\n"
             + "        \"type\": \"keyword\"\n"
+            + "      },\n"
+            + "      \"diary_embedding_size_int\": {\n"
+            + "        \"type\": \"integer\"\n"
             + "      }\n"
             + "    }\n"
             + "  }\n"
             + "}";
+
         String uploadDocumentRequestBodyDoc1 = "{\n"
             + "  \"id\": 1,\n"
             + "  \"diary\": [\"happy\",\"first day at school\"],\n"
             + "  \"diary_embedding_size\": \"1536\",\n" // embedding size for ada model
+            + "  \"diary_embedding_size_int\": 1536,\n"
             + "  \"weather\": \"rainy\"\n"
             + "}";

         String uploadDocumentRequestBodyDoc2 = "{\n"
             + "  \"id\": 2,\n"
             + "  \"diary\": [\"bored\",\"at home\"],\n"
             + "  \"diary_embedding_size\": \"768\",\n" // embedding size for local text embedding model
+            + "  \"diary_embedding_size_int\": 768,\n"
             + "  \"weather\": \"sunny\"\n"
             + "}";

@@ -389,7 +395,7 @@ public void testMLInferenceProcessorRemoteModelOptionalInputs() throws Exception
             + "        \"model_id\": \""
             + this.bedrockMultiModalEmbeddingModelId
             + "\",\n"
-            + "        \"query_template\": \"{\\\"size\\\": 2,\\\"query\\\": {\\\"range\\\": {\\\"diary_embedding_size\\\": {\\\"gte\\\": ${modelPrediction}}}}}\",\n"
+            + "        \"query_template\": \"{\\\"size\\\": 2,\\\"query\\\": {\\\"range\\\": {\\\"diary_embedding_size_int\\\": {\\\"gte\\\": ${modelPrediction}}}}}\",\n"
             + "        \"optional_input_map\": [\n"
             + "          {\n"
             + "            \"inputText\": \"query.term.diary.value\",\n"
@@ -415,9 +421,9 @@ public void testMLInferenceProcessorRemoteModelOptionalInputs() throws Exception
         createSearchPipelineProcessor(createPipelineRequestBody, pipelineName);

         Map response = searchWithPipeline(client(), index_name, pipelineName, query);
-
+        System.out.println("successfully run ***********************");
         assertEquals((int) JsonPath.parse(response).read("$.hits.hits.length()"), 1);
-        Assert.assertEquals(JsonPath.parse(response).read("$.hits.hits[0]._source.diary_embedding_size"), "1536");
+        assertEquals((double) JsonPath.parse(response).read("$.hits.hits[0]._source.diary_embedding_size_int"), 1536.0, 0.0001);
     }

     /**
@@ -447,35 +453,35 @@ public void testMLInferenceProcessorLocalModel() throws Exception {
         });

         String createPipelineRequestBody = "{\n"
-            + "  \"request_processors\": [\n"
-            + "    {\n"
-            + "      \"ml_inference\": {\n"
-            + "        \"tag\": \"ml_inference\",\n"
-            + "        \"description\": \"This processor is going to run ml inference during search request\",\n"
-            + "        \"model_id\": \""
-            + this.localModelId
-            + "\",\n"
-            + "        \"model_input\": \"{ \\\"text_docs\\\": [\\\"${ml_inference.text_docs}\\\"] ,\\\"return_number\\\": true,\\\"target_response\\\": [\\\"sentence_embedding\\\"]}\",\n"
-            + "        \"function_name\": \"text_embedding\",\n"
-            + "        \"full_response_path\": true,\n"
-            + "        \"input_map\": [\n"
-            + "          {\n"
-            + "            \"text_docs\": \"query.term.diary_embedding_size.value\"\n"
-            + "          }\n"
-            + "        ],\n"
-            + "        \"output_map\": [\n"
-            + "          {\n"
-            + "            \"query.term.diary_embedding_size.value\": \"$.inference_results[0].output[0].data.length()\"\n"
-            + "          }\n"
-            + "        ],\n"
-            + "        \n"
-            + "        \"ignore_missing\":false,\n"
-            + "        \"ignore_failure\": false\n"
-            + "        \n"
-            + "      }\n"
-            + "    }\n"
-            + "  ]\n"
-            + "}";
+            + "  \"request_processors\": [\n"
+            + "    {\n"
+            + "      \"ml_inference\": {\n"
+            + "        \"tag\": \"ml_inference\",\n"
+            + "        \"description\": \"This processor is going to run ml inference during search request\",\n"
+            + "        \"model_id\": \""
+            + this.localModelId
+            + "\",\n"
+            + "        \"model_input\": \"{ \\\"text_docs\\\": [\\\"${ml_inference.text_docs}\\\"] ,\\\"return_number\\\": true,\\\"target_response\\\": [\\\"sentence_embedding\\\"]}\",\n"
+            + "        \"function_name\": \"text_embedding\",\n"
+            + "        \"full_response_path\": true,\n"
+            + "        \"input_map\": [\n"
+            + "          {\n"
+            + "            \"text_docs\": \"query.term.diary_embedding_size.value\"\n"
+            + "          }\n"
+            + "        ],\n"
+            + "        \"output_map\": [\n"
+            + "          {\n"
+            + "            \"query.term.diary_embedding_size.value\": \"$.inference_results[0].output[0].data.length()\"\n"
+            + "          }\n"
+            + "        ],\n"
+            + "        \n"
+            + "        \"ignore_missing\":false,\n"
+            + "        \"ignore_failure\": false\n"
+            + "        \n"
+            + "      }\n"
+            + "    }\n"
+            + "  ]\n"
+            + "}";

         String index_name = "daily_index";
         String pipelineName = "diary_embedding_pipeline_local";
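On the assertion rewritten in testMLInferenceProcessorRemoteModelOptionalInputs above: the indexed documents store diary_embedding_size as a JSON string ("1536") and the new diary_embedding_size_int as a JSON number (1536), so JsonPath returns a String for the former and a numeric value for the latter, which is why the updated check casts to double and compares with a small delta. A standalone sketch (not part of this PR; the class name and inline JSON are illustrative):

    import com.jayway.jsonpath.JsonPath;

    public class JsonPathTypeSketch {
        public static void main(String[] args) {
            // Hypothetical _source of one hit, mirroring the documents indexed in setup():
            // the keyword-style field holds a JSON string, the new field a JSON number.
            String source = "{\"diary_embedding_size\": \"1536\", \"diary_embedding_size_int\": 1536}";

            String asString = JsonPath.parse(source).read("$.diary_embedding_size");
            Number asNumber = JsonPath.parse(source).read("$.diary_embedding_size_int");

            System.out.println("1536".equals(asString));                            // true
            System.out.println(Math.abs(asNumber.doubleValue() - 1536.0) < 0.0001); // true
        }
    }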