[Backport 2.x] Model changes for query_group_hashcode and id #192

Merged
1 commit merged on Jan 16, 2025
@@ -288,7 +288,7 @@ private void constructSearchQueryRecord(final SearchPhaseContext context, final
// Add hashcode attribute when grouping is enabled
if (queryInsightsService.isGroupingEnabled()) {
String hashcode = queryShapeGenerator.getShapeHashCodeAsString(queryShape);
- attributes.put(Attribute.ID, hashcode);
+ attributes.put(Attribute.QUERY_GROUP_HASHCODE, hashcode);
}
}

@@ -271,7 +271,7 @@ int numberOfTopGroups() {
private String getGroupingId(final SearchQueryRecord searchQueryRecord) {
switch (groupingType) {
case SIMILARITY:
- return searchQueryRecord.getAttributes().get(Attribute.ID).toString();
+ return searchQueryRecord.getAttributes().get(Attribute.QUERY_GROUP_HASHCODE).toString();
case NONE:
throw new IllegalArgumentException("Should not try to group queries if grouping type is NONE");
default:
@@ -58,9 +58,9 @@ public enum Attribute {
*/
LABELS,
/**
- * Query Group hashcode or query hashcode representing a unique identifier for the query/group
+ * Query Group hashcode
*/
- ID,
+ QUERY_GROUP_HASHCODE,
/**
* Grouping type of the query record (none, similarity)
*/
@@ -15,6 +15,7 @@
import java.util.List;
import java.util.Map;
import java.util.Objects;
+ import java.util.UUID;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.opensearch.Version;
@@ -41,6 +42,8 @@ public class SearchQueryRecord implements ToXContentObject, Writeable {
private final long timestamp;
private final Map<MetricType, Measurement> measurements;
private final Map<Attribute, Object> attributes;
+ private final String id;

/**
* Timestamp
*/
@@ -93,12 +96,16 @@ public class SearchQueryRecord implements ToXContentObject, Writeable {
* Grouping type of the query record (none, similarity)
*/
public static final String GROUP_BY = "group_by";

/**
- * Query Group hashcode or query hashcode representing a unique identifier for the query/group
+ * UUID
*/
public static final String ID = "id";

+ /**
+ * Query Group hashcode
+ */
+ public static final String QUERY_GROUP_HASHCODE = "query_group_hashcode";

public static final String MEASUREMENTS = "measurements";
private String groupingId;

@@ -111,6 +118,7 @@ public class SearchQueryRecord implements ToXContentObject, Writeable {
*/
public SearchQueryRecord(final StreamInput in) throws IOException, ClassCastException {
this.timestamp = in.readLong();
+ this.id = in.readString();
if (in.getVersion().onOrAfter(Version.V_2_17_0)) {
measurements = new LinkedHashMap<>();
in.readOrderedMap(MetricType::readFromStream, Measurement::readFromStream)
@@ -137,12 +145,30 @@ public SearchQueryRecord(final StreamInput in) throws IOException, ClassCastExce
* @param attributes A list of Attributes associated with this query
*/
public SearchQueryRecord(final long timestamp, Map<MetricType, Measurement> measurements, final Map<Attribute, Object> attributes) {
+ this(timestamp, measurements, attributes, UUID.randomUUID().toString());
+ }

+ /**
+ * Constructor of SearchQueryRecord
+ *
+ * @param timestamp The timestamp of the query.
+ * @param measurements A list of Measurement associated with this query
+ * @param attributes A list of Attributes associated with this query
+ * @param id unique id for a search query record
+ */
+ public SearchQueryRecord(
+     final long timestamp,
+     Map<MetricType, Measurement> measurements,
+     final Map<Attribute, Object> attributes,
+     String id
+ ) {
if (measurements == null) {
throw new IllegalArgumentException("Measurements cannot be null");
}
this.measurements = measurements;
this.attributes = attributes;
this.timestamp = timestamp;
+ this.id = id;
}

/**
@@ -156,6 +182,7 @@ public static SearchQueryRecord fromXContent(XContentParser parser) throws IOExc
long timestamp = 0L;
Map<MetricType, Measurement> measurements = new HashMap<>();
Map<Attribute, Object> attributes = new HashMap<>();
+ String id = null;

parser.nextToken();
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
@@ -167,6 +194,9 @@ public static SearchQueryRecord fromXContent(XContentParser parser) throws IOExc
case TIMESTAMP:
timestamp = parser.longValue();
break;
+ case ID:
+     id = parser.text();
+     break;
case LATENCY:
case CPU:
case MEMORY:
@@ -179,8 +209,8 @@ public static SearchQueryRecord fromXContent(XContentParser parser) throws IOExc
case GROUP_BY:
attributes.put(Attribute.GROUP_BY, parser.text());
break;
- case ID:
-     attributes.put(Attribute.ID, parser.text());
+ case QUERY_GROUP_HASHCODE:
+     attributes.put(Attribute.QUERY_GROUP_HASHCODE, parser.text());
break;
case SOURCE:
XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser);
@@ -264,7 +294,7 @@ public static SearchQueryRecord fromXContent(XContentParser parser) throws IOExc
log.error("Error when parsing through search hit", e);
}
}
- return new SearchQueryRecord(timestamp, measurements, attributes);
+ return new SearchQueryRecord(timestamp, measurements, attributes, id);
}

/**
@@ -337,6 +367,8 @@ public void addAttribute(final Attribute attribute, final Object value) {
public XContentBuilder toXContent(final XContentBuilder builder, final Params params) throws IOException {
builder.startObject();
builder.field("timestamp", timestamp);
builder.field("id", id);

for (Map.Entry<Attribute, Object> entry : attributes.entrySet()) {
builder.field(entry.getKey().toString(), entry.getValue());
}
@@ -358,6 +390,7 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa
@Override
public void writeTo(final StreamOutput out) throws IOException {
out.writeLong(timestamp);
+ out.writeString(id);
if (out.getVersion().onOrAfter(Version.V_2_17_0)) {
out.writeMap(
measurements,
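For reference, a minimal usage sketch of the changed model (hypothetical caller code, not part of this PR; the package imports are assumed to follow the plugin's org.opensearch.plugin.insights.rules.model layout): the existing three-argument constructor now assigns a random UUID as the record id, the new four-argument constructor lets callers such as fromXContent supply the id explicitly, and the grouping hashcode is stored under Attribute.QUERY_GROUP_HASHCODE instead of Attribute.ID.

// Hypothetical example, not part of the diff; imports assume the plugin's package layout.
import java.util.HashMap;
import java.util.Map;
import org.opensearch.plugin.insights.rules.model.Attribute;
import org.opensearch.plugin.insights.rules.model.Measurement;
import org.opensearch.plugin.insights.rules.model.MetricType;
import org.opensearch.plugin.insights.rules.model.SearchQueryRecord;

public class SearchQueryRecordIdExample {
    public static void main(String[] args) {
        Map<MetricType, Measurement> measurements = Map.of(MetricType.LATENCY, new Measurement(1L));
        Map<Attribute, Object> attributes = new HashMap<>();
        // The query-shape hashcode now lives under its own attribute key rather than Attribute.ID.
        attributes.put(Attribute.QUERY_GROUP_HASHCODE, "example_query_shape_hashcode");

        // Three-argument constructor: the record generates a random UUID id internally.
        SearchQueryRecord generatedId = new SearchQueryRecord(System.currentTimeMillis(), measurements, attributes);

        // Four-argument constructor: the caller supplies the id, as fromXContent does when re-parsing a record.
        SearchQueryRecord explicitId = new SearchQueryRecord(System.currentTimeMillis(), measurements, attributes, "sample_id");
    }
}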
@@ -139,7 +139,7 @@ public static List<SearchQueryRecord> generateQueryInsightRecords(
attributes.put(Attribute.TOTAL_SHARDS, randomIntBetween(1, 100));
attributes.put(Attribute.INDICES, randomArray(1, 3, Object[]::new, () -> randomAlphaOfLengthBetween(5, 10)));
attributes.put(Attribute.PHASE_LATENCY_MAP, phaseLatencyMap);
- attributes.put(Attribute.ID, Objects.hashCode(i));
+ attributes.put(Attribute.QUERY_GROUP_HASHCODE, Objects.hashCode(i));
attributes.put(Attribute.GROUP_BY, GroupingType.NONE);
attributes.put(
Attribute.TASK_RESOURCE_USAGES,
@@ -200,13 +200,13 @@ public static List<List<SearchQueryRecord>> generateMultipleQueryInsightsRecords

public static void populateSameQueryHashcodes(List<SearchQueryRecord> searchQueryRecords) {
for (SearchQueryRecord record : searchQueryRecords) {
- record.getAttributes().put(Attribute.ID, 1);
+ record.getAttributes().put(Attribute.QUERY_GROUP_HASHCODE, 1);
}
}

public static void populateHashcode(List<SearchQueryRecord> searchQueryRecords, int hash) {
for (SearchQueryRecord record : searchQueryRecords) {
- record.getAttributes().put(Attribute.ID, hash);
+ record.getAttributes().put(Attribute.QUERY_GROUP_HASHCODE, hash);
}
}

@@ -223,7 +223,7 @@ public static TopQueries createRandomTopQueries() {
return new TopQueries(node, records);
}

- public static TopQueries createFixedTopQueries() {
+ public static TopQueries createFixedTopQueries(String id) {
DiscoveryNode node = new DiscoveryNode(
"node_for_top_queries_test",
buildNewFakeTransportAddress(),
@@ -232,12 +232,12 @@ public static TopQueries createFixedTopQueries() {
VersionUtils.randomVersion(random())
);
List<SearchQueryRecord> records = new ArrayList<>();
- records.add(createFixedSearchQueryRecord());
+ records.add(createFixedSearchQueryRecord(id));

return new TopQueries(node, records);
}

- public static SearchQueryRecord createFixedSearchQueryRecord() {
+ public static SearchQueryRecord createFixedSearchQueryRecord(String id) {
long timestamp = 1706574180000L;
Map<MetricType, Measurement> measurements = Map.of(MetricType.LATENCY, new Measurement(1L));

@@ -256,7 +256,7 @@ public static SearchQueryRecord createFixedSearchQueryRecord() {
)
);

- return new SearchQueryRecord(timestamp, measurements, attributes);
+ return new SearchQueryRecord(timestamp, measurements, attributes, id);
}

public static void compareJson(ToXContent param1, ToXContent param2) throws IOException {
@@ -44,7 +44,7 @@ public void testWithAllDifferentHashcodes() {
Set<Integer> hashcodeSet = new HashSet<>();
for (SearchQueryRecord record : records) {
groupedRecord = minMaxHeapQueryGrouper.add(record);
- int hashcode = (int) groupedRecord.getAttributes().get(Attribute.ID);
+ int hashcode = (int) groupedRecord.getAttributes().get(Attribute.QUERY_GROUP_HASHCODE);
hashcodeSet.add(hashcode);
}
assertEquals(numOfRecords, hashcodeSet.size());
@@ -58,7 +58,7 @@ public void testWithAllSameHashcodes() {
Set<Integer> hashcodeSet = new HashSet<>();
for (SearchQueryRecord record : records) {
groupedRecord = minMaxHeapQueryGrouper.add(record);
- int hashcode = (int) groupedRecord.getAttributes().get(Attribute.ID);
+ int hashcode = (int) groupedRecord.getAttributes().get(Attribute.QUERY_GROUP_HASHCODE);
hashcodeSet.add(hashcode);
}
assertEquals(1, hashcodeSet.size());
@@ -40,18 +40,61 @@ public void testSerialize() throws Exception {
}

public void testToXContent() throws IOException {
- char[] expectedXcontent =
- "{\"top_queries\":[{\"timestamp\":1706574180000,\"node_id\":\"node_for_top_queries_test\",\"phase_latency_map\":{\"expand\":1,\"query\":10,\"fetch\":1},\"task_resource_usages\":[{\"action\":\"action\",\"taskId\":2,\"parentTaskId\":1,\"nodeId\":\"id\",\"taskResourceUsage\":{\"cpu_time_in_nanos\":1000,\"memory_in_bytes\":2000}},{\"action\":\"action2\",\"taskId\":3,\"parentTaskId\":1,\"nodeId\":\"id2\",\"taskResourceUsage\":{\"cpu_time_in_nanos\":2000,\"memory_in_bytes\":1000}}],\"search_type\":\"query_then_fetch\",\"measurements\":{\"latency\":{\"number\":1,\"count\":1,\"aggregationType\":\"NONE\"}}}]}"
- .toCharArray();
- TopQueries topQueries = QueryInsightsTestUtils.createFixedTopQueries();
+ String id = "sample_id";

char[] expectedXContent = ("{"
+ "\"top_queries\":[{"
+ "\"timestamp\":1706574180000,"
+ "\"node_id\":\"node_for_top_queries_test\","
+ "\"phase_latency_map\":{"
+ "\"expand\":1,"
+ "\"query\":10,"
+ "\"fetch\":1"
+ "},"
+ "\"task_resource_usages\":[{"
+ "\"action\":\"action\","
+ "\"taskId\":2,"
+ "\"parentTaskId\":1,"
+ "\"nodeId\":\"id\","
+ "\"taskResourceUsage\":{"
+ "\"cpu_time_in_nanos\":1000,"
+ "\"memory_in_bytes\":2000"
+ "}"
+ "},{"
+ "\"action\":\"action2\","
+ "\"taskId\":3,"
+ "\"parentTaskId\":1,"
+ "\"nodeId\":\"id2\","
+ "\"taskResourceUsage\":{"
+ "\"cpu_time_in_nanos\":2000,"
+ "\"memory_in_bytes\":1000"
+ "}"
+ "}],"
+ "\"search_type\":\"query_then_fetch\","
+ "\"measurements\":{"
+ "\"latency\":{"
+ "\"number\":1,"
+ "\"count\":1,"
+ "\"aggregationType\":\"NONE\""
+ "}"
+ "},"
+ "\"id\":\""
+ id
+ "\""
+ "}]"
+ "}").toCharArray();

+ TopQueries topQueries = QueryInsightsTestUtils.createFixedTopQueries(id);
ClusterName clusterName = new ClusterName("test-cluster");
TopQueriesResponse response = new TopQueriesResponse(clusterName, List.of(topQueries), new ArrayList<>(), 10, MetricType.LATENCY);

XContentBuilder builder = MediaTypeRegistry.contentBuilder(MediaTypeRegistry.JSON);
char[] xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString().toCharArray();
- Arrays.sort(expectedXcontent);

+ Arrays.sort(expectedXContent);
Arrays.sort(xContent);

- assertEquals(Arrays.hashCode(expectedXcontent), Arrays.hashCode(xContent));
+ assertEquals(Arrays.hashCode(expectedXContent), Arrays.hashCode(xContent));
}

/**
@@ -45,19 +45,19 @@ public void testAllMetricTypes() {
}

public void testCompare() {
- SearchQueryRecord record1 = QueryInsightsTestUtils.createFixedSearchQueryRecord();
- SearchQueryRecord record2 = QueryInsightsTestUtils.createFixedSearchQueryRecord();
+ SearchQueryRecord record1 = QueryInsightsTestUtils.createFixedSearchQueryRecord("id");
+ SearchQueryRecord record2 = QueryInsightsTestUtils.createFixedSearchQueryRecord("id");
assertEquals(0, SearchQueryRecord.compare(record1, record2, MetricType.LATENCY));
}

public void testEqual() {
- SearchQueryRecord record1 = QueryInsightsTestUtils.createFixedSearchQueryRecord();
- SearchQueryRecord record2 = QueryInsightsTestUtils.createFixedSearchQueryRecord();
+ SearchQueryRecord record1 = QueryInsightsTestUtils.createFixedSearchQueryRecord("id");
+ SearchQueryRecord record2 = QueryInsightsTestUtils.createFixedSearchQueryRecord("id");
assertEquals(record1, record2);
}

public void testFromXContent() {
- SearchQueryRecord record = QueryInsightsTestUtils.createFixedSearchQueryRecord();
+ SearchQueryRecord record = QueryInsightsTestUtils.createFixedSearchQueryRecord("id");
try (XContentParser recordParser = createParser(JsonXContent.jsonXContent, record.toString())) {
SearchQueryRecord parsedRecord = SearchQueryRecord.fromXContent(recordParser);
QueryInsightsTestUtils.checkRecordsEquals(List.of(record), List.of(parsedRecord));