
Commit 7d5c2ec

Merge branch 'main' into r1
Signed-off-by: Sandesh Kumar <sandeshkr419@gmail.com>
2 parents 2d7dfb1 + ebd743a commit 7d5c2ec

11 files changed (+314, -28 lines)

.github/workflows/benchmark-pull-request.yml (+2 -2)

@@ -22,7 +22,7 @@ jobs:
           echo "PR_NUMBER=${{ github.event.issue.number }}" >> $GITHUB_ENV
           echo "REPOSITORY=${{ github.event.repository.full_name }}" >> $GITHUB_ENV
           OPENSEARCH_VERSION=$(awk -F '=' '/^opensearch[[:space:]]*=/ {gsub(/[[:space:]]/, "", $2); print $2}' buildSrc/version.properties)
-          echo "OPENSEARCH_VERSION=$OPENSEARCH_VERSION-alpha1" >> $GITHUB_ENV
+          echo "OPENSEARCH_VERSION=$OPENSEARCH_VERSION-beta1" >> $GITHUB_ENV
           major_version=$(echo $OPENSEARCH_VERSION | cut -d'.' -f1)
           echo "OPENSEARCH_MAJOR_VERSION=$major_version" >> $GITHUB_ENV
           echo "USER_TAGS=pull_request_number:${{ github.event.issue.number }},repository:OpenSearch" >> $GITHUB_ENV
@@ -147,7 +147,7 @@ jobs:
           distribution: 'temurin'
       - name: Build and Assemble OpenSearch from PR
         run: |
-          ./gradlew :distribution:archives:linux-tar:assemble -Dbuild.snapshot=false -Dbuild.version_qualifier=alpha1
+          ./gradlew :distribution:archives:linux-tar:assemble -Dbuild.snapshot=false -Dbuild.version_qualifier=beta1
       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v4
         with:
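
The two qualifier changes switch the benchmark build from an alpha to a beta label. As a worked example (the concrete version in buildSrc/version.properties is not shown in this diff): if the file defined opensearch = 3.0.0, the awk extraction above would yield 3.0.0, and the job would now export OPENSEARCH_VERSION=3.0.0-beta1 and assemble the distribution with -Dbuild.version_qualifier=beta1.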

.github/workflows/dependabot_pr.yml (+1 -1)

@@ -56,7 +56,7 @@ jobs:
           commit_options: '--signoff'

       - name: Update the changelog
-        uses: dangoslen/dependabot-changelog-helper@v3
+        uses: dangoslen/dependabot-changelog-helper@v4
         with:
           version: 'Unreleased 2.x'

.github/workflows/publish-maven-snapshots.yml (+1 -1)

@@ -37,4 +37,4 @@ jobs:
           export SONATYPE_PASSWORD=$(aws secretsmanager get-secret-value --secret-id maven-snapshots-password --query SecretString --output text)
           echo "::add-mask::$SONATYPE_USERNAME"
           echo "::add-mask::$SONATYPE_PASSWORD"
-          ./gradlew publishNebulaPublicationToSnapshotsRepository -Dbuild.version_qualifier=alpha1
+          ./gradlew publishNebulaPublicationToSnapshotsRepository -Dbuild.version_qualifier=beta1

CHANGELOG.md (+3)

@@ -8,11 +8,13 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 - Change priority for scheduling reroute during timeout([#16445](https://github.com/opensearch-project/OpenSearch/pull/16445))
 - Renaming the node role search to warm ([#17573](https://github.com/opensearch-project/OpenSearch/pull/17573))
 - Introduce a new search node role to hold search only shards ([#17620](https://github.com/opensearch-project/OpenSearch/pull/17620))
+- Add dfs transformation function in XContentMapValues ([#17612](https://github.com/opensearch-project/OpenSearch/pull/17612))
 - [Star Tree] [Search] Resolving numeric range aggregation with metric aggregation using star-tree ([#17273](https://github.com/opensearch-project/OpenSearch/pull/17273))

 ### Dependencies
 - Bump `ch.qos.logback:logback-core` from 1.5.16 to 1.5.17 ([#17609](https://github.com/opensearch-project/OpenSearch/pull/17609))
 - Bump `org.jruby.joni:joni` from 2.2.3 to 2.2.5 ([#17608](https://github.com/opensearch-project/OpenSearch/pull/17608))
+- Bump `dangoslen/dependabot-changelog-helper` from 3 to 4 ([#17498](https://github.com/opensearch-project/OpenSearch/pull/17498))

 ### Changed

@@ -21,6 +23,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 ### Removed

 ### Fixed
+- Fix bytes parameter on `_cat/recovery` ([#17598](https://github.com/opensearch-project/OpenSearch/pull/17598))

 ### Security

plugins/repository-s3/src/internalClusterTest/java/org/opensearch/repositories/s3/S3RemoteStoreIT.java (+2)

@@ -10,6 +10,7 @@

 import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;

+import org.apache.lucene.tests.util.LuceneTestCase;
 import org.opensearch.action.admin.indices.delete.DeleteIndexRequest;
 import org.opensearch.common.SuppressForbidden;
 import org.opensearch.common.blobstore.BlobPath;
@@ -42,6 +43,7 @@
 import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;

 @ThreadLeakScope(ThreadLeakScope.Scope.NONE)
+@LuceneTestCase.AwaitsFix(bugUrl = "Flakiness seen for this class")
 public class S3RemoteStoreIT extends RemoteStoreCoreTestCase {

     @Override

rest-api-spec/src/main/resources/rest-api-spec/test/cat.recovery/10_basic.yml (+13 -10)

@@ -1,5 +1,8 @@
 ---
 "Test cat recovery output":
+  - skip:
+      version: " - 2.99.99"
+      reason: Output format changed in 3.0

   - do:
       cat.recovery: {}
@@ -35,10 +38,10 @@
 \d+ \s+ # files_recovered
 \d+\.\d+% \s+ # files_percent
 \d+ \s+ # files_total
-\d+ \s+ # bytes
-\d+ \s+ # bytes_recovered
+(\d+|\d+[.]\d+)(kb|b) \s+ # bytes
+(\d+|\d+[.]\d+)(kb|b) \s+ # bytes_recovered
 \d+\.\d+% \s+ # bytes_percent
-\d+ \s+ # bytes_total
+(\d+|\d+[.]\d+)(kb|b) \s+ # bytes_total
 -?\d+ \s+ # translog_ops
 \d+ \s+ # translog_ops_recovered
 -?\d+\.\d+% # translog_ops_percent
@@ -56,7 +59,7 @@
 (
 \d \s+ # shard
 ((\S+\s?){1,10})\s+ # source_node
-\d+ # bytes
+(\d+|\d+[.]\d+)(kb|b) # bytes
 \n
 )+
 $/
@@ -71,16 +74,16 @@
 (
 \d \s+ # shard
 ((\S+\s?){1,10})\s+ # target_node
-\d+ # bytes
+(\d+|\d+[.]\d+)(kb|b) # bytes
 \n
 )+
 $/

 ---
 "Test cat recovery output for closed index":
   - skip:
-      version: " - 7.1.99"
-      reason: closed indices are replicated starting version 7.2.0
+      version: " - 2.99.99"
+      reason: Output format changed in 3.0

   - do:
       indices.create:
@@ -122,10 +125,10 @@
 \d+ \s+ # files_recovered
 \d+\.\d+% \s+ # files_percent
 \d+ \s+ # files_total
-\d+ \s+ # bytes
-\d+ \s+ # bytes_recovered
+(\d+|\d+[.]\d+)(kb|b) \s+ # bytes
+(\d+|\d+[.]\d+)(kb|b) \s+ # bytes_recovered
 \d+\.\d+% \s+ # bytes_percent
-\d+ \s+ # bytes_total
+(\d+|\d+[.]\d+)(kb|b) \s+ # bytes_total
 0 \s+ # translog_ops (always 0 for closed indices)
 0 \s+ # translog_ops_recovered (always 0 for closed indices)
 100\.0% # translog_ops_percent (always 100.0% for closed indices)
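
With the bytes columns now emitted as human-readable sizes (see the RestCatRecoveryAction change below), the updated pattern (\d+|\d+[.]\d+)(kb|b) accepts values such as 0b, 230b, or 4.5kb, which the previous \d+ pattern would reject; the new skip blocks keep these assertions from running against pre-3.0 nodes that still print raw byte counts.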

server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java (+10 -8)

@@ -68,7 +68,9 @@
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;

@@ -1448,9 +1450,8 @@ public void testClusterManagerFailoverDuringSnapshotCreation() throws Exception

         ensureStableCluster(4, internalCluster().getClusterManagerName());

-        final SnapshotInfo[] snapshotInfo = new SnapshotInfo[1];
-        final Boolean[] snapshotFailed = new Boolean[1];
-        snapshotFailed[0] = false;
+        final AtomicReference<SnapshotInfo> snapshotInfoRef = new AtomicReference<>();
+        final AtomicBoolean snapshotFailed = new AtomicBoolean(false);
         Thread snapshotThread = new Thread(() -> {
             try {
                 // Start snapshot creation
@@ -1459,10 +1460,10 @@ public void testClusterManagerFailoverDuringSnapshotCreation() throws Exception
                     .prepareCreateSnapshot(snapshotRepoName, snapshotName1)
                     .setWaitForCompletion(true)
                     .get();
-                snapshotInfo[0] = createSnapshotResponse.getSnapshotInfo();
+                snapshotInfoRef.set(createSnapshotResponse.getSnapshotInfo());

             } catch (Exception e) {
-                snapshotFailed[0] = true;
+                snapshotFailed.set(true);
             }
         });
         snapshotThread.start();
@@ -1482,10 +1483,11 @@ public void testClusterManagerFailoverDuringSnapshotCreation() throws Exception
         repository.getRepositoryData(repositoryDataPlainActionFuture);

         RepositoryData repositoryData = repositoryDataPlainActionFuture.get();
-        if (snapshotFailed[0]) {
-            assertFalse(repositoryData.getSnapshotIds().contains(snapshotInfo[0].snapshotId()));
+        SnapshotInfo snapshotInfo = snapshotInfoRef.get();
+        if (snapshotFailed.get()) {
+            assertTrue(repositoryData.getSnapshotIds().isEmpty());
         } else {
-            assertTrue(repositoryData.getSnapshotIds().contains(snapshotInfo[0].snapshotId()));
+            assertTrue(repositoryData.getSnapshotIds().contains(snapshotInfo.snapshotId()));
         }
     }
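
Swapping the one-element arrays for AtomicReference and AtomicBoolean makes the hand-off between the snapshot thread and the test thread explicit and thread-safe. A minimal sketch of the same pattern, independent of the test (class name and values here are illustrative, not from OpenSearch):

import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;

public class ResultHandoffSketch {
    public static void main(String[] args) throws InterruptedException {
        // The worker publishes its outcome through atomics rather than a mutable one-element array.
        AtomicReference<String> result = new AtomicReference<>();
        AtomicBoolean failed = new AtomicBoolean(false);

        Thread worker = new Thread(() -> {
            try {
                result.set("snapshot-info");   // stand-in for createSnapshotResponse.getSnapshotInfo()
            } catch (Exception e) {
                failed.set(true);
            }
        });
        worker.start();
        worker.join();   // join() guarantees the writes above are visible to the reads below

        System.out.println(failed.get() ? "worker failed" : "worker result: " + result.get());
    }
}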

server/src/main/java/org/opensearch/common/xcontent/support/XContentMapValues.java (+149)

@@ -43,9 +43,11 @@
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.core.common.Strings;

+import java.util.ArrayDeque;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.Deque;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
@@ -60,6 +62,8 @@
  */
 public class XContentMapValues {

+    private static final String TRANSFORMER_TRIE_LEAF_KEY = "$transformer";
+
     /**
      * Extracts raw values (string, int, and so on) based on the path provided returning all of them
      * as a single list.
@@ -621,4 +625,149 @@ public static String[] nodeStringArrayValue(Object node) {
             return Strings.splitStringByCommaToArray(node.toString());
         }
     }
+
+    /**
+     * Performs a depth first traversal of a map and applies a transformation for each field matched along the way. For
+     * duplicated paths with transformers (i.e. "test.nested" and "test.nested.field"), only the transformer for
+     * the shorter path is applied.
+     *
+     * @param source Source map to perform transformation on
+     * @param transformers Map from path to transformer to apply to each path. Each transformer is a function that takes
+     *                     the current value and returns a transformed value
+     * @param inPlace If true, modify the source map directly; if false, create a copy
+     * @return Map with transformations applied
+     */
+    public static Map<String, Object> transform(
+        Map<String, Object> source,
+        Map<String, Function<Object, Object>> transformers,
+        boolean inPlace
+    ) {
+        return transform(transformers, inPlace).apply(source);
+    }
+
+    /**
+     * Returns function that performs a depth first traversal of a map and applies a transformation for each field
+     * matched along the way. For duplicated paths with transformers (i.e. "test.nested" and "test.nested.field"), only
+     * the transformer for the shorter path is applied.
+     *
+     * @param transformers Map from path to transformer to apply to each path. Each transformer is a function that takes
+     *                     the current value and returns a transformed value
+     * @param inPlace If true, modify the source map directly; if false, create a copy
+     * @return Function that takes a map and returns a transformed version of the map
+     */
+    public static Function<Map<String, Object>, Map<String, Object>> transform(
+        Map<String, Function<Object, Object>> transformers,
+        boolean inPlace
+    ) {
+        Map<String, Object> transformerTrie = buildTransformerTrie(transformers);
+        return source -> {
+            Deque<TransformContext> stack = new ArrayDeque<>();
+            Map<String, Object> result = inPlace ? source : new HashMap<>(source);
+            stack.push(new TransformContext(result, transformerTrie));
+
+            processStack(stack, inPlace);
+            return result;
+        };
+    }
+
+    @SuppressWarnings("unchecked")
+    private static Map<String, Object> buildTransformerTrie(Map<String, Function<Object, Object>> transformers) {
+        Map<String, Object> trie = new HashMap<>();
+        for (Map.Entry<String, Function<Object, Object>> entry : transformers.entrySet()) {
+            String[] pathElements = entry.getKey().split("\\.");
+            Map<String, Object> subTrie = trie;
+            for (String pathElement : pathElements) {
+                subTrie = (Map<String, Object>) subTrie.computeIfAbsent(pathElement, k -> new HashMap<>());
+            }
+            subTrie.put(TRANSFORMER_TRIE_LEAF_KEY, entry.getValue());
+        }
+        return trie;
+    }
+
+    private static void processStack(Deque<TransformContext> stack, boolean inPlace) {
+        while (!stack.isEmpty()) {
+            TransformContext ctx = stack.pop();
+            processMap(ctx.map, ctx.trie, stack, inPlace);
+        }
+    }
+
+    private static void processMap(
+        Map<String, Object> currentMap,
+        Map<String, Object> currentTrie,
+        Deque<TransformContext> stack,
+        boolean inPlace
+    ) {
+        for (Map.Entry<String, Object> entry : currentMap.entrySet()) {
+            processEntry(entry, currentTrie, stack, inPlace);
+        }
+    }
+
+    private static void processEntry(
+        Map.Entry<String, Object> entry,
+        Map<String, Object> currentTrie,
+        Deque<TransformContext> stack,
+        boolean inPlace
+    ) {
+        String key = entry.getKey();
+        Object value = entry.getValue();
+
+        Object subTrieObj = currentTrie.get(key);
+        if (subTrieObj instanceof Map == false) {
+            return;
+        }
+        Map<String, Object> subTrie = nodeMapValue(subTrieObj, "transform");
+
+        // Apply transformation if available
+        Function<Object, Object> transformer = (Function<Object, Object>) subTrie.get(TRANSFORMER_TRIE_LEAF_KEY);
+        if (transformer != null) {
+            entry.setValue(transformer.apply(value));
+            return;
+        }
+
+        // Process nested structures
+        if (value instanceof Map) {
+            Map<String, Object> subMap = nodeMapValue(value, "transform");
+            if (inPlace == false) {
+                subMap = new HashMap<>(subMap);
+                entry.setValue(subMap);
+            }
+            stack.push(new TransformContext(subMap, subTrie));
+        } else if (value instanceof List<?> list) {
+            List<Object> subList = (List<Object>) list;
+            if (inPlace == false) {
+                subList = new ArrayList<>(list);
+                entry.setValue(subList);
+            }
+            processList(subList, subTrie, stack, inPlace);
+        }
+    }
+
+    private static void processList(
+        List<Object> list,
+        Map<String, Object> transformerTrie,
+        Deque<TransformContext> stack,
+        boolean inPlace
+    ) {
+        for (int i = list.size() - 1; i >= 0; i--) {
+            Object value = list.get(i);
+            if (value instanceof Map) {
+                Map<String, Object> subMap = nodeMapValue(value, "transform");
+                if (inPlace == false) {
+                    subMap = new HashMap<>(subMap);
+                    list.set(i, subMap);
+                }
+                stack.push(new TransformContext(subMap, transformerTrie));
+            }
+        }
+    }
+
+    private static class TransformContext {
+        Map<String, Object> map;
+        Map<String, Object> trie;
+
+        TransformContext(Map<String, Object> map, Map<String, Object> trie) {
+            this.map = map;
+            this.trie = trie;
+        }
+    }
 }
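
A rough usage sketch of the new helper (the document shape, field names, and masking function below are hypothetical, not part of this change): transformers are keyed by dot-separated paths, and each function receives the current value at that path.

import java.util.HashMap;
import java.util.Map;
import java.util.function.Function;

import org.opensearch.common.xcontent.support.XContentMapValues;

public class TransformUsageSketch {
    public static void main(String[] args) {
        // Hypothetical document: { "user": { "name": "alice", "age": 30 } }
        Map<String, Object> user = new HashMap<>();
        user.put("name", "alice");
        user.put("age", 30);
        Map<String, Object> source = new HashMap<>();
        source.put("user", user);

        // Mask the value at path "user.name"; everything else is left untouched.
        Map<String, Function<Object, Object>> transformers = new HashMap<>();
        transformers.put("user.name", value -> "***");

        // inPlace = false returns a transformed copy and leaves the source map unchanged.
        Map<String, Object> transformed = XContentMapValues.transform(source, transformers, false);
        System.out.println(transformed);   // e.g. {user={age=30, name=***}} (HashMap ordering may vary)
    }
}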

server/src/main/java/org/opensearch/rest/action/cat/RestCatRecoveryAction.java (+4 -3)

@@ -42,6 +42,7 @@
 import org.opensearch.common.unit.TimeValue;
 import org.opensearch.common.xcontent.XContentOpenSearchExtension;
 import org.opensearch.core.common.Strings;
+import org.opensearch.core.common.unit.ByteSizeValue;
 import org.opensearch.indices.recovery.RecoveryState;
 import org.opensearch.rest.RestRequest;
 import org.opensearch.rest.RestResponse;
@@ -196,10 +197,10 @@ public int compare(RecoveryState o1, RecoveryState o2) {
             t.addCell(state.getIndex().recoveredFileCount());
             t.addCell(String.format(Locale.ROOT, "%1.1f%%", state.getIndex().recoveredFilesPercent()));
             t.addCell(state.getIndex().totalFileCount());
-            t.addCell(state.getIndex().totalRecoverBytes());
-            t.addCell(state.getIndex().recoveredBytes());
+            t.addCell(new ByteSizeValue(state.getIndex().totalRecoverBytes()));
+            t.addCell(new ByteSizeValue(state.getIndex().recoveredBytes()));
             t.addCell(String.format(Locale.ROOT, "%1.1f%%", state.getIndex().recoveredBytesPercent()));
-            t.addCell(state.getIndex().totalBytes());
+            t.addCell(new ByteSizeValue(state.getIndex().totalBytes()));
             t.addCell(state.getTranslog().totalOperations());
             t.addCell(state.getTranslog().recoveredOperations());
             t.addCell(String.format(Locale.ROOT, "%1.1f%%", state.getTranslog().recoveredPercent()));
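
Wrapping the raw counts in ByteSizeValue is what turns the _cat/recovery byte cells into human-readable units, which is why the YAML regexes above now expect a (kb|b) suffix. A small sketch of the intended effect (the exact rendered strings are assumptions based on ByteSizeValue's usual formatting):

import org.opensearch.core.common.unit.ByteSizeValue;

public class ByteSizeCellSketch {
    public static void main(String[] args) {
        // Previously the cell held the raw long, e.g. 4608; now it holds a ByteSizeValue.
        System.out.println(new ByteSizeValue(230));    // expected output along the lines of "230b"
        System.out.println(new ByteSizeValue(4608));   // expected output along the lines of "4.5kb"
    }
}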
