Skip to content

Commit 7e8af9e

Browse files
committed
Update Gradle Wrapper to 8.2.1 for Enhanced JDK20 Compatibility
- **Reason for Upgrade**: The migration to JDK20 ([reference](https://github.com/opensearch-project/opensearch-build/blob/aa65a8ecd69f77c3d3104043dd1c48dff708bffa/manifests/3.0.0/opensearch-3.0.0.yml#L9)) rendered the current Gradle version (7.6.1) incompatible. - **Actions Taken**: - **Gradle Wrapper Update**: Upgraded the Gradle wrapper to version 8.2.1 to maintain compatibility with JDK20. The Gradle wrapper files are generated using the `./gradlew wrapper` command. - Applied `spotless` due to new formatting requirements in Gradle 8. - Resolved test "jar hell" issues. Gradle 8 introduced internal JARs to the test classpath that conflicted with dependencies from `org.junit.vintage:junit-vintage-engine`. As a remedy, these conflicting JARs have been excluded. - **Relevant Pull Requests**: - [Alerting#893](https://github.com/opensearch-project/alerting/pull/893/files) - [ML-Commons#892](opensearch-project/ml-commons#892) - [Security PR](opensearch-project/security#2978) - **Verification**: Successfully verified the changes using `gradle build`. Signed-off-by: Kaituo Li <kaituo@amazon.com>
1 parent 5ac6390 commit 7e8af9e

File tree

61 files changed

+409
-643
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

61 files changed

+409
-643
lines changed

.github/workflows/test_build_multi_platform.yml

+1-1
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ jobs:
1111
Build-ad-windows:
1212
strategy:
1313
matrix:
14-
java: [ 11, 17 ]
14+
java: [ 11, 17, 20 ]
1515
name: Build and Test Anomaly Detection Plugin on Windows
1616
runs-on: windows-latest
1717
steps:

build.gradle

+24-16
Original file line numberDiff line numberDiff line change
@@ -64,8 +64,8 @@ buildscript {
6464
}
6565

6666
plugins {
67-
id 'nebula.ospackage' version "8.3.0" apply false
68-
id "com.diffplug.gradle.spotless" version "3.26.1"
67+
id 'com.netflix.nebula.ospackage' version "11.0.0"
68+
id "com.diffplug.spotless" version "6.18.0"
6969
id 'java-library'
7070
// Gradle 7.6 support was added in test-retry 1.4.0.
7171
id 'org.gradle.test-retry' version '1.4.1'
@@ -160,11 +160,12 @@ dependencies {
160160
testImplementation group: 'net.bytebuddy', name: 'byte-buddy', version: '1.9.15'
161161
testImplementation group: 'net.bytebuddy', name: 'byte-buddy-agent', version: '1.9.15'
162162
testCompileOnly 'org.apiguardian:apiguardian-api:1.1.0'
163-
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.7.2'
164-
testImplementation 'org.junit.jupiter:junit-jupiter-params:5.7.2'
165-
testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.7.2'
163+
// jupiter is required to run unit tests not inherited from OpenSearchTestCase (e.g., PreviousValueImputerTests)
164+
testImplementation 'org.junit.jupiter:junit-jupiter-api:5.8.2'
165+
testImplementation 'org.junit.jupiter:junit-jupiter-params:5.8.2'
166+
testImplementation 'org.junit.jupiter:junit-jupiter-engine:5.8.2'
166167
testImplementation "org.opensearch:opensearch-core:${opensearch_version}"
167-
testRuntimeOnly 'org.junit.vintage:junit-vintage-engine:5.7.2'
168+
testRuntimeOnly 'org.junit.vintage:junit-vintage-engine:5.8.2'
168169
testCompileOnly 'junit:junit:4.13.2'
169170
}
170171

@@ -302,6 +303,14 @@ test {
302303
excludeTestsMatching "org.opensearch.ad.ml.HCADModelPerfTests"
303304
}
304305
}
306+
307+
/* Gradle 8 is including some of its own internal JARs into the test classpath, and there's
308+
overlap with the dependencies org.junit.vintage:junit-vintage-engine pulling in. To prevent
309+
jar hell, exclude these problematic JARs. */
310+
classpath = classpath.filter {
311+
!it.toString().contains("junit-platform-engine-1.8.2.jar") &&
312+
!it.toString().contains("junit-platform-commons-1.8.2.jar")
313+
}
305314
}
306315

307316
task integTest(type: RestIntegTestTask) {
@@ -711,8 +720,8 @@ jacocoTestCoverageVerification {
711720

712721
jacocoTestReport {
713722
reports {
714-
xml.enabled = true
715-
html.enabled = true
723+
xml.required = true // for coverlay
724+
html.required = true // human readable
716725
}
717726
}
718727

@@ -722,10 +731,11 @@ jacocoTestCoverageVerification.dependsOn jacocoTestReport
722731
compileJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-try,-unchecked"
723732

724733
test {
734+
// required to run unit tests not inherited from OpenSearchTestCase (e.g., PreviousValueImputerTests)
725735
useJUnitPlatform()
726736
}
727737

728-
apply plugin: 'nebula.ospackage'
738+
apply plugin: 'com.netflix.nebula.ospackage'
729739

730740
// This is afterEvaluate because the bundlePlugin ZIP task is updated afterEvaluate and changes the ZIP name to match the plugin name
731741
afterEvaluate {
@@ -735,7 +745,7 @@ afterEvaluate {
735745
version = "${project.version}" - "-SNAPSHOT"
736746

737747
into '/usr/share/opensearch/plugins'
738-
from(zipTree(bundlePlugin.archivePath)) {
748+
from(zipTree(bundlePlugin.archiveFile)) {
739749
into opensearchplugin.name
740750
}
741751

@@ -766,9 +776,8 @@ afterEvaluate {
766776
task renameRpm(type: Copy) {
767777
from("$buildDir/distributions")
768778
into("$buildDir/distributions")
769-
include archiveName
770-
rename archiveName, "${packageName}-${version}.rpm"
771-
doLast { delete file("$buildDir/distributions/$archiveName") }
779+
rename "$archiveFileName", "${packageName}-${archiveVersion}.rpm"
780+
doLast { delete file("$buildDir/distributions/$archiveFileName") }
772781
}
773782
}
774783

@@ -779,9 +788,8 @@ afterEvaluate {
779788
task renameDeb(type: Copy) {
780789
from("$buildDir/distributions")
781790
into("$buildDir/distributions")
782-
include archiveName
783-
rename archiveName, "${packageName}-${version}.deb"
784-
doLast { delete file("$buildDir/distributions/$archiveName") }
791+
rename "$archiveFileName", "${packageName}-${archiveVersion}.deb"
792+
doLast { delete file("$buildDir/distributions/$archiveFileName") }
785793
}
786794
}
787795

gradle/wrapper/gradle-wrapper.jar

1.76 KB
Binary file not shown.
+2-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
distributionBase=GRADLE_USER_HOME
22
distributionPath=wrapper/dists
3-
distributionUrl=https\://services.gradle.org/distributions/gradle-7.6.1-bin.zip
3+
distributionUrl=https\://services.gradle.org/distributions/gradle-8.2.1-bin.zip
44
networkTimeout=10000
5+
validateDistributionUrl=true
56
zipStoreBase=GRADLE_USER_HOME
67
zipStorePath=wrapper/dists

gradlew

+10-6
Original file line numberDiff line numberDiff line change
@@ -85,9 +85,6 @@ done
8585
APP_BASE_NAME=${0##*/}
8686
APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
8787

88-
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
89-
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
90-
9188
# Use the maximum available, or set MAX_FD != -1 to use that value.
9289
MAX_FD=maximum
9390

@@ -133,26 +130,29 @@ location of your Java installation."
133130
fi
134131
else
135132
JAVACMD=java
136-
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
133+
if ! command -v java >/dev/null 2>&1
134+
then
135+
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
137136
138137
Please set the JAVA_HOME variable in your environment to match the
139138
location of your Java installation."
139+
fi
140140
fi
141141

142142
# Increase the maximum file descriptors if we can.
143143
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
144144
case $MAX_FD in #(
145145
max*)
146146
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
147-
# shellcheck disable=SC3045
147+
# shellcheck disable=SC3045
148148
MAX_FD=$( ulimit -H -n ) ||
149149
warn "Could not query maximum file descriptor limit"
150150
esac
151151
case $MAX_FD in #(
152152
'' | soft) :;; #(
153153
*)
154154
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
155-
# shellcheck disable=SC3045
155+
# shellcheck disable=SC3045
156156
ulimit -n "$MAX_FD" ||
157157
warn "Could not set maximum file descriptor limit to $MAX_FD"
158158
esac
@@ -197,6 +197,10 @@ if "$cygwin" || "$msys" ; then
197197
done
198198
fi
199199

200+
201+
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
202+
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
203+
200204
# Collect all arguments for the java command;
201205
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
202206
# shell script including quotes and variable substitutions, so put them in

src/main/java/org/opensearch/ad/AnomalyDetectorJobRunner.java

+8-37
Original file line numberDiff line numberDiff line change
@@ -307,38 +307,11 @@ private void runAnomalyDetectionJob(
307307
detectionStartTime.toEpochMilli(),
308308
executionStartTime.toEpochMilli()
309309
);
310-
client
311-
.execute(
312-
AnomalyResultAction.INSTANCE,
313-
request,
314-
ActionListener
315-
.wrap(
316-
response -> {
317-
indexAnomalyResult(
318-
jobParameter,
319-
lockService,
320-
lock,
321-
detectionStartTime,
322-
executionStartTime,
323-
response,
324-
recorder,
325-
detector
326-
);
327-
},
328-
exception -> {
329-
handleAdException(
330-
jobParameter,
331-
lockService,
332-
lock,
333-
detectionStartTime,
334-
executionStartTime,
335-
exception,
336-
recorder,
337-
detector
338-
);
339-
}
340-
)
341-
);
310+
client.execute(AnomalyResultAction.INSTANCE, request, ActionListener.wrap(response -> {
311+
indexAnomalyResult(jobParameter, lockService, lock, detectionStartTime, executionStartTime, response, recorder, detector);
312+
}, exception -> {
313+
handleAdException(jobParameter, lockService, lock, detectionStartTime, executionStartTime, exception, recorder, detector);
314+
}));
342315
} catch (Exception e) {
343316
indexAnomalyResultException(
344317
jobParameter,
@@ -672,11 +645,9 @@ private void releaseLock(Job jobParameter, LockService lockService, LockModel lo
672645
lockService
673646
.release(
674647
lock,
675-
ActionListener
676-
.wrap(
677-
released -> { log.info("Released lock for AD job {}", jobParameter.getName()); },
678-
exception -> { log.error("Failed to release lock for AD job: " + jobParameter.getName(), exception); }
679-
)
648+
ActionListener.wrap(released -> { log.info("Released lock for AD job {}", jobParameter.getName()); }, exception -> {
649+
log.error("Failed to release lock for AD job: " + jobParameter.getName(), exception);
650+
})
680651
);
681652
}
682653
}

src/main/java/org/opensearch/ad/AnomalyDetectorRunner.java

+3-5
Original file line numberDiff line numberDiff line change
@@ -84,11 +84,9 @@ public void executeDetector(
8484
listener.onResponse(Collections.emptyList());
8585
return;
8686
}
87-
ActionListener<EntityAnomalyResult> entityAnomalyResultListener = ActionListener
88-
.wrap(
89-
entityAnomalyResult -> { listener.onResponse(entityAnomalyResult.getAnomalyResults()); },
90-
e -> onFailure(e, listener, detector.getId())
91-
);
87+
ActionListener<EntityAnomalyResult> entityAnomalyResultListener = ActionListener.wrap(entityAnomalyResult -> {
88+
listener.onResponse(entityAnomalyResult.getAnomalyResults());
89+
}, e -> onFailure(e, listener, detector.getId()));
9290
MultiResponsesDelegateActionListener<EntityAnomalyResult> multiEntitiesResponseListener =
9391
new MultiResponsesDelegateActionListener<EntityAnomalyResult>(
9492
entityAnomalyResultListener,

src/main/java/org/opensearch/ad/caching/PriorityCache.java

+11-18
Original file line numberDiff line numberDiff line change
@@ -133,12 +133,9 @@ public PriorityCache(
133133
Duration inactiveEntityTtl = DateUtils.toDuration(checkpointTtl.get(settings));
134134

135135
this.inActiveEntities = createInactiveCache(inactiveEntityTtl, maxInactiveStates);
136-
clusterService
137-
.getClusterSettings()
138-
.addSettingsUpdateConsumer(
139-
checkpointTtl,
140-
it -> { this.inActiveEntities = createInactiveCache(DateUtils.toDuration(it), maxInactiveStates); }
141-
);
136+
clusterService.getClusterSettings().addSettingsUpdateConsumer(checkpointTtl, it -> {
137+
this.inActiveEntities = createInactiveCache(DateUtils.toDuration(it), maxInactiveStates);
138+
});
142139

143140
this.threadPool = threadPool;
144141
this.random = new Random(42);
@@ -163,19 +160,15 @@ public ModelState<EntityModel> get(String modelId, AnomalyDetector detector) {
163160
// during maintenance period, stop putting new entries
164161
if (!maintenanceLock.isLocked() && modelState == null) {
165162
if (ADEnabledSetting.isDoorKeeperInCacheEnabled()) {
166-
DoorKeeper doorKeeper = doorKeepers
167-
.computeIfAbsent(
168-
detectorId,
169-
id -> {
170-
// reset every 60 intervals
171-
return new DoorKeeper(
172-
TimeSeriesSettings.DOOR_KEEPER_FOR_CACHE_MAX_INSERTION,
173-
TimeSeriesSettings.DOOR_KEEPER_FALSE_POSITIVE_RATE,
174-
detector.getIntervalDuration().multipliedBy(TimeSeriesSettings.DOOR_KEEPER_MAINTENANCE_FREQ),
175-
clock
176-
);
177-
}
163+
DoorKeeper doorKeeper = doorKeepers.computeIfAbsent(detectorId, id -> {
164+
// reset every 60 intervals
165+
return new DoorKeeper(
166+
TimeSeriesSettings.DOOR_KEEPER_FOR_CACHE_MAX_INSERTION,
167+
TimeSeriesSettings.DOOR_KEEPER_FALSE_POSITIVE_RATE,
168+
detector.getIntervalDuration().multipliedBy(TimeSeriesSettings.DOOR_KEEPER_MAINTENANCE_FREQ),
169+
clock
178170
);
171+
});
179172

180173
// first hit, ignore
181174
// since door keeper may get reset during maintenance, it is possible

src/main/java/org/opensearch/ad/cluster/ADClusterEventListener.java

+3-13
Original file line numberDiff line numberDiff line change
@@ -72,19 +72,9 @@ public void clusterChanged(ClusterChangedEvent event) {
7272
if (delta.removed() || delta.added()) {
7373
LOG.info(NODE_CHANGED_MSG + ", node removed: {}, node added: {}", delta.removed(), delta.added());
7474
hashRing.addNodeChangeEvent();
75-
hashRing
76-
.buildCircles(
77-
delta,
78-
ActionListener
79-
.runAfter(
80-
ActionListener
81-
.wrap(
82-
hasRingBuildDone -> { LOG.info("Hash ring build result: {}", hasRingBuildDone); },
83-
e -> { LOG.error("Failed updating AD version hash ring", e); }
84-
),
85-
() -> inProgress.release()
86-
)
87-
);
75+
hashRing.buildCircles(delta, ActionListener.runAfter(ActionListener.wrap(hasRingBuildDone -> {
76+
LOG.info("Hash ring build result: {}", hasRingBuildDone);
77+
}, e -> { LOG.error("Failed updating AD version hash ring", e); }), () -> inProgress.release()));
8878
} else {
8979
inProgress.release();
9080
}

src/main/java/org/opensearch/ad/cluster/DailyCron.java

+11-20
Original file line numberDiff line numberDiff line change
@@ -58,26 +58,17 @@ public void run() {
5858
)
5959
)
6060
.setIndicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN);
61-
clientUtil
62-
.execute(
63-
DeleteByQueryAction.INSTANCE,
64-
deleteRequest,
65-
ActionListener
66-
.wrap(
67-
response -> {
68-
// if 0 docs get deleted, it means our query cannot find any matching doc
69-
LOG.info("{} " + CHECKPOINT_DELETED_MSG, response.getDeleted());
70-
},
71-
exception -> {
72-
if (exception instanceof IndexNotFoundException) {
73-
LOG.info(CHECKPOINT_NOT_EXIST_MSG);
74-
} else {
75-
// Gonna eventually delete in maintenance window.
76-
LOG.error(CANNOT_DELETE_OLD_CHECKPOINT_MSG, exception);
77-
}
78-
}
79-
)
80-
);
61+
clientUtil.execute(DeleteByQueryAction.INSTANCE, deleteRequest, ActionListener.wrap(response -> {
62+
// if 0 docs get deleted, it means our query cannot find any matching doc
63+
LOG.info("{} " + CHECKPOINT_DELETED_MSG, response.getDeleted());
64+
}, exception -> {
65+
if (exception instanceof IndexNotFoundException) {
66+
LOG.info(CHECKPOINT_NOT_EXIST_MSG);
67+
} else {
68+
// Gonna eventually delete in maintenance window.
69+
LOG.error(CANNOT_DELETE_OLD_CHECKPOINT_MSG, exception);
70+
}
71+
}));
8172
}
8273

8374
}

src/main/java/org/opensearch/ad/cluster/HashRing.java

+3-7
Original file line numberDiff line numberDiff line change
@@ -177,13 +177,9 @@ public void buildCirclesForRealtimeAD() {
177177
if (nodeChangeEvents.isEmpty()) {
178178
return;
179179
}
180-
buildCircles(
181-
ActionListener
182-
.wrap(
183-
r -> { LOG.debug("build circles on AD versions successfully"); },
184-
e -> { LOG.error("Failed to build circles on AD versions", e); }
185-
)
186-
);
180+
buildCircles(ActionListener.wrap(r -> { LOG.debug("build circles on AD versions successfully"); }, e -> {
181+
LOG.error("Failed to build circles on AD versions", e);
182+
}));
187183
}
188184

189185
/**

src/main/java/org/opensearch/ad/cluster/diskcleanup/ModelCheckpointIndexRetention.java

+6-6
Original file line numberDiff line numberDiff line change
@@ -66,12 +66,12 @@ public void run() {
6666
.lte(clock.millis() - defaultCheckpointTtl.toMillis())
6767
.format(ADCommonName.EPOCH_MILLIS_FORMAT)
6868
),
69-
ActionListener
70-
.wrap(
71-
response -> { cleanupBasedOnShardSize(defaultCheckpointTtl.minusDays(1)); },
72-
// The docs will be deleted in next scheduled windows. No need for retrying.
73-
exception -> LOG.error("delete docs by query fails for checkpoint index", exception)
74-
)
69+
ActionListener.wrap(response -> {
70+
cleanupBasedOnShardSize(defaultCheckpointTtl.minusDays(1));
71+
},
72+
// The docs will be deleted in next scheduled windows. No need for retrying.
73+
exception -> LOG.error("delete docs by query fails for checkpoint index", exception)
74+
)
7575
);
7676

7777
}

src/main/java/org/opensearch/ad/indices/ADIndexManagement.java

+3-3
Original file line numberDiff line numberDiff line change
@@ -101,9 +101,9 @@ public ADIndexManagement(
101101
historyRolloverPeriod = it;
102102
rescheduleRollover();
103103
});
104-
this.clusterService
105-
.getClusterSettings()
106-
.addSettingsUpdateConsumer(AD_RESULT_HISTORY_RETENTION_PERIOD, it -> { historyRetentionPeriod = it; });
104+
this.clusterService.getClusterSettings().addSettingsUpdateConsumer(AD_RESULT_HISTORY_RETENTION_PERIOD, it -> {
105+
historyRetentionPeriod = it;
106+
});
107107

108108
this.clusterService.getClusterSettings().addSettingsUpdateConsumer(AD_MAX_PRIMARY_SHARDS, it -> maxPrimaryShards = it);
109109
}

0 commit comments

Comments
 (0)