@@ -195,15 +195,24 @@ public V computeIfAbsent(ICacheKey<K> key, LoadAwareCacheLoader<ICacheKey<K>, V>
         // and it only has to be loaded one time, we should report one miss and the rest hits. But, if we do stats in
         // getValueFromTieredCache(),
         // we will see all misses. Instead, handle stats in computeIfAbsent().
-        Tuple<V, String> cacheValueTuple = getValueFromTieredCache(false).apply(key);
+        Tuple<V, String> cacheValueTuple;
+        CompletableFuture<Tuple<ICacheKey<K>, V>> future = null;
+        try (ReleasableLock ignore = readLock.acquire()) {
+            cacheValueTuple = getValueFromTieredCache(false).apply(key);
+            if (cacheValueTuple == null) {
+                // Only one of the threads will succeed putting a future into map for the same key.
+                // Rest will fetch existing future and wait on that to complete.
+                future = completableFutureMap.putIfAbsent(key, new CompletableFuture<>());
+            }
+        }
         List<String> heapDimensionValues = statsHolder.getDimensionsWithTierValue(key.dimensions, TIER_DIMENSION_VALUE_ON_HEAP);
         List<String> diskDimensionValues = statsHolder.getDimensionsWithTierValue(key.dimensions, TIER_DIMENSION_VALUE_DISK);

         if (cacheValueTuple == null) {
             // Add the value to the onHeap cache. We are calling computeIfAbsent which does another get inside.
             // This is needed as there can be many requests for the same key at the same time and we only want to load
             // the value once.
-            V value = compute(key, loader);
+            V value = compute(key, loader, future);
             // Handle stats
             if (loader.isLoaded()) {
                 // The value was just computed and added to the cache by this thread. Register a miss for the heap cache, and the disk cache
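Note: the comments in the added block describe a "single flight" style de-duplication: whichever thread wins completableFutureMap.putIfAbsent() becomes the loader, and every other thread asking for the same key waits on that same future. The following is a minimal, self-contained sketch of that idea only; the class and method names (SingleFlightLoader, load, inFlight) are made up for illustration and are not part of TieredSpilloverCache.

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Sketch: only the thread that wins putIfAbsent() computes the value;
// every other thread for the same key blocks on the winner's future.
public final class SingleFlightLoader<K, V> {

    private final Map<K, CompletableFuture<V>> inFlight = new ConcurrentHashMap<>();

    public V load(K key, Function<K, V> loader) {
        CompletableFuture<V> created = new CompletableFuture<>();
        CompletableFuture<V> existing = inFlight.putIfAbsent(key, created);
        if (existing != null) {
            // Another thread is already loading this key; wait for its result.
            return existing.join();
        }
        try {
            V value = loader.apply(key);
            created.complete(value);
            return value;
        } catch (Throwable t) {
            // Propagate the failure to any waiting threads as well.
            created.completeExceptionally(t);
            throw t;
        } finally {
            // Remove the entry so a later miss for the same key loads again.
            inFlight.remove(key);
        }
    }

    public static void main(String[] args) {
        SingleFlightLoader<String, String> loader = new SingleFlightLoader<>();
        System.out.println(loader.load("k1", k -> "value-for-" + k));
    }
}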
@@ -232,10 +241,8 @@ public V computeIfAbsent(ICacheKey<K> key, LoadAwareCacheLoader<ICacheKey<K>, V>
         return cacheValueTuple.v1();
     }

-    private V compute(ICacheKey<K> key, LoadAwareCacheLoader<ICacheKey<K>, V> loader) throws Exception {
-        // Only one of the threads will succeed putting a future into map for the same key.
-        // Rest will fetch existing future and wait on that to complete.
-        CompletableFuture<Tuple<ICacheKey<K>, V>> future = completableFutureMap.putIfAbsent(key, new CompletableFuture<>());
+    private V compute(ICacheKey<K> key, LoadAwareCacheLoader<ICacheKey<K>, V> loader, CompletableFuture<Tuple<ICacheKey<K>, V>> future)
+        throws Exception {
         // Handler to handle results post processing. Takes a tuple<key, value> or exception as an input and returns
         // the value. Also before returning value, puts the value in cache.
         BiFunction<Tuple<ICacheKey<K>, V>, Throwable, Void> handler = (pair, ex) -> {
@@ -253,7 +260,7 @@ private V compute(ICacheKey<K> key, LoadAwareCacheLoader<ICacheKey<K>, V> loader
                     logger.warn("Exception occurred while trying to compute the value", ex);
                 }
             }
-            completableFutureMap.remove(key); // Remove key from map as not needed anymore.
+            completableFutureMap.remove(key);// Remove key from map as not needed anymore.
             return null;
         };
         V value = null;
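Note: the handler above is the completion callback attached to the in-flight future; it receives either the computed tuple or the exception, stores the value, and drops the map entry once the future settles. A rough, self-contained sketch of that CompletableFuture.handle()-based shape follows; the names HandlerSketch, cache, and pending are illustrative only and are not taken from this PR.

import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiFunction;

// Sketch: a BiFunction receives either a result or a throwable, performs the
// post-processing (store the value, drop the in-flight entry), and is attached
// to the future with handle().
public final class HandlerSketch {

    public static void main(String[] args) {
        Map<String, String> cache = new ConcurrentHashMap<>();
        Map<String, CompletableFuture<String>> pending = new ConcurrentHashMap<>();

        String key = "k1";
        CompletableFuture<String> future = new CompletableFuture<>();
        pending.put(key, future);

        BiFunction<String, Throwable, Void> handler = (value, ex) -> {
            if (value != null) {
                cache.put(key, value);   // put the computed value into the cache
            } else if (ex != null) {
                System.err.println("Exception occurred while trying to compute the value: " + ex);
            }
            pending.remove(key);         // entry no longer needed once the future settles
            return null;
        };
        future.handle(handler);

        // The loading thread completes the future; the handler then runs.
        future.complete("loaded-value");
        System.out.println(cache.get(key)); // prints: loaded-value
    }
}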