From 6a67e426117e11160443e3ff35404030e307a13e Mon Sep 17 00:00:00 2001 From: "viktoriya.kutsarova" Date: Fri, 19 Sep 2025 16:11:15 +0300 Subject: [PATCH 1/3] Introduce HGETDEL command to the Spring Data Redis framework Signed-off-by: viktoriya.kutsarova --- Makefile | 2 +- .../DefaultStringRedisConnection.java | 11 ++ .../connection/DefaultedRedisConnection.java | 7 + .../connection/ReactiveHashCommands.java | 83 ++++++++++ .../redis/connection/RedisHashCommands.java | 13 ++ .../connection/StringRedisConnection.java | 12 ++ .../jedis/JedisClusterHashCommands.java | 14 ++ .../connection/jedis/JedisHashCommands.java | 9 ++ .../connection/lettuce/LettuceConnection.java | 1 + .../lettuce/LettuceHashCommands.java | 10 ++ .../lettuce/LettuceReactiveHashCommands.java | 14 ++ .../data/redis/core/BoundHashOperations.java | 9 ++ .../redis/core/DefaultHashOperations.java | 19 +++ .../core/DefaultReactiveHashOperations.java | 12 ++ .../data/redis/core/HashOperations.java | 12 ++ .../redis/core/ReactiveHashOperations.java | 13 ++ .../data/redis/core/RedisCommand.java | 1 + .../AbstractConnectionIntegrationTests.java | 77 +++++++++ .../connection/RedisConnectionUnitTests.java | 4 + .../jedis/JedisClusterConnectionTests.java | 89 ++++++++++- .../LettuceClusterConnectionTests.java | 86 ++++++++++- ...eReactiveHashCommandsIntegrationTests.java | 81 ++++++++++ ...DefaultHashOperationsIntegrationTests.java | 60 +++++++ ...eactiveHashOperationsIntegrationTests.java | 146 ++++++++++++++++++ 24 files changed, 781 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 1f6dee240f..b2b13942ff 100644 --- a/Makefile +++ b/Makefile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-VERSION?=7.4.0 +VERSION?=8.0.0 PROJECT?=redis GH_ORG?=redis SPRING_PROFILE?=ci diff --git a/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java b/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java index 754c2b8b29..b003a713a3 100644 --- a/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java @@ -23,6 +23,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.jspecify.annotations.NonNull; import org.jspecify.annotations.NullUnmarked; import org.jspecify.annotations.Nullable; import org.springframework.core.convert.converter.Converter; @@ -1609,6 +1610,11 @@ public List hVals(String key) { return convertAndReturn(delegate.hVals(serialize(key)), byteListToStringList); } + @Override + public List hGetDel(String key, String... fields) { + return convertAndReturn(delegate.hGetDel(serialize(key), serializeMulti(fields)), byteListToStringList); + } + @Override public Long incr(String key) { return incr(serialize(key)); @@ -2582,6 +2588,11 @@ public List hTtl(byte[] key, TimeUnit timeUnit, byte[]... fields) { return this.delegate.hTtl(key, timeUnit, fields); } + @Override + public List hGetDel(byte @NonNull [] key, byte @NonNull []... fields) { + return convertAndReturn(delegate.hGetDel(key, fields), Converters.identityConverter()); + } + public @Nullable List applyExpiration(String key, org.springframework.data.redis.core.types.Expiration expiration, ExpirationOptions options, String... 
fields) { diff --git a/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java b/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java index b5976bc082..79804ae1f1 100644 --- a/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java @@ -1594,6 +1594,13 @@ default List hpTtl(byte[] key, byte[]... fields) { return hashCommands().hpTtl(key, fields); } + /** @deprecated in favor of {@link RedisConnection#hashCommands()}}. */ + @Override + @Deprecated + default List hGetDel(byte[] key, byte[]... fields) { + return hashCommands().hGetDel(key, fields); + } + /** @deprecated in favor of {@link RedisConnection#hashCommands()}}. */ @Override @Deprecated diff --git a/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java b/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java index 44ca7e8429..cc865ff70e 100644 --- a/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java @@ -58,6 +58,7 @@ public interface ReactiveHashCommands { * * @author Christoph Strobl * @author Tihomir Mateev + * @author Viktoriya Kutsarova */ class HashFieldsCommand extends KeyCommand { @@ -1255,4 +1256,86 @@ default Flux hpTtl(ByteBuffer key, List fields) { */ Flux> hpTtl(Publisher commands); + + /** + * {@literal HGETDEL} {@link Command}. + * + * @author Viktoriya Kutsarova + * @see Redis Documentation: HGETDEL + */ + class HGetDelCommand extends HashFieldsCommand { + + private HGetDelCommand(@Nullable ByteBuffer key, List fields) { + super(key, fields); + } + + /** + * Creates a new {@link HGetDelCommand} given a {@link ByteBuffer field name}. + * + * @param field must not be {@literal null}. 
+ * @return a new {@link HGetDelCommand} for a {@link ByteBuffer field name}. + */ + public static HGetDelCommand field(ByteBuffer field) { + + Assert.notNull(field, "Field must not be null"); + + return new HGetDelCommand(null, Collections.singletonList(field)); + } + + /** + * Creates a new {@link HGetDelCommand} given a {@link Collection} of field names. + * + * @param fields must not be {@literal null}. + * @return a new {@link HGetDelCommand} for a {@link Collection} of field names. + */ + public static HGetDelCommand fields(Collection fields) { + + Assert.notNull(fields, "Fields must not be null"); + + return new HGetDelCommand(null, new ArrayList<>(fields)); + } + + /** + * Applies the hash {@literal key}. Constructs a new command instance with all previously configured properties. + * + * @param key must not be {@literal null}. + * @return a new {@link HGetDelCommand} with {@literal key} applied. + */ + public HGetDelCommand from(ByteBuffer key) { + + Assert.notNull(key, "Key must not be null"); + + return new HGetDelCommand(key, getFields()); + } + } + + + /** + * Get and delete the value of one or more {@literal fields} from hash at {@literal key}. Values are returned in the + * order of the requested keys. Absent field values are represented using {@literal null} in the resulting {@link List}. + * When the last field is deleted, the key will also be deleted. + * + * @param key must not be {@literal null}. + * @param fields must not be {@literal null}. + * @return never {@literal null}. + * @see Redis Documentation: HGETDEL + */ + default Mono> hGetDel(ByteBuffer key, Collection fields) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(fields, "Fields must not be null"); + + return hGetDel(Mono.just(HGetDelCommand.fields(fields).from(key))).next().map(MultiValueResponse::getOutput); + } + + /** + * Get and delete the value of one or more {@literal fields} from hash at {@literal key}. 
Values are returned in the + * order of the requested keys. Absent field values are represented using {@literal null} in the resulting {@link List}. + * When the last field is deleted, the key will also be deleted. + * + * @param commands must not be {@literal null}. + * @return never {@literal null}. + * @see Redis Documentation: HGETDEL + */ + Flux> hGetDel(Publisher commands); } diff --git a/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java b/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java index 1fda940e13..ead6cb88f9 100644 --- a/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java @@ -34,6 +34,7 @@ * @author Christoph Strobl * @author Mark Paluch * @author Tihomir Mateev + * @author Viktoriya Kutsarova * @see RedisCommands */ @NullUnmarked @@ -541,4 +542,16 @@ default List hExpireAt(byte @NonNull [] key, long unixTime, byte @NonNull * @since 3.5 */ List<@NonNull Long> hpTtl(byte @NonNull [] key, byte @NonNull [] @NonNull... fields); + + /** + * Get and delete the value of one or more {@code fields} from hash at {@code key}. Values are returned in the order of + * the requested keys. Absent field values are represented using {@literal null} in the resulting {@link List}. + * When the last field is deleted, the key will also be deleted. + * + * @param key must not be {@literal null}. + * @param fields must not be {@literal null}. + * @return empty {@link List} if key does not exist. {@literal null} when used in pipeline / transaction. + * @see Redis Documentation: HGETDEL + */ + List hGetDel(byte @NonNull [] key, byte @NonNull [] @NonNull... 
fields); } diff --git a/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java b/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java index 9e0392e46d..52877f99f3 100644 --- a/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java @@ -2552,6 +2552,18 @@ List hpExpireAt(@NonNull String key, long unixTimeInMillis, ExpirationOpti */ List hpTtl(@NonNull String key, @NonNull String @NonNull... fields); + /** + * Get and delete the value of one or more {@code fields} from hash at {@code key}. When the last field is deleted, + * the key will also be deleted. + * + * @param key must not be {@literal null}. + * @param fields must not be {@literal null}. + * @return empty {@link List} if key does not exist. {@literal null} when used in pipeline / transaction. + * @see Redis Documentation: HGETDEL + * @see RedisHashCommands#hGetDel(byte[], byte[]...) + */ + List hGetDel(@NonNull String key, @NonNull String @NonNull... fields); + // ------------------------------------------------------------------------- // Methods dealing with HyperLogLog // ------------------------------------------------------------------------- diff --git a/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java b/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java index 8f289eb7d2..65d25d90fe 100644 --- a/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java @@ -414,6 +414,20 @@ public List hpTtl(byte[] key, byte[]... fields) { } } + @Override + public List hGetDel(byte[] key, byte[]... 
fields) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(fields, "Fields must not be null"); + + try { + return connection.getCluster().hgetdel(key, fields); + } catch (Exception ex) { + throw convertJedisAccessException(ex); + } + + } + @Nullable @Override public Long hStrLen(byte[] key, byte[] field) { diff --git a/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java b/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java index 32a5d8b54d..6e392d1586 100644 --- a/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java @@ -332,6 +332,15 @@ protected void doClose() { return connection.invoke().just(Jedis::hpttl, PipelineBinaryCommands::hpttl, key, fields); } + @Override + public List hGetDel(byte @NonNull [] key, byte @NonNull [] @NonNull... fields) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(fields, "Fields must not be null"); + + return connection.invoke().just(Jedis::hgetdel, PipelineBinaryCommands::hgetdel, key, fields); + } + @Nullable @Override public Long hStrLen(byte[] key, byte[] field) { diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnection.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnection.java index e8cb00bb34..e1d8600bdb 100644 --- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnection.java @@ -1243,6 +1243,7 @@ static class TypeHints { COMMAND_OUTPUT_TYPE_MAPPING.put(ZRANGEBYSCORE, ValueListOutput.class); COMMAND_OUTPUT_TYPE_MAPPING.put(ZREVRANGE, ValueListOutput.class); COMMAND_OUTPUT_TYPE_MAPPING.put(ZREVRANGEBYSCORE, ValueListOutput.class); + COMMAND_OUTPUT_TYPE_MAPPING.put(HGETDEL, ValueListOutput.class); // BOOLEAN 
COMMAND_OUTPUT_TYPE_MAPPING.put(EXISTS, BooleanOutput.class); diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java index 54e7c99e07..0af9259d9c 100644 --- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java @@ -265,6 +265,16 @@ public List hpTtl(byte @NonNull [] key, byte @NonNull [] @NonNull... field return connection.invoke().fromMany(RedisHashAsyncCommands::hpttl, key, fields).toList(); } + @Override + public List hGetDel(byte @NonNull [] key, byte @NonNull []... fields) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(fields, "Fields must not be null"); + + return connection.invoke().fromMany(RedisHashAsyncCommands::hgetdel, key, fields) + .toList(source -> source.getValueOrElse(null)); + } + /** * @param key * @param cursorId diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java index 7f764484e5..7656c8ad69 100644 --- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java @@ -356,6 +356,20 @@ public Flux> hpTtl(Publisher> hGetDel(Publisher commands) { + + return connection.execute(cmd -> Flux.from(commands).concatMap(command -> { + + Assert.notNull(command.getKey(), "Key must not be null"); + Assert.notNull(command.getFields(), "Fields must not be null"); + + return cmd.hgetdel(command.getKey(), command.getFields().toArray(ByteBuffer[]::new)).collectList() + .map(value -> new MultiValueResponse<>(command, value.stream().map(v -> v.getValueOrElse(null)) + 
.collect(Collectors.toList()))); + })); + } + private static Map.Entry toEntry(KeyValue kv) { return new Entry() { diff --git a/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java b/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java index 45a854efaf..0b771a67a4 100644 --- a/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java @@ -246,4 +246,13 @@ default BoundHashFieldExpirationOperations hashExpiration(@NonNull Collectio @NonNull RedisOperations getOperations(); + /** + * Get and remove the value for given {@code hashFields} from the hash at the bound key. Values are in the order of the + * requested hash fields. Absent field values are represented using {@literal null} in the resulting {@link List}. + * + * @param hashFields must not be {@literal null}. + * @return {@literal null} when used in pipeline / transaction. + * @since 4.0 + */ + List getAndDelete(@NonNull Collection<@NonNull HK> hashFields); } diff --git a/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java b/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java index 6712257b05..7d9e104e3c 100644 --- a/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java @@ -195,6 +195,25 @@ public List multiGet(@NonNull K key, @NonNull Collection<@NonNull HK> fields return deserializeHashValues(rawValues); } + @Override + public List getAndDelete(@NonNull K key, @NonNull Collection<@NonNull HK> fields) { + + if (fields.isEmpty()) { + return Collections.emptyList(); + } + + byte[] rawKey = rawKey(key); + byte[][] rawHashKeys = new byte[fields.size()][]; + int counter = 0; + for (@NonNull + HK hashKey : fields) { + rawHashKeys[counter++] = rawHashKey(hashKey); + } + List rawValues = execute(connection -> 
connection.hashCommands().hGetDel(rawKey, rawHashKeys)); + + return deserializeHashValues(rawValues); + } + @Override public void put(@NonNull K key, @NonNull HK hashKey, HV value) { diff --git a/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java b/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java index e569e87497..d05f6ea336 100644 --- a/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java @@ -111,6 +111,18 @@ public Mono> multiGet(H key, Collection hashKeys) { .flatMap(hks -> hashCommands.hMGet(rawKey(key), hks)).map(this::deserializeHashValues)); } + @Override + public Mono> getAndDelete(H key, Collection hashKeys) { + Assert.notNull(key, "Key must not be null"); + Assert.notNull(hashKeys, "Hash keys must not be null"); + Assert.notEmpty(hashKeys, "Hash keys must not be empty"); + + return createMono(hashCommands -> Flux.fromIterable(hashKeys) // + .map(this::rawHashKey) // + .collectList() // + .flatMap(hks -> hashCommands.hGetDel(rawKey(key), hks)).map(this::deserializeHashValues)); + } + @Override public Mono increment(H key, HK hashKey, long delta) { diff --git a/src/main/java/org/springframework/data/redis/core/HashOperations.java b/src/main/java/org/springframework/data/redis/core/HashOperations.java index 163a06f7dd..2299796e64 100644 --- a/src/main/java/org/springframework/data/redis/core/HashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/HashOperations.java @@ -38,6 +38,7 @@ * @author Christoph Strobl * @author Ninad Divadkar * @author Tihomir Mateev + * @author Viktoriya Kutsarova */ @NullUnmarked public interface HashOperations { @@ -79,6 +80,17 @@ public interface HashOperations { */ List multiGet(@NonNull H key, @NonNull Collection<@NonNull HK> hashKeys); + /** + * Get and remove the value for given {@code hashKeys} from hash at {@code 
key}. Values are in the order of the + * requested keys. Absent field values are represented using {@literal null} in the resulting {@link List}. + * + * @param key must not be {@literal null}. + * @param hashKeys must not be {@literal null}. + * @return {@literal null} when used in pipeline / transaction. + * @since 4.0 + */ + List getAndDelete(@NonNull H key, @NonNull Collection<@NonNull HK> hashKeys); + /** * Increment {@code value} of a hash {@code hashKey} by the given {@code delta}. * diff --git a/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java b/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java index 7d9e6c7776..f156d7947a 100644 --- a/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java @@ -41,6 +41,7 @@ * * @author Mark Paluch * @author Christoph Strobl + * @author Viktoriya Kutsarova * @since 2.0 */ public interface ReactiveHashOperations { @@ -81,6 +82,18 @@ public interface ReactiveHashOperations { * @return */ Mono> multiGet(H key, Collection hashKeys); + + /** + * Get and remove the value for given {@code hashKeys} from hash at {@code key}. Values are in the order of the + * requested keys. Absent field values are represented using {@literal null} in the resulting {@link List}. + * When the last field is deleted, the key will also be deleted. + * + * @param key must not be {@literal null}. + * @param hashKeys must not be {@literal null}. + * @return never {@literal null}. + * @since 4.0 + */ + Mono> getAndDelete(H key, Collection hashKeys); /** * Increment {@code value} of a hash {@code hashKey} by the given {@code delta}. 
diff --git a/src/main/java/org/springframework/data/redis/core/RedisCommand.java b/src/main/java/org/springframework/data/redis/core/RedisCommand.java index 79b40842ed..c0bc3f8a4d 100644 --- a/src/main/java/org/springframework/data/redis/core/RedisCommand.java +++ b/src/main/java/org/springframework/data/redis/core/RedisCommand.java @@ -147,6 +147,7 @@ public enum RedisCommand { HMSET("w", 3), // HPOP("rw", 3), HSET("w", 3, 3), // + HSETEX("w", 3), // HSETNX("w", 3, 3), // HVALS("r", 1, 1), // HEXPIRE("w", 5), // diff --git a/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java b/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java index 2402767239..57c8156107 100644 --- a/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java @@ -3725,6 +3725,83 @@ public void hTtlReturnsMinusTwoWhenFieldOrKeyMissing() { verifyResults(Arrays.asList(new Object[] { List.of(-2L), List.of(-2L) })); } + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelReturnsValueAndDeletesField() { + + actual.add(connection.hSet("hash-hgetdel", "field-1", "value-1")); + actual.add(connection.hSet("hash-hgetdel", "field-2", "value-2")); + actual.add(connection.hGetDel("hash-hgetdel", "field-1")); + actual.add(connection.hExists("hash-hgetdel", "field-1")); + actual.add(connection.hExists("hash-hgetdel", "field-2")); + + verifyResults(Arrays.asList(Boolean.TRUE, Boolean.TRUE, List.of("value-1"), Boolean.FALSE, Boolean.TRUE)); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelReturnsNullWhenFieldDoesNotExist() { + + actual.add(connection.hSet("hash-hgetdel", "field-1", "value-1")); + actual.add(connection.hGetDel("hash-hgetdel", "missing-field")); + actual.add(connection.hExists("hash-hgetdel", "field-1")); + + 
verifyResults(Arrays.asList(Boolean.TRUE, Arrays.asList((Object) null), Boolean.TRUE)); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelReturnsNullWhenKeyDoesNotExist() { + + actual.add(connection.hGetDel("missing-hash", "field-1")); + + verifyResults(Arrays.asList(Arrays.asList((Object) null))); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelMultipleFieldsReturnsValuesAndDeletesFields() { + + actual.add(connection.hSet("hash-hgetdel", "field-1", "value-1")); + actual.add(connection.hSet("hash-hgetdel", "field-2", "value-2")); + actual.add(connection.hSet("hash-hgetdel", "field-3", "value-3")); + actual.add(connection.hGetDel("hash-hgetdel", "field-1", "field-2")); + actual.add(connection.hExists("hash-hgetdel", "field-1")); + actual.add(connection.hExists("hash-hgetdel", "field-2")); + actual.add(connection.hExists("hash-hgetdel", "field-3")); + + verifyResults(Arrays.asList(Boolean.TRUE, Boolean.TRUE, Boolean.TRUE, + Arrays.asList("value-1", "value-2"), + Boolean.FALSE, Boolean.FALSE, Boolean.TRUE)); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelMultipleFieldsWithNonExistentFields() { + + actual.add(connection.hSet("hash-hgetdel", "field-1", "value-1")); + actual.add(connection.hGetDel("hash-hgetdel", "field-1", "missing-field")); + actual.add(connection.hExists("hash-hgetdel", "field-1")); + + verifyResults(Arrays.asList(Boolean.TRUE, + Arrays.asList("value-1", null), + Boolean.FALSE)); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelDeletesKeyWhenAllFieldsRemoved() { + + actual.add(connection.hSet("hash-hgetdel", "field-1", "value-1")); + actual.add(connection.hSet("hash-hgetdel", "field-2", "value-2")); + actual.add(connection.hGetDel("hash-hgetdel", "field-1", "field-2")); + actual.add(connection.exists("hash-hgetdel")); + + verifyResults(Arrays.asList(Boolean.TRUE, Boolean.TRUE, + Arrays.asList("value-1", "value-2"), + Boolean.FALSE)); + } + 
@Test // DATAREDIS-694 void touchReturnsNrOfKeysTouched() { diff --git a/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java b/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java index 068e87444a..30dde87480 100644 --- a/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java +++ b/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java @@ -445,6 +445,10 @@ public List hMGet(byte[] key, byte[]... fields) { return delegate.hMGet(key, fields); } + public List hGetDel(byte[] key, byte[]... fields) { + return delegate.hGetDel(key, fields); + } + public Long zRem(byte[] key, byte[]... values) { return delegate.zRem(key, values); } diff --git a/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java b/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java index b4229d9522..c3b2ad3b01 100644 --- a/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java +++ b/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java @@ -16,7 +16,6 @@ package org.springframework.data.redis.connection.jedis; import static org.assertj.core.api.Assertions.*; -import static org.assertj.core.data.Offset.*; import static org.assertj.core.data.Offset.offset; import static org.springframework.data.redis.connection.BitFieldSubCommands.*; import static org.springframework.data.redis.connection.BitFieldSubCommands.BitFieldIncrBy.Overflow.*; @@ -1239,13 +1238,99 @@ public void hTtlReturnsMinusTwoWhenFieldOrKeyMissing() { assertThat(clusterConnection.hashCommands().hTtl(KEY_3_BYTES, KEY_2_BYTES)).contains(-2L); } + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelReturnsValueAndDeletesField() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + + List result 
= clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_2_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isFalse(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_3_BYTES)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelReturnsNullWhenFieldDoesNotExist() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + List result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_3_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isNull(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelReturnsNullWhenKeyDoesNotExist() { + + List result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_2_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isNull(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelMultipleFieldsReturnsValuesAndDeletesFields() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + nativeConnection.hset(KEY_1, "field3", "value3"); + + List result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_2_BYTES, KEY_3_BYTES); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(result.get(1)).isEqualTo(VALUE_2_BYTES); + + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isFalse(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_3_BYTES)).isFalse(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, "field3".getBytes())).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelMultipleFieldsWithNonExistentFields() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + List 
result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_2_BYTES, KEY_3_BYTES); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(result.get(1)).isNull(); + + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isFalse(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelDeletesKeyWhenAllFieldsRemoved() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + + List result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_2_BYTES, KEY_3_BYTES); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(result.get(1)).isEqualTo(VALUE_2_BYTES); + + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isFalse(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_3_BYTES)).isFalse(); + } + @Test // DATAREDIS-315 public void hValsShouldRetrieveValuesCorrectly() { nativeConnection.hset(KEY_1_BYTES, KEY_2_BYTES, VALUE_1_BYTES); nativeConnection.hset(KEY_1_BYTES, KEY_3_BYTES, VALUE_2_BYTES); - assertThat(clusterConnection.hVals(KEY_1_BYTES)).contains(VALUE_1_BYTES, VALUE_2_BYTES); + assertThat(clusterConnection.hashCommands().hVals(KEY_1_BYTES)).contains(VALUE_1_BYTES, VALUE_2_BYTES); } @Test // DATAREDIS-315 diff --git a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java index 5590b18275..48e708b796 100644 --- a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java +++ b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java @@ -16,7 +16,6 @@ package org.springframework.data.redis.connection.lettuce; import static org.assertj.core.api.Assertions.*; -import static 
org.assertj.core.data.Offset.*; import static org.assertj.core.data.Offset.offset; import static org.springframework.data.redis.connection.BitFieldSubCommands.*; import static org.springframework.data.redis.connection.BitFieldSubCommands.BitFieldIncrBy.Overflow.*; @@ -1304,6 +1303,91 @@ public void hTtlReturnsMinusTwoWhenFieldOrKeyMissing() { assertThat(clusterConnection.hashCommands().hTtl(KEY_3_BYTES, KEY_2_BYTES)).contains(-2L); } + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelReturnsValueAndDeletesField() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + + List result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_2_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isFalse(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_3_BYTES)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelReturnsNullWhenFieldDoesNotExist() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + List result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_3_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isNull(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelReturnsNullWhenKeyDoesNotExist() { + + List result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_2_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isNull(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelMultipleFieldsReturnsValuesAndDeletesFields() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + nativeConnection.hset(KEY_1, "field3", "value3"); + + List result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_2_BYTES, 
KEY_3_BYTES); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(result.get(1)).isEqualTo(VALUE_2_BYTES); + + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isFalse(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_3_BYTES)).isFalse(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, "field3".getBytes())).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelMultipleFieldsWithNonExistentFields() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + List result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_2_BYTES, KEY_3_BYTES); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(result.get(1)).isNull(); + + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isFalse(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + public void hGetDelDeletesKeyWhenAllFieldsRemoved() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + + List result = clusterConnection.hashCommands().hGetDel(KEY_1_BYTES, KEY_2_BYTES, KEY_3_BYTES); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(result.get(1)).isEqualTo(VALUE_2_BYTES); + + assertThat(clusterConnection.exists(KEY_1_BYTES)).isFalse(); + } + @Test // DATAREDIS-315 public void hValsShouldRetrieveValuesCorrectly() { diff --git a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java index 0c8d8f1506..1573a514d9 100644 --- a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java @@ -339,4 +339,85 
@@ void hPersistShouldPersistFields() { assertThat(nativeCommands.httl(KEY_1, FIELD_1, FIELD_2)).allSatisfy(it -> assertThat(it).isEqualTo(-1L)); } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void hGetDelShouldReturnValueAndDeleteField() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + nativeCommands.hset(KEY_1, FIELD_2, VALUE_2); + + connection.hashCommands().hGetDel(KEY_1_BBUFFER, Collections.singletonList(FIELD_1_BBUFFER)).as(StepVerifier::create) + .expectNext(Collections.singletonList(VALUE_1_BBUFFER)).verifyComplete(); + + assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isFalse(); + assertThat(nativeCommands.hexists(KEY_1, FIELD_2)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void hGetDelShouldReturnNullForNonExistentField() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + + connection.hashCommands().hGetDel(KEY_1_BBUFFER, Collections.singletonList(FIELD_2_BBUFFER)).as(StepVerifier::create) + .expectNext(Collections.singletonList(null)).verifyComplete(); + + assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void hGetDelShouldReturnNullForNonExistentKey() { + + connection.hashCommands().hGetDel(KEY_1_BBUFFER, Collections.singletonList(FIELD_1_BBUFFER)).as(StepVerifier::create) + .expectNext(Collections.singletonList(null)).verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void hGetDelShouldHandleMultipleFields() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + nativeCommands.hset(KEY_1, FIELD_2, VALUE_2); + nativeCommands.hset(KEY_1, FIELD_3, VALUE_3); + + connection.hashCommands().hGetDel(KEY_1_BBUFFER, Arrays.asList(FIELD_1_BBUFFER, FIELD_2_BBUFFER)) + .as(StepVerifier::create) + .expectNext(Arrays.asList(VALUE_1_BBUFFER, VALUE_2_BBUFFER)) + .verifyComplete(); + + assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isFalse(); + assertThat(nativeCommands.hexists(KEY_1, FIELD_2)).isFalse(); + 
assertThat(nativeCommands.hexists(KEY_1, FIELD_3)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void hGetDelShouldHandleMultipleFieldsWithNonExistent() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + + connection.hashCommands().hGetDel(KEY_1_BBUFFER, Arrays.asList(FIELD_1_BBUFFER, FIELD_2_BBUFFER)) + .as(StepVerifier::create) + .expectNext(Arrays.asList(VALUE_1_BBUFFER, null)) + .verifyComplete(); + + assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isFalse(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void hGetDelShouldDeleteKeyWhenAllFieldsRemoved() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + nativeCommands.hset(KEY_1, FIELD_2, VALUE_2); + + connection.hashCommands().hGetDel(KEY_1_BBUFFER, Arrays.asList(FIELD_1_BBUFFER, FIELD_2_BBUFFER)) + .as(StepVerifier::create) + .expectNext(Arrays.asList(VALUE_1_BBUFFER, VALUE_2_BBUFFER)) + .verifyComplete(); + + assertThat(nativeCommands.hlen(KEY_1)).isEqualTo(0L); + } } diff --git a/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java b/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java index 174a49ec78..ddc4528e87 100644 --- a/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java @@ -405,4 +405,64 @@ void testPersistAndGetExpireMillis() { assertThat(expirations.expirationOf(key2).isPersistent()).isTrue(); }); } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void testGetAndDelete() { + + K key = keyFactory.instance(); + HK key1 = hashKeyFactory.instance(); + HV val1 = hashValueFactory.instance(); + HK key2 = hashKeyFactory.instance(); + HV val2 = hashValueFactory.instance(); + HK key3 = hashKeyFactory.instance(); + HV val3 = hashValueFactory.instance(); + + // Set up test data + hashOps.put(key, key1, val1); + hashOps.put(key, key2, val2); + 
hashOps.put(key, key3, val3); + + // Test single field get and delete + List result = hashOps.getAndDelete(key, List.of(key1)); + assertThat(result).hasSize(1).containsExactly(val1); + assertThat(hashOps.hasKey(key, key1)).isFalse(); // Field should be deleted + assertThat(hashOps.hasKey(key, key2)).isTrue(); // Other fields should remain + + // Test multiple fields get and delete + List multiResult = hashOps.getAndDelete(key, List.of(key2, key3)); + assertThat(multiResult).hasSize(2).containsExactly(val2, val3); + assertThat(hashOps.hasKey(key, key2)).isFalse(); // Both fields should be deleted + assertThat(hashOps.hasKey(key, key3)).isFalse(); + assertThat(hashOps.size(key)).isEqualTo(0L); // Hash should be empty + + // Test get and delete on non-existent field + HK nonExistentKey = hashKeyFactory.instance(); + List emptyResult = hashOps.getAndDelete(key, List.of(nonExistentKey)); + assertThat(emptyResult).hasSize(1); + assertThat(emptyResult.get(0)).isNull(); + + // Test get and delete on non-existent hash + K nonExistentHash = keyFactory.instance(); + List nonExistentHashResult = hashOps.getAndDelete(nonExistentHash, List.of(key1)); + assertThat(nonExistentHashResult).hasSize(1); + assertThat(nonExistentHashResult.get(0)).isNull(); + + // Test that key is deleted when all fields are removed + K keyForDeletion = keyFactory.instance(); + HK field1 = hashKeyFactory.instance(); + HK field2 = hashKeyFactory.instance(); + HV value1 = hashValueFactory.instance(); + HV value2 = hashValueFactory.instance(); + + // Set up hash with two fields + hashOps.put(keyForDeletion, field1, value1); + hashOps.put(keyForDeletion, field2, value2); + assertThat(redisTemplate.hasKey(keyForDeletion)).isTrue(); // Key should exist + + // Delete all fields at once - key should be deleted + List allFieldsResult = hashOps.getAndDelete(keyForDeletion, List.of(field1, field2)); + assertThat(allFieldsResult).hasSize(2).containsExactly(value1, value2); + 
assertThat(redisTemplate.hasKey(keyForDeletion)).isFalse(); // Key should be deleted when last field is removed + } } diff --git a/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java b/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java index e40dd63ec7..717d4c39fe 100644 --- a/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java @@ -685,4 +685,150 @@ private void putAll(K key, HK hashkey1, HV hashvalue1, HK hashkey2, HV hashvalue .expectNext(true) // .verifyComplete(); } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void getAndDeleteSingleKey() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + HV hashvalue1 = hashValueFactory.instance(); + HK hashkey2 = hashKeyFactory.instance(); + HV hashvalue2 = hashValueFactory.instance(); + + putAll(key, hashkey1, hashvalue1, hashkey2, hashvalue2); + + hashOperations.getAndDelete(key, Arrays.asList(hashkey1)).as(StepVerifier::create) + .consumeNextWith(actual -> { + assertThat(actual).hasSize(1).containsExactly(hashvalue1); + }) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey1).as(StepVerifier::create) + .expectNext(false) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey2).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void getAndDeletePartialKeys() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + HV hashvalue1 = hashValueFactory.instance(); + HK hashkey2 = 
hashKeyFactory.instance(); + HV hashvalue2 = hashValueFactory.instance(); + + putAll(key, hashkey1, hashvalue1, hashkey2, hashvalue2); + + hashOperations.getAndDelete(key, Arrays.asList(hashkey1)).as(StepVerifier::create) + .consumeNextWith(actual -> { + assertThat(actual).hasSize(1).containsExactly(hashvalue1); + }) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey1).as(StepVerifier::create) + .expectNext(false) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey2).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.get(key, hashkey2).as(StepVerifier::create) + .expectNext(hashvalue2) + .verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void getAndDeleteNonExistentKeys() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + HV hashvalue1 = hashValueFactory.instance(); + HK hashkey2 = hashKeyFactory.instance(); + HV hashvalue2 = hashValueFactory.instance(); + HK nonExistentKey = hashKeyFactory.instance(); + + putAll(key, hashkey1, hashvalue1, hashkey2, hashvalue2); + + hashOperations.getAndDelete(key, Arrays.asList(nonExistentKey)).as(StepVerifier::create) + .consumeNextWith(actual -> { + assertThat(actual).hasSize(1).containsExactly((HV) null); + }) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey1).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey2).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void getAndDeleteKeyDeletionBehavior() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + HV hashvalue1 = hashValueFactory.instance(); + HK hashkey2 
= hashKeyFactory.instance(); + HV hashvalue2 = hashValueFactory.instance(); + + putAll(key, hashkey1, hashvalue1, hashkey2, hashvalue2); + + redisTemplate.hasKey(key).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.getAndDelete(key, Arrays.asList(hashkey1, hashkey2)).as(StepVerifier::create) + .consumeNextWith(actual -> { + assertThat(actual).hasSize(2).containsSequence(hashvalue1, hashvalue2); + }) + .verifyComplete(); + + hashOperations.size(key).as(StepVerifier::create) + .expectNext(0L) + .verifyComplete(); + + redisTemplate.hasKey(key).as(StepVerifier::create) + .expectNext(false) + .verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETDEL") + void getAndDeleteFromNonExistentHash() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K nonExistentKey = keyFactory.instance(); + HK hashkey = hashKeyFactory.instance(); + + hashOperations.getAndDelete(nonExistentKey, Arrays.asList(hashkey)).as(StepVerifier::create) + .consumeNextWith(actual -> { + assertThat(actual).hasSize(1).containsExactly((HV) null); + }) + .verifyComplete(); + } } From a69b2f2b9b1a251df1f75fa8ca3231a71b858c60 Mon Sep 17 00:00:00 2001 From: "viktoriya.kutsarova" Date: Thu, 25 Sep 2025 15:36:10 +0300 Subject: [PATCH 2/3] Introduce HGETEX command to the Spring Data Redis framework Signed-off-by: viktoriya.kutsarova --- .../DefaultStringRedisConnection.java | 10 ++ .../connection/DefaultedRedisConnection.java | 7 ++ .../connection/ReactiveHashCommands.java | 72 +++++++++++ .../redis/connection/RedisHashCommands.java | 14 +++ .../connection/StringRedisConnection.java | 12 ++ .../jedis/JedisClusterHashCommands.java | 13 ++ .../connection/jedis/JedisConverters.java | 42 +++++-- .../connection/jedis/JedisHashCommands.java | 10 ++ .../connection/lettuce/LettuceConnection.java | 3 +- .../connection/lettuce/LettuceConverters.java | 31 +++++ 
.../lettuce/LettuceHashCommands.java | 12 ++ .../lettuce/LettuceReactiveHashCommands.java | 14 +++ .../data/redis/core/BoundHashOperations.java | 12 ++ .../redis/core/DefaultHashOperations.java | 20 ++++ .../core/DefaultReactiveHashOperations.java | 13 ++ .../data/redis/core/HashOperations.java | 14 +++ .../redis/core/ReactiveHashOperations.java | 12 ++ .../AbstractConnectionIntegrationTests.java | 63 ++++++++++ .../connection/RedisConnectionUnitTests.java | 4 + .../jedis/JedisClusterConnectionTests.java | 67 +++++++++++ .../LettuceClusterConnectionTests.java | 67 +++++++++++ ...eReactiveHashCommandsIntegrationTests.java | 70 +++++++++++ ...DefaultHashOperationsIntegrationTests.java | 112 +++++++++++++++++- ...eactiveHashOperationsIntegrationTests.java | 78 ++++++++++++ 24 files changed, 760 insertions(+), 12 deletions(-) diff --git a/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java b/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java index b003a713a3..267def7896 100644 --- a/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java @@ -1615,6 +1615,11 @@ public List hGetDel(String key, String... fields) { return convertAndReturn(delegate.hGetDel(serialize(key), serializeMulti(fields)), byteListToStringList); } + @Override + public List hGetEx(String key, Expiration expiration, String... fields) { + return convertAndReturn(delegate.hGetEx(serialize(key), expiration, serializeMulti(fields)), byteListToStringList); + } + @Override public Long incr(String key) { return incr(serialize(key)); @@ -2593,6 +2598,11 @@ public List hGetDel(@NotNull byte[] key, @NotNull byte[]... fields) { return convertAndReturn(delegate.hGetDel(key, fields), Converters.identityConverter()); } + @Override + public List hGetEx(@NotNull byte[] key, Expiration expiration, @NotNull byte[]... 
fields) { + return convertAndReturn(delegate.hGetEx(key, expiration, fields), Converters.identityConverter()); + } + public @Nullable List applyExpiration(String key, org.springframework.data.redis.core.types.Expiration expiration, ExpirationOptions options, String... fields) { diff --git a/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java b/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java index 79804ae1f1..4177aae23f 100644 --- a/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java @@ -1601,6 +1601,13 @@ default List hGetDel(byte[] key, byte[]... fields) { return hashCommands().hGetDel(key, fields); } + /** @deprecated in favor of {@link RedisConnection#hashCommands()}}. */ + @Override + @Deprecated + default List hGetEx(byte[] key, Expiration expiration, byte[]... fields) { + return hashCommands().hGetEx(key, expiration, fields); + } + /** @deprecated in favor of {@link RedisConnection#hashCommands()}}. */ @Override @Deprecated diff --git a/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java b/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java index cc865ff70e..9f7882a7c2 100644 --- a/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java @@ -1338,4 +1338,76 @@ default Mono> hGetDel(ByteBuffer key, Collection fi * @see Redis Documentation: HGETDEL */ Flux> hGetDel(Publisher commands); + + class HGetExCommand extends HashFieldsCommand { + + private final Expiration expiration; + + private HGetExCommand(@Nullable ByteBuffer key, List fields, Expiration expiration) { + + super(key, fields); + + this.expiration = expiration; + } + + /** + * Creates a new {@link HGetExCommand}. 
+ * + * @param fields the {@code fields} names to apply expiration to + * @param expiration the {@link Expiration} to apply to the given {@literal fields}. + * @return new instance of {@link HGetExCommand}. + */ + public static HGetExCommand expire(List fields, Expiration expiration) { + return new HGetExCommand(null, fields, expiration); + } + + /** + * @param key the {@literal key} from which to get and expire the {@literal fields}. + * @return new instance of {@link HGetExCommand}. + */ + public HGetExCommand from(ByteBuffer key) { + return new HGetExCommand(key, getFields(), expiration); + } + + /** + * Creates a new {@link HGetExCommand}. + * + * @param fields the {@code fields} names to apply expiration to + * @return new instance of {@link HGetExCommand}. + */ + public HGetExCommand fields(Collection fields) { + return new HGetExCommand(getKey(), new ArrayList<>(fields), expiration); + } + + public Expiration getExpiration() { + return expiration; + } + } + + /** + * Get the value of one or more {@literal fields} from hash at {@literal key} and optionally set expiration time or + * time-to-live (TTL) for given {@literal fields}. + * + * @param key must not be {@literal null}. + * @param expiration the {@link Expiration} to apply to the given {@literal fields}. + * @param fields must not be {@literal null}. + * @return never {@literal null}. + * @see Redis Documentation: HGETEX + */ + default Mono> hGetEx(ByteBuffer key, Expiration expiration, List fields) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(fields, "Fields must not be null"); + + return hGetEx(Mono.just(HGetExCommand.expire(fields, expiration).from(key))).next().map(MultiValueResponse::getOutput); + } + + /** + * Get the value of one or more {@literal fields} from hash at {@literal key} and optionally set expiration time or + * time-to-live (TTL) for given {@literal fields}. + * + * @param commands must not be {@literal null}. + * @return never {@literal null}.
+ * @see Redis Documentation: HGETEX + */ + Flux> hGetEx(Publisher commands); } diff --git a/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java b/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java index ead6cb88f9..25b0bba921 100644 --- a/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java @@ -25,6 +25,7 @@ import org.jspecify.annotations.NullUnmarked; import org.springframework.data.redis.core.Cursor; import org.springframework.data.redis.core.ScanOptions; +import org.springframework.data.redis.core.types.Expiration; import org.springframework.util.ObjectUtils; /** @@ -554,4 +555,17 @@ default List hExpireAt(byte @NonNull [] key, long unixTime, byte @NonNull * @see Redis Documentation: HGETDEL */ List hGetDel(byte @NonNull [] key, byte @NonNull [] @NonNull... fields); + + /** + * Get the value of one or more {@code fields} from hash at {@code key} and optionally set expiration time or + * time-to-live (TTL) for given {@code fields}. + * + * @param key must not be {@literal null}. + * @param fields must not be {@literal null}. + * @return empty {@link List} if key does not exist. {@literal null} when used in pipeline / transaction. + * @see Redis Documentation: HGETEX + */ + List hGetEx(byte @NonNull [] key, Expiration expiration, + byte @NonNull [] @NonNull... 
fields); + } diff --git a/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java b/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java index 52877f99f3..e151f6b701 100644 --- a/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java @@ -2564,6 +2564,18 @@ List hpExpireAt(@NonNull String key, long unixTimeInMillis, ExpirationOpti */ List hGetDel(@NonNull String key, @NonNull String @NonNull... fields); + /** + * Get the value of one or more {@code fields} from hash at {@code key} and optionally set expiration time or + * time-to-live (TTL) for given {@code fields}. + * + * @param key must not be {@literal null}. + * @param fields must not be {@literal null}. + * @return empty {@link List} if key does not exist. {@literal null} when used in pipeline / transaction. + * @see Redis Documentation: HGETEX + * @see RedisHashCommands#hGetEx(byte[], Expiration, byte[]...) + */ + List hGetEx(@NonNull String key, Expiration expiration, @NonNull String @NonNull... 
fields); + // ------------------------------------------------------------------------- // Methods dealing with HyperLogLog // ------------------------------------------------------------------------- diff --git a/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java b/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java index 65d25d90fe..bd362dc12f 100644 --- a/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java @@ -15,6 +15,7 @@ */ package org.springframework.data.redis.connection.jedis; +import org.springframework.data.redis.core.types.Expiration; import redis.clients.jedis.args.ExpiryOption; import redis.clients.jedis.params.ScanParams; import redis.clients.jedis.resps.ScanResult; @@ -425,7 +426,19 @@ public List hGetDel(byte[] key, byte[]... fields) { } catch (Exception ex) { throw convertJedisAccessException(ex); } + } + + @Override + public List hGetEx(byte[] key, Expiration expiration, byte[]... 
fields) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(fields, "Fields must not be null"); + try { + return connection.getCluster().hgetex(key, JedisConverters.toHGetExParams(expiration), fields); + } catch (Exception ex) { + throw convertJedisAccessException(ex); + } } @Nullable diff --git a/src/main/java/org/springframework/data/redis/connection/jedis/JedisConverters.java b/src/main/java/org/springframework/data/redis/connection/jedis/JedisConverters.java index d9aad4571a..b38fa28fc2 100644 --- a/src/main/java/org/springframework/data/redis/connection/jedis/JedisConverters.java +++ b/src/main/java/org/springframework/data/redis/connection/jedis/JedisConverters.java @@ -22,13 +22,7 @@ import redis.clients.jedis.args.FlushMode; import redis.clients.jedis.args.GeoUnit; import redis.clients.jedis.args.ListPosition; -import redis.clients.jedis.params.GeoRadiusParam; -import redis.clients.jedis.params.GeoSearchParam; -import redis.clients.jedis.params.GetExParams; -import redis.clients.jedis.params.ScanParams; -import redis.clients.jedis.params.SetParams; -import redis.clients.jedis.params.SortingParams; -import redis.clients.jedis.params.ZAddParams; +import redis.clients.jedis.params.*; import redis.clients.jedis.resps.GeoRadiusResponse; import redis.clients.jedis.util.SafeEncoder; @@ -398,6 +392,40 @@ static GetExParams toGetExParams(Expiration expiration, GetExParams params) { : params.ex(expiration.getConverted(TimeUnit.SECONDS)); } + /** + * Converts a given {@link Expiration} to the according {@code HGETEX} command argument depending on + * {@link Expiration#isUnixTimestamp()}. + *
+ * <dl>
+ * <dt>{@link TimeUnit#MILLISECONDS}</dt>
+ * <dd>{@code PX|PXAT}</dd>
+ * <dt>{@link TimeUnit#SECONDS}</dt>
+ * <dd>{@code EX|EXAT}</dd>
+ * </dl>
+ * + * @param expiration must not be {@literal null}. + * @since 4.0 + */ + static HGetExParams toHGetExParams(Expiration expiration) { + return toHGetExParams(expiration, new HGetExParams()); + } + + static HGetExParams toHGetExParams(Expiration expiration, HGetExParams params) { + + if (expiration.isPersistent()) { + return params.persist(); + } + + if (expiration.getTimeUnit() == TimeUnit.MILLISECONDS) { + if (expiration.isUnixTimestamp()) { + return params.pxAt(expiration.getExpirationTime()); + } + return params.px(expiration.getExpirationTime()); + } + + return expiration.isUnixTimestamp() ? params.exAt(expiration.getConverted(TimeUnit.SECONDS)) + : params.ex(expiration.getConverted(TimeUnit.SECONDS)); + } + /** * Converts a given {@link SetOption} to the according {@code SET} command argument.
*
diff --git a/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java b/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java index 6e392d1586..14bfe5d271 100644 --- a/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java @@ -15,6 +15,7 @@ */ package org.springframework.data.redis.connection.jedis; +import org.springframework.data.redis.core.types.Expiration; import redis.clients.jedis.Jedis; import redis.clients.jedis.args.ExpiryOption; import redis.clients.jedis.commands.PipelineBinaryCommands; @@ -341,6 +342,15 @@ public List hGetDel(byte @NonNull [] key, byte @NonNull [] @NonNull... f return connection.invoke().just(Jedis::hgetdel, PipelineBinaryCommands::hgetdel, key, fields); } + @Override + public List hGetEx(byte @NonNull [] key, Expiration expiration, byte @NonNull [] @NonNull... fields) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(fields, "Fields must not be null"); + + return connection.invoke().just(Jedis::hgetex, PipelineBinaryCommands::hgetex, key, JedisConverters.toHGetExParams(expiration), fields); + } + @Nullable @Override public Long hStrLen(byte[] key, byte[] field) { diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnection.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnection.java index e1d8600bdb..b12a9eb3de 100644 --- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConnection.java @@ -1244,8 +1244,9 @@ static class TypeHints { COMMAND_OUTPUT_TYPE_MAPPING.put(ZREVRANGE, ValueListOutput.class); COMMAND_OUTPUT_TYPE_MAPPING.put(ZREVRANGEBYSCORE, ValueListOutput.class); COMMAND_OUTPUT_TYPE_MAPPING.put(HGETDEL, ValueListOutput.class); + 
COMMAND_OUTPUT_TYPE_MAPPING.put(HGETEX, ValueListOutput.class); - // BOOLEAN + // BOOLEAN COMMAND_OUTPUT_TYPE_MAPPING.put(EXISTS, BooleanOutput.class); COMMAND_OUTPUT_TYPE_MAPPING.put(EXPIRE, BooleanOutput.class); COMMAND_OUTPUT_TYPE_MAPPING.put(EXPIREAT, BooleanOutput.class); diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java index 4ade04c7a6..34dadc7574 100644 --- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java +++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java @@ -23,6 +23,8 @@ import io.lettuce.core.cluster.models.partitions.RedisClusterNode.NodeFlag; import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.time.Instant; import java.util.*; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -620,6 +622,35 @@ static GetExArgs toGetExArgs(@Nullable Expiration expiration) { : args.ex(expiration.getConverted(TimeUnit.SECONDS)); } + /** + * Convert {@link Expiration} to {@link HGetExArgs}. + * + * @param expiration can be {@literal null}. + * @since 4.0 + */ + static HGetExArgs toHGetExArgs(@Nullable Expiration expiration) { + + HGetExArgs args = new HGetExArgs(); + + if (expiration == null) { + return args; + } + + if (expiration.isPersistent()) { + return args.persist(); + } + + if (expiration.getTimeUnit() == TimeUnit.MILLISECONDS) { + if (expiration.isUnixTimestamp()) { + // PXAT: expiration time is already milliseconds since the epoch, so it must be read with ofEpochMilli (ofEpochSecond would inflate the timestamp ~1000x) + return args.pxAt(Instant.ofEpochMilli(expiration.getExpirationTime())); + } + return args.px(Duration.ofMillis(expiration.getExpirationTime())); + } + + return expiration.isUnixTimestamp() ?
args.exAt(Instant.ofEpochSecond(expiration.getConverted(TimeUnit.SECONDS))) + : args.ex(Duration.ofSeconds(expiration.getConverted(TimeUnit.SECONDS))); + } + @SuppressWarnings("NullAway") static Converter, Long> toTimeConverter(TimeUnit timeUnit) { diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java index 0af9259d9c..ec9bbe88a8 100644 --- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java @@ -40,6 +40,7 @@ import org.springframework.data.redis.core.KeyBoundCursor; import org.springframework.data.redis.core.ScanIteration; import org.springframework.data.redis.core.ScanOptions; +import org.springframework.data.redis.core.types.Expiration; import org.springframework.util.Assert; import org.springframework.util.ObjectUtils; @@ -275,6 +276,17 @@ public List hGetDel(byte @NonNull [] key, byte @NonNull []... fields) { .toList(source -> source.getValueOrElse(null)); } + @Override + public List hGetEx(byte @NonNull [] key, Expiration expiration, byte @NonNull []... 
fields) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(fields, "Fields must not be null"); + + return connection.invoke().fromMany(RedisHashAsyncCommands::hgetex, key, + LettuceConverters.toHGetExArgs(expiration), fields) + .toList(source -> source.getValueOrElse(null)); + } + /** * @param key * @param cursorId diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java index 7656c8ad69..9a1da724e3 100644 --- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java @@ -370,6 +370,20 @@ public Flux> hGetDel(Publisher> hGetEx(Publisher commands) { + + return connection.execute(cmd -> Flux.from(commands).concatMap(command -> { + + Assert.notNull(command.getKey(), "Key must not be null"); + Assert.notNull(command.getFields(), "Fields must not be null"); + + return cmd.hgetex(command.getKey(), LettuceConverters.toHGetExArgs(command.getExpiration()), command.getFields().toArray(ByteBuffer[]::new)).collectList() + .map(value -> new MultiValueResponse<>(command, value.stream().map(v -> v.getValueOrElse(null)) + .collect(Collectors.toList()))); + })); + } + private static Map.Entry toEntry(KeyValue kv) { return new Entry() { diff --git a/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java b/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java index 0b771a67a4..4fe9ec7a0d 100644 --- a/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java @@ -23,6 +23,7 @@ import org.jspecify.annotations.NonNull; import org.jspecify.annotations.NullUnmarked; +import org.springframework.data.redis.core.types.Expiration; /** * Hash 
operations bound to a certain key. @@ -255,4 +256,15 @@ default BoundHashFieldExpirationOperations hashExpiration(@NonNull Collectio * @since 3.1 */ List getAndDelete(@NonNull Collection<@NonNull HK> hashFields); + + /** + * Get and optionally expire the value for given {@code hashFields} from the hash at the bound key. Values are in the order of the + * requested hash fields. Absent field values are represented using {@literal null} in the resulting {@link List}. + * + * @param expiration is optional. + * @param hashFields must not be {@literal null}. + * @return never {@literal null}. + * @since 4.0 + */ + List getAndExpire(Expiration expiration, @NonNull Collection<@NonNull HK> hashFields); } diff --git a/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java b/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java index 7d9e104e3c..f74c673583 100644 --- a/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java @@ -214,6 +214,26 @@ public List getAndDelete(@NonNull K key, @NonNull Collection<@NonNull HK> fi return deserializeHashValues(rawValues); } + @Override + public List getAndExpire(@NonNull K key, @NonNull Expiration expiration, + @NonNull Collection<@NonNull HK> fields) { + + if (fields.isEmpty()) { + return Collections.emptyList(); + } + + byte[] rawKey = rawKey(key); + byte[][] rawHashKeys = new byte[fields.size()][]; + int counter = 0; + for (@NonNull + HK hashKey : fields) { + rawHashKeys[counter++] = rawHashKey(hashKey); + } + List rawValues = execute(connection -> connection.hashCommands().hGetEx(rawKey, expiration, rawHashKeys)); + + return deserializeHashValues(rawValues); + } + @Override public void put(@NonNull K key, @NonNull HK hashKey, HV value) { diff --git a/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java 
b/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java index d05f6ea336..3f66d75bb1 100644 --- a/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java @@ -123,6 +123,19 @@ public Mono> getAndDelete(H key, Collection hashKeys) { .flatMap(hks -> hashCommands.hGetDel(rawKey(key), hks)).map(this::deserializeHashValues)); } + @Override + public Mono> getAndExpire(H key, Expiration expiration, Collection hashKeys) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(hashKeys, "Hash keys must not be null"); + Assert.notEmpty(hashKeys, "Hash keys must not be empty"); + + return createMono(hashCommands -> Flux.fromIterable(hashKeys) // + .map(this::rawHashKey) // + .collectList() // + .flatMap(hks -> hashCommands.hGetEx(rawKey(key), expiration, hks)).map(this::deserializeHashValues)); + } + @Override public Mono increment(H key, HK hashKey, long delta) { diff --git a/src/main/java/org/springframework/data/redis/core/HashOperations.java b/src/main/java/org/springframework/data/redis/core/HashOperations.java index 2299796e64..3763a2553e 100644 --- a/src/main/java/org/springframework/data/redis/core/HashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/HashOperations.java @@ -91,6 +91,18 @@ public interface HashOperations { */ List getAndDelete(@NonNull H key, @NonNull Collection<@NonNull HK> hashKeys); + /** + * Get and optionally expire the value for given {@code hashKeys} from hash at {@code key}. Values are in the order of + * the requested keys. Absent field values are represented using {@literal null} in the resulting {@link List}. + * + * @param key must not be {@literal null}. + * @param expiration is optional. + * @param hashKeys must not be {@literal null}. + * @return {@literal null} when used in pipeline / transaction. 
+ * @since 4.0 + */ + List getAndExpire(@NonNull H key, Expiration expiration, @NonNull Collection<@NonNull HK> hashKeys); + /** * Increment {@code value} of a hash {@code hashKey} by the given {@code delta}. * @@ -363,6 +375,8 @@ default BoundHashFieldExpirationOperations expiration(@NonNull H key, return new DefaultBoundHashFieldExpirationOperations<>(this, key, () -> hashFields); } + + /** * @return the underlying {@link RedisOperations} used to execute commands. */ diff --git a/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java b/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java index f156d7947a..a5de4b187d 100644 --- a/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java @@ -95,6 +95,18 @@ public interface ReactiveHashOperations { */ Mono> getAndDelete(H key, Collection hashKeys); + /** + * Get and optionally expire the value for given {@code hashKeys} from hash at {@code key}. Values are in the order of the + * requested keys. Absent field values are represented using {@literal null} in the resulting {@link List}. + * + * @param key must not be {@literal null}. + * @param expiration is optional. + * @param hashKeys must not be {@literal null}. + * @return never {@literal null}. + * @since 4.0 + */ + Mono> getAndExpire(H key, Expiration expiration, Collection hashKeys); + /** * Increment {@code value} of a hash {@code hashKey} by the given {@code delta}. 
* diff --git a/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java b/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java index 57c8156107..472cfbed8c 100644 --- a/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java @@ -3802,6 +3802,69 @@ public void hGetDelDeletesKeyWhenAllFieldsRemoved() { Boolean.FALSE)); } + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExReturnsValueAndSetsExpiration() { + + actual.add(connection.hSet("hash-hgetex", "field-1", "value-1")); + actual.add(connection.hSet("hash-hgetex", "field-2", "value-2")); + actual.add(connection.hGetEx("hash-hgetex", Expiration.seconds(60), "field-1")); + actual.add(connection.hExists("hash-hgetex", "field-1")); + actual.add(connection.hExists("hash-hgetex", "field-2")); + + verifyResults(Arrays.asList(Boolean.TRUE, Boolean.TRUE, List.of("value-1"), Boolean.TRUE, Boolean.TRUE)); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExReturnsNullWhenFieldDoesNotExist() { + + actual.add(connection.hSet("hash-hgetex", "field-1", "value-1")); + actual.add(connection.hGetEx("hash-hgetex", Expiration.seconds(60), "missing-field")); + actual.add(connection.hExists("hash-hgetex", "field-1")); + + verifyResults(Arrays.asList(Boolean.TRUE, Arrays.asList((Object) null), Boolean.TRUE)); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExReturnsNullWhenKeyDoesNotExist() { + + actual.add(connection.hGetEx("missing-hash", Expiration.seconds(60), "field-1")); + + verifyResults(Arrays.asList(Arrays.asList((Object) null))); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExMultipleFieldsReturnsValuesAndSetsExpiration() { + + actual.add(connection.hSet("hash-hgetex", "field-1", "value-1")); + 
actual.add(connection.hSet("hash-hgetex", "field-2", "value-2")); + actual.add(connection.hSet("hash-hgetex", "field-3", "value-3")); + actual.add(connection.hGetEx("hash-hgetex", Expiration.seconds(120), "field-1", "field-2")); + actual.add(connection.hExists("hash-hgetex", "field-1")); + actual.add(connection.hExists("hash-hgetex", "field-2")); + actual.add(connection.hExists("hash-hgetex", "field-3")); + + verifyResults(Arrays.asList(Boolean.TRUE, Boolean.TRUE, Boolean.TRUE, + Arrays.asList("value-1", "value-2"), + Boolean.TRUE, Boolean.TRUE, Boolean.TRUE)); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExMultipleFieldsWithNonExistentFields() { + + actual.add(connection.hSet("hash-hgetex", "field-1", "value-1")); + actual.add(connection.hGetEx("hash-hgetex", Expiration.seconds(60), "field-1", "missing-field")); + actual.add(connection.hExists("hash-hgetex", "field-1")); + + verifyResults(Arrays.asList(Boolean.TRUE, + Arrays.asList("value-1", null), + Boolean.TRUE)); + } + @Test // DATAREDIS-694 void touchReturnsNrOfKeysTouched() { diff --git a/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java b/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java index 30dde87480..dd8ffadab3 100644 --- a/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java +++ b/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java @@ -449,6 +449,10 @@ public List hGetDel(byte[] key, byte[]... fields) { return delegate.hGetDel(key, fields); } + public List hGetEx(byte[] key, Expiration expiration, byte[]... fields) { + return delegate.hGetEx(key, expiration, fields); + } + public Long zRem(byte[] key, byte[]... 
values) { return delegate.zRem(key, values); } diff --git a/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java b/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java index c3b2ad3b01..4db8ba56fd 100644 --- a/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java +++ b/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java @@ -1324,6 +1324,73 @@ public void hGetDelDeletesKeyWhenAllFieldsRemoved() { assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_3_BYTES)).isFalse(); } + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExReturnsValueAndSetsExpiration() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + + List result = clusterConnection.hashCommands().hGetEx(KEY_1_BYTES, Expiration.seconds(60), KEY_2_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_3_BYTES)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExReturnsNullWhenFieldDoesNotExist() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + List result = clusterConnection.hashCommands().hGetEx(KEY_1_BYTES, Expiration.seconds(60), KEY_3_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isNull(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExReturnsNullWhenKeyDoesNotExist() { + + List result = clusterConnection.hashCommands().hGetEx(KEY_1_BYTES, Expiration.seconds(60), KEY_2_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isNull(); + } + + @Test // GH-3211 + 
@EnabledOnCommand("HGETEX") + public void hGetExMultipleFieldsReturnsValuesAndSetsExpiration() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + nativeConnection.hset(KEY_1, "field3", "value3"); + + List result = clusterConnection.hashCommands().hGetEx(KEY_1_BYTES, Expiration.seconds(120), KEY_2_BYTES, KEY_3_BYTES); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(result.get(1)).isEqualTo(VALUE_2_BYTES); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_3_BYTES)).isTrue(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, "field3".getBytes())).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExMultipleFieldsWithNonExistentFields() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + List result = clusterConnection.hashCommands().hGetEx(KEY_1_BYTES, Expiration.seconds(60), KEY_2_BYTES, KEY_3_BYTES); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(result.get(1)).isNull(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + } + @Test // DATAREDIS-315 public void hValsShouldRetrieveValuesCorrectly() { diff --git a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java index 48e708b796..33bf6d16d3 100644 --- a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java +++ b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java @@ -1388,6 +1388,73 @@ public void hGetDelDeletesKeyWhenAllFieldsRemoved() { assertThat(clusterConnection.exists(KEY_1_BYTES)).isFalse(); } + @Test // GH-3211 + 
@EnabledOnCommand("HGETEX") + public void hGetExReturnsValueAndSetsExpiration() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + + List result = clusterConnection.hashCommands().hGetEx(KEY_1_BYTES, Expiration.seconds(60), KEY_2_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_3_BYTES)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExReturnsNullWhenFieldDoesNotExist() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + List result = clusterConnection.hashCommands().hGetEx(KEY_1_BYTES, Expiration.seconds(60), KEY_3_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isNull(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExReturnsNullWhenKeyDoesNotExist() { + + List result = clusterConnection.hashCommands().hGetEx(KEY_1_BYTES, Expiration.seconds(60), KEY_2_BYTES); + assertThat(result).hasSize(1); + assertThat(result.get(0)).isNull(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExMultipleFieldsReturnsValuesAndSetsExpiration() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + nativeConnection.hset(KEY_1, "field3", "value3"); + + List result = clusterConnection.hashCommands().hGetEx(KEY_1_BYTES, Expiration.seconds(120), KEY_2_BYTES, KEY_3_BYTES); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(result.get(1)).isEqualTo(VALUE_2_BYTES); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_3_BYTES)).isTrue(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, 
"field3".getBytes())).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + public void hGetExMultipleFieldsWithNonExistentFields() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + List result = clusterConnection.hashCommands().hGetEx(KEY_1_BYTES, Expiration.seconds(60), KEY_2_BYTES, KEY_3_BYTES); + + assertThat(result).hasSize(2); + assertThat(result.get(0)).isEqualTo(VALUE_1_BYTES); + assertThat(result.get(1)).isNull(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + } + @Test // DATAREDIS-315 public void hValsShouldRetrieveValuesCorrectly() { diff --git a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java index 1573a514d9..496de6ab8c 100644 --- a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.*; +import org.springframework.data.redis.core.types.Expiration; import reactor.test.StepVerifier; import java.nio.ByteBuffer; @@ -420,4 +421,73 @@ void hGetDelShouldDeleteKeyWhenAllFieldsRemoved() { assertThat(nativeCommands.hlen(KEY_1)).isEqualTo(0L); } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + void hGetExShouldReturnValueAndSetExpiration() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + nativeCommands.hset(KEY_1, FIELD_2, VALUE_2); + + connection.hashCommands().hGetEx(KEY_1_BBUFFER, Expiration.seconds(60), Collections.singletonList(FIELD_1_BBUFFER)) + .as(StepVerifier::create) + .expectNext(Collections.singletonList(VALUE_1_BBUFFER)).verifyComplete(); + + assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isTrue(); + assertThat(nativeCommands.hexists(KEY_1, FIELD_2)).isTrue(); + } + + 
@Test // GH-3211 + @EnabledOnCommand("HGETEX") + void hGetExShouldReturnNullForNonExistentField() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + + connection.hashCommands().hGetEx(KEY_1_BBUFFER, Expiration.seconds(60), Collections.singletonList(FIELD_2_BBUFFER)) + .as(StepVerifier::create) + .expectNext(Collections.singletonList(null)).verifyComplete(); + + assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + void hGetExShouldReturnNullForNonExistentKey() { + + connection.hashCommands().hGetEx(KEY_1_BBUFFER, Expiration.seconds(60), Collections.singletonList(FIELD_1_BBUFFER)) + .as(StepVerifier::create) + .expectNext(Collections.singletonList(null)).verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + void hGetExShouldHandleMultipleFields() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + nativeCommands.hset(KEY_1, FIELD_2, VALUE_2); + nativeCommands.hset(KEY_1, FIELD_3, VALUE_3); + + connection.hashCommands().hGetEx(KEY_1_BBUFFER, Expiration.seconds(120), Arrays.asList(FIELD_1_BBUFFER, FIELD_2_BBUFFER)) + .as(StepVerifier::create) + .expectNext(Arrays.asList(VALUE_1_BBUFFER, VALUE_2_BBUFFER)) + .verifyComplete(); + + assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isTrue(); + assertThat(nativeCommands.hexists(KEY_1, FIELD_2)).isTrue(); + assertThat(nativeCommands.hexists(KEY_1, FIELD_3)).isTrue(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + void hGetExShouldHandleMultipleFieldsWithNonExistent() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + + connection.hashCommands().hGetEx(KEY_1_BBUFFER, Expiration.seconds(60), Arrays.asList(FIELD_1_BBUFFER, FIELD_2_BBUFFER)) + .as(StepVerifier::create) + .expectNext(Arrays.asList(VALUE_1_BBUFFER, null)) + .verifyComplete(); + + assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isTrue(); + } } diff --git a/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java 
b/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java index ddc4528e87..8b2baa51ea 100644 --- a/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java @@ -21,10 +21,7 @@ import java.io.IOException; import java.time.Duration; import java.time.Instant; -import java.util.Arrays; -import java.util.Collection; -import java.util.List; -import java.util.Map; +import java.util.*; import java.util.concurrent.TimeUnit; import org.assertj.core.api.InstanceOfAssertFactories; @@ -40,6 +37,7 @@ import org.springframework.data.redis.connection.jedis.JedisConnectionFactory; import org.springframework.data.redis.connection.jedis.extension.JedisConnectionFactoryExtension; import org.springframework.data.redis.core.ExpireChanges.ExpiryChangeState; +import org.springframework.data.redis.core.types.Expiration; import org.springframework.data.redis.core.types.Expirations.TimeToLive; import org.springframework.data.redis.test.condition.EnabledOnCommand; import org.springframework.data.redis.test.extension.RedisStandalone; @@ -302,6 +300,53 @@ void testBoundExpireAndGetExpireSeconds() { }); } + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + void testBoundHashOperationsGetAndExpire() { + + K key = keyFactory.instance(); + HK key1 = hashKeyFactory.instance(); + HV val1 = hashValueFactory.instance(); + HK key2 = hashKeyFactory.instance(); + HV val2 = hashValueFactory.instance(); + HK key3 = hashKeyFactory.instance(); + HV val3 = hashValueFactory.instance(); + + // Set up test data + hashOps.put(key, key1, val1); + hashOps.put(key, key2, val2); + hashOps.put(key, key3, val3); + + BoundHashOperations boundHashOps = redisTemplate.boundHashOps(key); + + // Test single field get and expire + List result1 = boundHashOps.getAndExpire(Expiration.seconds(60), Arrays.asList(key1)); + 
assertThat(result1).hasSize(1).containsExactly(val1); + + // Verify field still exists but has expiration + assertThat(boundHashOps.hasKey(key1)).isTrue(); + assertThat(boundHashOps.get(key1)).isEqualTo(val1); + + // Test multiple fields get and expire + List result2 = boundHashOps.getAndExpire(Expiration.seconds(120), Arrays.asList(key2, key3)); + assertThat(result2).hasSize(2).containsExactly(val2, val3); + + // Verify fields still exist but have expiration + assertThat(boundHashOps.hasKey(key2)).isTrue(); + assertThat(boundHashOps.hasKey(key3)).isTrue(); + assertThat(boundHashOps.get(key2)).isEqualTo(val2); + assertThat(boundHashOps.get(key3)).isEqualTo(val3); + + // Test non-existent field + HK nonExistentKey = hashKeyFactory.instance(); + List result3 = boundHashOps.getAndExpire(Expiration.seconds(60), Arrays.asList(nonExistentKey)); + assertThat(result3).hasSize(1).containsExactly((HV) null); + + // Test empty fields collection + List result4 = boundHashOps.getAndExpire(Expiration.seconds(60), Collections.emptyList()); + assertThat(result4).isEmpty(); + } + @Test // GH-3054 @EnabledOnCommand("HEXPIRE") void testExpireAtAndGetExpireMillis() { @@ -465,4 +510,63 @@ void testGetAndDelete() { assertThat(allFieldsResult).hasSize(2).containsExactly(value1, value2); assertThat(redisTemplate.hasKey(keyForDeletion)).isFalse(); // Key should be deleted when last field is removed } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + void testGetAndExpire() { + + K key = keyFactory.instance(); + HK key1 = hashKeyFactory.instance(); + HV val1 = hashValueFactory.instance(); + HK key2 = hashKeyFactory.instance(); + HV val2 = hashValueFactory.instance(); + HK key3 = hashKeyFactory.instance(); + HV val3 = hashValueFactory.instance(); + + // Set up test data + hashOps.put(key, key1, val1); + hashOps.put(key, key2, val2); + hashOps.put(key, key3, val3); + + // Test single field get and expire + List result1 = hashOps.getAndExpire(key, Expiration.seconds(60), 
Arrays.asList(key1)); + assertThat(result1).hasSize(1).containsExactly(val1); + + // Verify field still exists but has expiration + assertThat(hashOps.hasKey(key, key1)).isTrue(); + assertThat(hashOps.get(key, key1)).isEqualTo(val1); + + // Test multiple fields get and expire + List result2 = hashOps.getAndExpire(key, Expiration.seconds(120), Arrays.asList(key2, key3)); + assertThat(result2).hasSize(2).containsExactly(val2, val3); + + // Verify fields still exist but have expiration + assertThat(hashOps.hasKey(key, key2)).isTrue(); + assertThat(hashOps.hasKey(key, key3)).isTrue(); + assertThat(hashOps.get(key, key2)).isEqualTo(val2); + assertThat(hashOps.get(key, key3)).isEqualTo(val3); + + // Test non-existent field + HK nonExistentKey = hashKeyFactory.instance(); + List result3 = hashOps.getAndExpire(key, Expiration.seconds(60), Arrays.asList(nonExistentKey)); + assertThat(result3).hasSize(1).containsExactly((HV) null); + + // Test mixed existing and non-existent fields + HK key4 = hashKeyFactory.instance(); + HV val4 = hashValueFactory.instance(); + hashOps.put(key, key4, val4); + + List result4 = hashOps.getAndExpire(key, Expiration.seconds(60), Arrays.asList(key4, nonExistentKey)); + assertThat(result4).hasSize(2); + assertThat(result4.get(0)).isEqualTo(val4); + assertThat(result4.get(1)).isNull(); + + // Verify existing field still exists with expiration + assertThat(hashOps.hasKey(key, key4)).isTrue(); + assertThat(hashOps.get(key, key4)).isEqualTo(val4); + + // Test empty fields collection + List result5 = hashOps.getAndExpire(key, Expiration.seconds(60), Collections.emptyList()); + assertThat(result5).isEmpty(); + } } diff --git a/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java b/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java index 717d4c39fe..bd4e606acc 100644 --- 
a/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java @@ -19,6 +19,7 @@ import static org.assertj.core.api.Assumptions.*; import static org.junit.jupiter.api.condition.OS.*; +import org.springframework.data.redis.core.types.Expiration; import reactor.test.StepVerifier; import java.time.Duration; @@ -831,4 +832,81 @@ void getAndDeleteFromNonExistentHash() { }) .verifyComplete(); } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + void getAndExpireSingleKey() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + HV hashvalue1 = hashValueFactory.instance(); + HK hashkey2 = hashKeyFactory.instance(); + HV hashvalue2 = hashValueFactory.instance(); + + putAll(key, hashkey1, hashvalue1, hashkey2, hashvalue2); + + hashOperations.getAndExpire(key, Expiration.seconds(60), Arrays.asList(hashkey1)).as(StepVerifier::create) + .consumeNextWith(actual -> { + assertThat(actual).hasSize(1).containsExactly(hashvalue1); + }) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey1).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey2).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + void getAndExpireMultipleKeys() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + HV hashvalue1 = hashValueFactory.instance(); + HK hashkey2 = hashKeyFactory.instance(); + HV hashvalue2 = hashValueFactory.instance(); + + putAll(key, hashkey1, hashvalue1, hashkey2, hashvalue2); + + hashOperations.getAndExpire(key, 
Expiration.seconds(120), Arrays.asList(hashkey1, hashkey2)).as(StepVerifier::create) + .consumeNextWith(actual -> { + assertThat(actual).hasSize(2).containsExactly(hashvalue1, hashvalue2); + }) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey1).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey2).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HGETEX") + void getAndExpireNonExistentKey() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + + hashOperations.getAndExpire(key, Expiration.seconds(60), Arrays.asList(hashkey1)).as(StepVerifier::create) + .consumeNextWith(actual -> { + assertThat(actual).hasSize(1).containsExactly((HV) null); + }) + .verifyComplete(); + } } From 00e1ba2685d925495ecd99e40e0c4510e11a0937 Mon Sep 17 00:00:00 2001 From: "viktoriya.kutsarova" Date: Tue, 30 Sep 2025 16:17:00 +0300 Subject: [PATCH 3/3] Introduce HSETEX command to the Spring Data Redis framework Signed-off-by: viktoriya.kutsarova --- .../DefaultStringRedisConnection.java | 11 + .../connection/DefaultedRedisConnection.java | 7 + .../connection/ReactiveHashCommands.java | 97 ++++++++ .../redis/connection/RedisHashCommands.java | 56 +++++ .../connection/StringRedisConnection.java | 14 ++ .../jedis/JedisClusterHashCommands.java | 13 + .../connection/jedis/JedisConverters.java | 73 +++++- .../connection/jedis/JedisHashCommands.java | 12 + .../connection/lettuce/LettuceConverters.java | 59 +++++ .../lettuce/LettuceHashCommands.java | 11 + .../lettuce/LettuceReactiveHashCommands.java | 17 ++ .../data/redis/core/BoundHashOperations.java | 14 ++ .../redis/core/DefaultHashOperations.java | 19 ++ .../core/DefaultReactiveHashOperations.java | 11 + .../data/redis/core/HashOperations.java | 14 ++ 
.../redis/core/ReactiveHashOperations.java | 14 ++ .../AbstractConnectionIntegrationTests.java | 112 +++++++++ .../connection/RedisConnectionUnitTests.java | 4 + .../jedis/JedisClusterConnectionTests.java | 71 ++++++ .../LettuceClusterConnectionTests.java | 71 ++++++ ...eReactiveHashCommandsIntegrationTests.java | 87 +++++++ ...DefaultHashOperationsIntegrationTests.java | 161 +++++++++++++ ...eactiveHashOperationsIntegrationTests.java | 222 ++++++++++++++++++ 23 files changed, 1163 insertions(+), 7 deletions(-) diff --git a/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java b/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java index 267def7896..21d89afc38 100644 --- a/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/DefaultStringRedisConnection.java @@ -24,6 +24,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.jetbrains.annotations.NotNull; +import org.jspecify.annotations.NonNull; import org.jspecify.annotations.NullUnmarked; import org.jspecify.annotations.Nullable; import org.springframework.core.convert.converter.Converter; @@ -1620,6 +1621,11 @@ public List hGetEx(String key, Expiration expiration, String... 
fields) return convertAndReturn(delegate.hGetEx(serialize(key), expiration, serializeMulti(fields)), byteListToStringList); } + @Override + public Boolean hSetEx(@NonNull String key, @NonNull Map<@NonNull String, String> hashes, HashFieldSetOption condition, Expiration expiration) { + return convertAndReturn(delegate.hSetEx(serialize(key), serialize(hashes), condition, expiration), Converters.identityConverter()); + } + @Override public Long incr(String key) { return incr(serialize(key)); @@ -2603,6 +2609,11 @@ public List hGetEx(@NotNull byte[] key, Expiration expiration, @NotNull return convertAndReturn(delegate.hGetEx(key, expiration, fields), Converters.identityConverter()); } + @Override + public Boolean hSetEx(@NotNull byte[] key, @NonNull Map hashes, HashFieldSetOption condition, Expiration expiration) { + return convertAndReturn(delegate.hSetEx(key, hashes, condition, expiration), Converters.identityConverter()); + } + public @Nullable List applyExpiration(String key, org.springframework.data.redis.core.types.Expiration expiration, ExpirationOptions options, String... fields) { diff --git a/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java b/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java index 4177aae23f..e2100d0db6 100644 --- a/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/DefaultedRedisConnection.java @@ -1608,6 +1608,13 @@ default List hGetEx(byte[] key, Expiration expiration, byte[]... fields) return hashCommands().hGetEx(key, expiration, fields); } + /** @deprecated in favor of {@link RedisConnection#hashCommands()}}. 
*/ + @Override + @Deprecated + default Boolean hSetEx(byte[] key, Map hashes, HashFieldSetOption condition, Expiration expiration) { + return hashCommands().hSetEx(key, hashes, condition, expiration); + } + /** @deprecated in favor of {@link RedisConnection#hashCommands()}}. */ @Override @Deprecated diff --git a/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java b/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java index 9f7882a7c2..47898878f0 100644 --- a/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/ReactiveHashCommands.java @@ -1410,4 +1410,101 @@ default Mono> hGetEx(ByteBuffer key, Expiration expiration, Lis * @see Redis Documentation: HGETEX */ Flux> hGetEx(Publisher commands); + + /** + * {@literal HSETEX} {@link Command}. + * + * @author Viktoriya Kutsarova + * @see Redis Documentation: HSETEX + */ + class HSetExCommand extends KeyCommand { + + private final Map fieldValueMap; + private final RedisHashCommands.HashFieldSetOption condition; + private final Expiration expiration; + + private HSetExCommand(@Nullable ByteBuffer key, Map fieldValueMap, + RedisHashCommands.HashFieldSetOption condition, Expiration expiration) { + super(key); + this.fieldValueMap = fieldValueMap; + this.condition = condition; + this.expiration = expiration; + } + + /** + * Creates a new {@link HSetExCommand} for setting field-value pairs with condition and expiration. + * + * @param fieldValueMap the field-value pairs to set; must not be {@literal null}. + * @param condition the condition for setting fields; must not be {@literal null}. + * @param expiration the expiration to apply; must not be {@literal null}. + * @return new instance of {@link HSetExCommand}. 
+ */ + public static HSetExCommand setWithConditionAndExpiration(Map fieldValueMap, + RedisHashCommands.HashFieldSetOption condition, Expiration expiration) { + return new HSetExCommand(null, fieldValueMap, condition, expiration); + } + + /** + * Applies the hash {@literal key}. Constructs a new command instance with all previously configured properties. + * + * @param key must not be {@literal null}. + * @return a new {@link HSetExCommand} with {@literal key} applied. + */ + public HSetExCommand from(ByteBuffer key) { + Assert.notNull(key, "Key must not be null"); + return new HSetExCommand(key, fieldValueMap, condition, expiration); + } + + /** + * @return the field-value map. + */ + public Map getFieldValueMap() { + return fieldValueMap; + } + + /** + * @return the condition for setting fields. + */ + public RedisHashCommands.HashFieldSetOption getCondition() { + return condition; + } + + /** + * @return the expiration to apply. + */ + public Expiration getExpiration() { + return expiration; + } + } + + /** + * Set field-value pairs in hash at {@literal key} with condition and expiration. + * + * @param key must not be {@literal null}. + * @param fieldValueMap the field-value pairs to set; must not be {@literal null}. + * @param condition the condition for setting fields; must not be {@literal null}. + * @param expiration the expiration to apply; must not be {@literal null}. + * @return never {@literal null}. 
+ * @see Redis Documentation: HSETEX + */ + default Mono hSetEx(ByteBuffer key, Map fieldValueMap, + RedisHashCommands.HashFieldSetOption condition, Expiration expiration) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(fieldValueMap, "Field-value map must not be null"); + Assert.notNull(condition, "Condition must not be null"); + Assert.notNull(expiration, "Expiration must not be null"); + + return hSetEx(Mono.just(HSetExCommand.setWithConditionAndExpiration(fieldValueMap, condition, expiration).from(key))) + .next().map(CommandResponse::getOutput); + } + + /** + * Set field-value pairs in hash at {@literal key} with condition and expiration. + * + * @param commands must not be {@literal null}. + * @return never {@literal null}. + * @see Redis Documentation: HSETEX + */ + Flux> hSetEx(Publisher commands); } diff --git a/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java b/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java index 25b0bba921..03a5f79276 100644 --- a/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/RedisHashCommands.java @@ -568,4 +568,60 @@ default List hExpireAt(byte @NonNull [] key, long unixTime, byte @NonNull List hGetEx(byte @NonNull [] key, Expiration expiration, byte @NonNull [] @NonNull... fields); + /** + * Set field-value pairs in hash at {@literal key} with optional condition and expiration. + * + * @param key must not be {@literal null}. + * @param hashes the field-value pairs to set; must not be {@literal null}. + * @param hashFieldSetOption the optional condition for setting fields. + * @param expiration the optional expiration to apply. + * @return never {@literal null}. 
+ * @see Redis Documentation: HSETEX + */ + Boolean hSetEx(byte @NonNull [] key, @NonNull Map hashes, HashFieldSetOption hashFieldSetOption, + Expiration expiration); + + /** + * {@code HSETEX} command arguments for {@code FNX}, {@code FXX}. + * + * @author Viktoriya Kutsarova + */ + enum HashFieldSetOption { + + /** + * Do not set any additional command argument. + */ + UPSERT, + + /** + * {@code FNX} + */ + IF_NONE_EXIST, + + /** + * {@code FXX} + */ + IF_ALL_EXIST; + + /** + * Do not set any additional command argument. + */ + public static HashFieldSetOption upsert() { + return UPSERT; + } + + /** + * {@code FNX} + */ + public static HashFieldSetOption ifNoneExist() { + return IF_NONE_EXIST; + } + + /** + * {@code FXX} + */ + public static HashFieldSetOption ifAllExist() { + return IF_ALL_EXIST; + } + } } diff --git a/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java b/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java index e151f6b701..2667ac0a0e 100644 --- a/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java +++ b/src/main/java/org/springframework/data/redis/connection/StringRedisConnection.java @@ -2576,6 +2576,20 @@ List hpExpireAt(@NonNull String key, long unixTimeInMillis, ExpirationOpti */ List hGetEx(@NonNull String key, Expiration expiration, @NonNull String @NonNull... fields); + /** + * Set field-value pairs in hash at {@literal key} with optional condition and expiration. + * + * @param key must not be {@literal null}. + * @param hashes the field-value pairs to set; must not be {@literal null}. + * @param condition the optional condition for setting fields. + * @param expiration the optional expiration to apply. + * @return never {@literal null}. 
+ * @see Redis Documentation: HSETEX + * @see RedisHashCommands#hSetEx(byte[], Map, HashFieldSetOption, Expiration) + */ + Boolean hSetEx(@NonNull String key, @NonNull Map<@NonNull String, String> hashes, HashFieldSetOption condition, + Expiration expiration); + // ------------------------------------------------------------------------- // Methods dealing with HyperLogLog // ------------------------------------------------------------------------- diff --git a/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java b/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java index bd362dc12f..582fb2a9cf 100644 --- a/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/jedis/JedisClusterHashCommands.java @@ -441,6 +441,19 @@ public List hGetEx(byte[] key, Expiration expiration, byte[]... fields) } } + @Override + public Boolean hSetEx(byte[] key, Map hashes, HashFieldSetOption condition, Expiration expiration) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(hashes, "Fields must not be null"); + + try { + return JedisConverters.toBoolean(connection.getCluster().hsetex(key, JedisConverters.toHSetExParams(condition, expiration), hashes)); + } catch (Exception ex) { + throw convertJedisAccessException(ex); + } + } + @Nullable @Override public Long hStrLen(byte[] key, byte[] field) { diff --git a/src/main/java/org/springframework/data/redis/connection/jedis/JedisConverters.java b/src/main/java/org/springframework/data/redis/connection/jedis/JedisConverters.java index b38fa28fc2..82847649e9 100644 --- a/src/main/java/org/springframework/data/redis/connection/jedis/JedisConverters.java +++ b/src/main/java/org/springframework/data/redis/connection/jedis/JedisConverters.java @@ -15,6 +15,7 @@ */ package org.springframework.data.redis.connection.jedis; +import 
org.springframework.data.redis.connection.*; import redis.clients.jedis.GeoCoordinate; import redis.clients.jedis.HostAndPort; import redis.clients.jedis.Protocol; @@ -47,26 +48,19 @@ import org.springframework.data.geo.Metric; import org.springframework.data.geo.Metrics; import org.springframework.data.geo.Point; -import org.springframework.data.redis.connection.BitFieldSubCommands; import org.springframework.data.redis.connection.BitFieldSubCommands.BitFieldIncrBy; import org.springframework.data.redis.connection.BitFieldSubCommands.BitFieldSet; import org.springframework.data.redis.connection.BitFieldSubCommands.BitFieldSubCommand; -import org.springframework.data.redis.connection.RedisGeoCommands; import org.springframework.data.redis.connection.RedisGeoCommands.DistanceUnit; import org.springframework.data.redis.connection.RedisGeoCommands.GeoLocation; import org.springframework.data.redis.connection.RedisGeoCommands.GeoRadiusCommandArgs; import org.springframework.data.redis.connection.RedisGeoCommands.GeoRadiusCommandArgs.Flag; import org.springframework.data.redis.connection.RedisListCommands.Position; -import org.springframework.data.redis.connection.RedisNode; -import org.springframework.data.redis.connection.RedisServer; -import org.springframework.data.redis.connection.RedisServerCommands; import org.springframework.data.redis.connection.RedisStringCommands.BitOperation; import org.springframework.data.redis.connection.RedisStringCommands.SetOption; import org.springframework.data.redis.connection.RedisZSetCommands.ZAddArgs; -import org.springframework.data.redis.connection.SortParameters; import org.springframework.data.redis.connection.SortParameters.Order; import org.springframework.data.redis.connection.SortParameters.Range; -import org.springframework.data.redis.connection.ValueEncoding; import org.springframework.data.redis.connection.convert.Converters; import org.springframework.data.redis.connection.convert.ListConverter; import 
org.springframework.data.redis.connection.convert.StringToRedisClientInfoConverter; @@ -392,6 +386,67 @@ static GetExParams toGetExParams(Expiration expiration, GetExParams params) { : params.ex(expiration.getConverted(TimeUnit.SECONDS)); } + /** + * Converts a given {@link RedisHashCommands.HashFieldSetOption} and {@link Expiration} to the according + * {@code HSETEX} command argument. + *
+ *
+	 * <dl>
+	 * <dt>{@link RedisHashCommands.HashFieldSetOption#ifNoneExist()}</dt>
+	 * <dd>{@code FNX}</dd>
+	 * <dt>{@link RedisHashCommands.HashFieldSetOption#ifAllExist()}</dt>
+	 * <dd>{@code FXX}</dd>
+	 * <dt>{@link RedisHashCommands.HashFieldSetOption#upsert()}</dt>
+	 * <dd>no condition flag</dd>
+	 * </dl>
+	 * <dl>
+	 * <dt>{@link TimeUnit#MILLISECONDS}</dt>
+	 * <dd>{@code PX|PXAT}</dd>
+	 * <dt>{@link TimeUnit#SECONDS}</dt>
+	 * <dd>{@code EX|EXAT}</dd>
+	 * </dl>
+ * + * @param condition can be {@literal null}. + * @param expiration can be {@literal null}. + * @since 4.0 + */ + static HSetExParams toHSetExParams(RedisHashCommands.@Nullable HashFieldSetOption condition, @Nullable Expiration expiration) { + return toHSetExParams(condition, expiration, new HSetExParams()); + } + + static HSetExParams toHSetExParams(RedisHashCommands.@Nullable HashFieldSetOption condition, @Nullable Expiration expiration, HSetExParams params) { + + if (condition == null && expiration == null) { + return params; + } + + if (condition != null) { + if (condition.equals(RedisHashCommands.HashFieldSetOption.ifNoneExist())) { + params.fnx(); + } else if (condition.equals(RedisHashCommands.HashFieldSetOption.ifAllExist())) { + params.fxx(); + } + } + + if (expiration == null) { + return params; + } + + if (expiration.isKeepTtl()) { + return params.keepTtl(); + } + + if (expiration.isPersistent()) { + return params; + } + + if (expiration.getTimeUnit() == TimeUnit.MILLISECONDS) { + return expiration.isUnixTimestamp() ? params.pxAt(expiration.getExpirationTime()) + : params.px(expiration.getExpirationTime()); + } + + return expiration.isUnixTimestamp() ? params.exAt(expiration.getConverted(TimeUnit.SECONDS)) + : params.ex(expiration.getConverted(TimeUnit.SECONDS)); + } + /** * Converts a given {@link Expiration} to the according {@code HGETEX} command argument depending on * {@link Expiration#isUnixTimestamp()}. 
@@ -411,6 +466,10 @@ static HGetExParams toHGetExParams(Expiration expiration) { static HGetExParams toHGetExParams(Expiration expiration, HGetExParams params) { + if (expiration == null) { + return params; + } + if (expiration.isPersistent()) { return params.persist(); } diff --git a/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java b/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java index 14bfe5d271..11581a741d 100644 --- a/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/jedis/JedisHashCommands.java @@ -351,6 +351,18 @@ public List hGetEx(byte @NonNull [] key, Expiration expiration, byte @No return connection.invoke().just(Jedis::hgetex, PipelineBinaryCommands::hgetex, key, JedisConverters.toHGetExParams(expiration), fields); } + @Override + public Boolean hSetEx(byte @NonNull [] key, @NonNull Map hashes, HashFieldSetOption condition, + Expiration expiration) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(hashes, "Hashes must not be null"); + + return connection.invoke().from(Jedis::hsetex, PipelineBinaryCommands::hsetex, key, + JedisConverters.toHSetExParams(condition, expiration), hashes) + .get(Converters::toBoolean); + } + @Nullable @Override public Long hStrLen(byte[] key, byte[] field) { diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java index 34dadc7574..98c0969738 100644 --- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java +++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceConverters.java @@ -651,6 +651,65 @@ static HGetExArgs toHGetExArgs(@Nullable Expiration expiration) { : args.ex(Duration.ofSeconds(expiration.getConverted(TimeUnit.SECONDS))); } + /** + * Convert 
{@link RedisHashCommands.HashFieldSetOption} and {@link Expiration} to {@link HSetExArgs} for the Redis {@code HSETEX} command. + * + *

+	 * <p>Condition mapping:</p>
+	 * <ul>
+	 * <li>{@code IF_NONE_EXIST} &rarr; {@code FNX}</li>
+	 * <li>{@code IF_ALL_EXIST} &rarr; {@code FXX}</li>
+	 * <li>{@code UPSERT} &rarr; no condition flag</li>
+	 * </ul>
+	 * <p>Expiration mapping:</p>
+	 * <ul>
+	 * <li>{@link Expiration#keepTtl()} &rarr; {@code KEEPTTL}</li>
+	 * <li>Unix timestamp &rarr; {@code EXAT}/{@code PXAT} depending on time unit</li>
+	 * <li>Relative expiration &rarr; {@code EX}/{@code PX} depending on time unit</li>
+	 * <li>{@code null} expiration &rarr; no TTL argument</li>
+	 * </ul>
+	 *
+	 * @param condition must not be {@literal null}; use {@code UPSERT} to omit FNX/FXX.
+	 * @param expiration can be {@literal null} to omit TTL.
+	 * @return never {@literal null}.
+	 * @since 4.0
+	 */
+	static HSetExArgs toHSetExArgs(RedisHashCommands.HashFieldSetOption condition, @Nullable Expiration expiration) {
+
+		HSetExArgs args = new HSetExArgs();
+
+		if (condition == null && expiration == null) {
+			return args;
+		}
+
+		if (condition != null) {
+			if (condition.equals(RedisHashCommands.HashFieldSetOption.ifNoneExist())) {
+				args.fnx();
+			}
+			if (condition.equals(RedisHashCommands.HashFieldSetOption.ifAllExist())) {
+				args.fxx();
+			}
+		}
+
+		if (expiration == null) {
+			return args;
+		}
+
+		if (expiration.isKeepTtl()) {
+			return args.keepttl();
+		}
+
+		// NOTE(review): persistent Expiration must not emit a TTL argument; this guard
+		// mirrors JedisConverters.toHSetExParams introduced in this same patch.
+		if (expiration.isPersistent()) {
+			return args;
+		}
+
+		if (expiration.getTimeUnit() == TimeUnit.MILLISECONDS) {
+			if (expiration.isUnixTimestamp()) {
+				// BUGFIX(review): getExpirationTime() is a millisecond unix timestamp here, so it
+				// must be converted with Instant.ofEpochMilli — ofEpochSecond would interpret the
+				// millis as seconds and yield a timestamp roughly 1000x too far in the future.
+				return args.pxAt(Instant.ofEpochMilli(expiration.getExpirationTime()));
+			}
+			return args.px(Duration.ofMillis(expiration.getExpirationTime()));
+		}
+
+		return expiration.isUnixTimestamp() ?
args.exAt(Instant.ofEpochSecond(expiration.getConverted(TimeUnit.SECONDS))) + : args.ex(Duration.ofSeconds(expiration.getConverted(TimeUnit.SECONDS))); + } + @SuppressWarnings("NullAway") static Converter, Long> toTimeConverter(TimeUnit timeUnit) { diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java index ec9bbe88a8..5492d4c45c 100644 --- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceHashCommands.java @@ -287,6 +287,17 @@ public List hGetEx(byte @NonNull [] key, Expiration expiration, byte @No .toList(source -> source.getValueOrElse(null)); } + public Boolean hSetEx(byte @NonNull [] key, @NonNull Map hashes, HashFieldSetOption condition, + Expiration expiration) { + + Assert.notNull(key, "Key must not be null"); + Assert.notNull(hashes, "Hashes must not be null"); + + return connection.invoke().from(RedisHashAsyncCommands::hsetex, key, + LettuceConverters.toHSetExArgs(condition, expiration), hashes) + .get(LettuceConverters.longToBooleanConverter()); + } + /** * @param key * @param cursorId diff --git a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java index 9a1da724e3..ac9f0491d0 100644 --- a/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java +++ b/src/main/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommands.java @@ -384,6 +384,23 @@ public Flux> hGetEx(Publisher> hSetEx(Publisher commands) { + return connection.execute(cmd -> Flux.from(commands).concatMap(command -> { + + Assert.notNull(command.getKey(), "Key must not be null"); + Assert.notNull(command.getFieldValueMap(), 
"FieldValueMap must not be null"); + + Map entries = command.getFieldValueMap(); + + return cmd.hsetex(command.getKey(), + LettuceConverters.toHSetExArgs(command.getCondition(), command.getExpiration()), entries) + .map(LettuceConverters.longToBooleanConverter()::convert) + .map(value -> new BooleanResponse<>(command, value)); + + })); + } + private static Map.Entry toEntry(KeyValue kv) { return new Entry() { diff --git a/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java b/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java index 4fe9ec7a0d..9d44c8c2ba 100644 --- a/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/BoundHashOperations.java @@ -23,6 +23,7 @@ import org.jspecify.annotations.NonNull; import org.jspecify.annotations.NullUnmarked; +import org.springframework.data.redis.connection.RedisHashCommands; import org.springframework.data.redis.core.types.Expiration; /** @@ -267,4 +268,17 @@ default BoundHashFieldExpirationOperations hashExpiration(@NonNull Collectio * @since 4.0 */ List getAndExpire(Expiration expiration, @NonNull Collection<@NonNull HK> hashFields); + + /** + * Set the value of one or more fields using data provided in {@code m} at the bound key, and optionally set their + * expiration time or time-to-live (TTL). The {@code condition} determines whether the fields are set. + * + * @param m must not be {@literal null}. + * @param condition is optional. Use {@link RedisHashCommands.HashFieldSetOption#IF_NONE_EXIST} (FNX) to only set the fields if + * none of them already exist, {@link RedisHashCommands.HashFieldSetOption#IF_ALL_EXIST} (FXX) to only set the + * fields if all of them already exist, or {@link RedisHashCommands.HashFieldSetOption#UPSERT} to set the fields + * unconditionally. + * @param expiration is optional. 
+ */ + void putAndExpire(Map m, RedisHashCommands.HashFieldSetOption condition, Expiration expiration); } diff --git a/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java b/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java index f74c673583..cf8ea5a0e1 100644 --- a/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/DefaultHashOperations.java @@ -31,6 +31,7 @@ import org.jspecify.annotations.Nullable; import org.springframework.core.convert.converter.Converter; import org.springframework.data.redis.connection.ExpirationOptions; +import org.springframework.data.redis.connection.RedisHashCommands; import org.springframework.data.redis.connection.convert.Converters; import org.springframework.data.redis.core.types.Expiration; import org.springframework.data.redis.core.types.Expirations; @@ -234,6 +235,24 @@ public List getAndExpire(@NonNull K key, @NonNull Expiration expiration, return deserializeHashValues(rawValues); } + @Override + public Boolean putAndExpire(@NonNull K key, @NonNull Map m, + RedisHashCommands.HashFieldSetOption condition, Expiration expiration) { + if (m.isEmpty()) { + return false; + } + + byte[] rawKey = rawKey(key); + + Map hashes = new LinkedHashMap<>(m.size()); + + for (Map.Entry entry : m.entrySet()) { + hashes.put(rawHashKey(entry.getKey()), rawHashValue(entry.getValue())); + } + + return execute(connection -> connection.hashCommands().hSetEx(rawKey, hashes, condition, expiration)); + } + @Override public void put(@NonNull K key, @NonNull HK hashKey, HV value) { diff --git a/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java b/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java index 3f66d75bb1..3dc149940e 100644 --- a/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java +++ 
b/src/main/java/org/springframework/data/redis/core/DefaultReactiveHashOperations.java @@ -15,6 +15,7 @@ */ package org.springframework.data.redis.core; +import org.springframework.data.redis.connection.RedisHashCommands; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; @@ -123,6 +124,16 @@ public Mono> getAndDelete(H key, Collection hashKeys) { .flatMap(hks -> hashCommands.hGetDel(rawKey(key), hks)).map(this::deserializeHashValues)); } + @Override + public Mono putAndExpire(H key, Map map, RedisHashCommands.HashFieldSetOption condition, Expiration expiration) { + Assert.notNull(key, "Key must not be null"); + Assert.notNull(map, "Map must not be null"); + + return createMono(hashCommands -> Flux.fromIterable(() -> map.entrySet().iterator()) // + .collectMap(entry -> rawHashKey(entry.getKey()), entry -> rawHashValue(entry.getValue())) // + .flatMap(serialized -> hashCommands.hSetEx(rawKey(key), serialized, condition, expiration))); + } + @Override public Mono> getAndExpire(H key, Expiration expiration, Collection hashKeys) { diff --git a/src/main/java/org/springframework/data/redis/core/HashOperations.java b/src/main/java/org/springframework/data/redis/core/HashOperations.java index 3763a2553e..d89d00e920 100644 --- a/src/main/java/org/springframework/data/redis/core/HashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/HashOperations.java @@ -28,6 +28,7 @@ import org.jspecify.annotations.NullUnmarked; import org.jspecify.annotations.Nullable; import org.springframework.data.redis.connection.ExpirationOptions; +import org.springframework.data.redis.connection.RedisHashCommands; import org.springframework.data.redis.core.types.Expiration; import org.springframework.data.redis.core.types.Expirations; @@ -103,6 +104,19 @@ public interface HashOperations { */ List getAndExpire(@NonNull H key, Expiration expiration, @NonNull Collection<@NonNull HK> hashKeys); + /** + * Set multiple hash fields to multiple values using data 
provided in {@code m} with optional condition and expiration. + * + * @param key must not be {@literal null}. + * @param m must not be {@literal null}. + * @param condition is optional. + * @param expiration is optional. + * @return {@literal null} when used in pipeline / transaction. + * @since 4.0 + */ + Boolean putAndExpire(@NonNull H key, @NonNull Map m, + RedisHashCommands.HashFieldSetOption condition, Expiration expiration); + /** * Increment {@code value} of a hash {@code hashKey} by the given {@code delta}. * diff --git a/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java b/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java index a5de4b187d..b7c69c2396 100644 --- a/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java +++ b/src/main/java/org/springframework/data/redis/core/ReactiveHashOperations.java @@ -15,6 +15,7 @@ */ package org.springframework.data.redis.core; +import org.springframework.data.redis.connection.RedisHashCommands; import reactor.core.publisher.Flux; import reactor.core.publisher.Mono; @@ -95,6 +96,19 @@ public interface ReactiveHashOperations { */ Mono> getAndDelete(H key, Collection hashKeys); + /** + * Set multiple hash fields to multiple values using data provided in {@code m} with optional condition and expiration. + * + * @param key must not be {@literal null}. + * @param map must not be {@literal null}. + * @param condition is optional. + * @param expiration is optional. + * @return never {@literal null}. + * @since 4.0 + */ + Mono putAndExpire(H key, Map map, RedisHashCommands.HashFieldSetOption condition, + Expiration expiration); + /** * Get and optionally expire the value for given {@code hashKeys} from hash at {@code key}. Values are in the order of the * requested keys. Absent field values are represented using {@literal null} in the resulting {@link List}. 
diff --git a/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java b/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java index 472cfbed8c..056f443a93 100644 --- a/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/connection/AbstractConnectionIntegrationTests.java @@ -3865,6 +3865,118 @@ public void hGetExMultipleFieldsWithNonExistentFields() { Boolean.TRUE)); } + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExUpsertConditionSetsFieldsWithExpiration() { + + Map fieldMap = Map.of("field-1", "value-1", "field-2", "value-2"); + actual.add(connection.hSetEx("hash-hsetex", fieldMap, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.seconds(60))); + actual.add(connection.hExists("hash-hsetex", "field-1")); + actual.add(connection.hExists("hash-hsetex", "field-2")); + actual.add(connection.hGet("hash-hsetex", "field-1")); + actual.add(connection.hGet("hash-hsetex", "field-2")); + + verifyResults(Arrays.asList(Boolean.TRUE, Boolean.TRUE, Boolean.TRUE, "value-1", "value-2")); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfNoneExistConditionSucceedsWhenNoFieldsExist() { + + Map fieldMap = Map.of("field-1", "value-1", "field-2", "value-2"); + actual.add(connection.hSetEx("hash-hsetex", fieldMap, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(60))); + actual.add(connection.hExists("hash-hsetex", "field-1")); + actual.add(connection.hExists("hash-hsetex", "field-2")); + actual.add(connection.hGet("hash-hsetex", "field-1")); + actual.add(connection.hGet("hash-hsetex", "field-2")); + + verifyResults(Arrays.asList(Boolean.TRUE, Boolean.TRUE, Boolean.TRUE, "value-1", "value-2")); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfNoneExistConditionFailsWhenSomeFieldsExist() { + + 
actual.add(connection.hSet("hash-hsetex", "field-1", "existing-value")); + Map fieldMap = Map.of("field-1", "new-value", "field-2", "value-2"); + actual.add(connection.hSetEx("hash-hsetex", fieldMap, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(60))); + actual.add(connection.hGet("hash-hsetex", "field-1")); + actual.add(connection.hExists("hash-hsetex", "field-2")); + + verifyResults(Arrays.asList(Boolean.TRUE, Boolean.FALSE, "existing-value", Boolean.FALSE)); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfAllExistConditionSucceedsWhenAllFieldsExist() { + + actual.add(connection.hSet("hash-hsetex", "field-1", "old-value-1")); + actual.add(connection.hSet("hash-hsetex", "field-2", "old-value-2")); + Map fieldMap = Map.of("field-1", "new-value-1", "field-2", "new-value-2"); + actual.add(connection.hSetEx("hash-hsetex", fieldMap, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60))); + actual.add(connection.hGet("hash-hsetex", "field-1")); + actual.add(connection.hGet("hash-hsetex", "field-2")); + + verifyResults(Arrays.asList(Boolean.TRUE, Boolean.TRUE, Boolean.TRUE, "new-value-1", "new-value-2")); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfAllExistConditionFailsWhenSomeFieldsMissing() { + + actual.add(connection.hSet("hash-hsetex", "field-1", "existing-value")); + Map fieldMap = Map.of("field-1", "new-value", "field-2", "value-2"); + actual.add(connection.hSetEx("hash-hsetex", fieldMap, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60))); + actual.add(connection.hGet("hash-hsetex", "field-1")); + actual.add(connection.hExists("hash-hsetex", "field-2")); + + verifyResults(Arrays.asList(Boolean.TRUE, Boolean.FALSE, "existing-value", Boolean.FALSE)); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExWithDifferentExpirationPolicies() { + + // Test with seconds expiration + Map fieldMap1 = Map.of("field-1", 
"value-1"); + actual.add(connection.hSetEx("hash-hsetex-exp", fieldMap1, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.seconds(60))); + actual.add(connection.hExists("hash-hsetex-exp", "field-1")); + actual.add(connection.hGet("hash-hsetex-exp", "field-1")); + + // Test with milliseconds expiration + Map fieldMap2 = Map.of("field-2", "value-2"); + actual.add(connection.hSetEx("hash-hsetex-exp", fieldMap2, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.milliseconds(120000))); + actual.add(connection.hExists("hash-hsetex-exp", "field-2")); + actual.add(connection.hGet("hash-hsetex-exp", "field-2")); + + // Test with Duration expiration + Map fieldMap3 = Map.of("field-3", "value-3"); + actual.add(connection.hSetEx("hash-hsetex-exp", fieldMap3, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.from(Duration.ofMinutes(3)))); + actual.add(connection.hExists("hash-hsetex-exp", "field-3")); + actual.add(connection.hGet("hash-hsetex-exp", "field-3")); + + // Test with unix timestamp expiration (5 minutes from now) + long futureTimestamp = System.currentTimeMillis() / 1000 + 300; // 5 minutes from now + Map fieldMap4 = Map.of("field-4", "value-4"); + actual.add(connection.hSetEx("hash-hsetex-exp", fieldMap4, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.unixTimestamp(futureTimestamp, TimeUnit.SECONDS))); + actual.add(connection.hExists("hash-hsetex-exp", "field-4")); + actual.add(connection.hGet("hash-hsetex-exp", "field-4")); + + // Test with keepTtl expiration + Map fieldMap5 = Map.of("field-5", "value-5"); + actual.add(connection.hSetEx("hash-hsetex-exp", fieldMap5, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.keepTtl())); + actual.add(connection.hExists("hash-hsetex-exp", "field-5")); + actual.add(connection.hGet("hash-hsetex-exp", "field-5")); + + verifyResults(Arrays.asList( + Boolean.TRUE, Boolean.TRUE, "value-1", // seconds + Boolean.TRUE, Boolean.TRUE, "value-2", // milliseconds + Boolean.TRUE, Boolean.TRUE, 
"value-3", // Duration + Boolean.TRUE, Boolean.TRUE, "value-4", // unix timestamp + Boolean.TRUE, Boolean.TRUE, "value-5" // keepTtl + )); + } + @Test // DATAREDIS-694 void touchReturnsNrOfKeysTouched() { diff --git a/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java b/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java index dd8ffadab3..aea8fdc6bd 100644 --- a/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java +++ b/src/test/java/org/springframework/data/redis/connection/RedisConnectionUnitTests.java @@ -453,6 +453,10 @@ public List hGetEx(byte[] key, Expiration expiration, byte[]... fields) return delegate.hGetEx(key, expiration, fields); } + public Boolean hSetEx(byte[] key, Map hashes, HashFieldSetOption condition, Expiration expiration) { + return delegate.hSetEx(key, hashes, condition, expiration); + } + public Long zRem(byte[] key, byte[]... values) { return delegate.zRem(key, values); } diff --git a/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java b/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java index 4db8ba56fd..7c40d0a630 100644 --- a/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java +++ b/src/test/java/org/springframework/data/redis/connection/jedis/JedisClusterConnectionTests.java @@ -1391,6 +1391,77 @@ public void hGetExMultipleFieldsWithNonExistentFields() { assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); } + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExUpsertConditionSetsFieldsWithExpiration() { + + Map fieldMap = Map.of(KEY_2_BYTES, VALUE_1_BYTES, KEY_3_BYTES, VALUE_2_BYTES); + Boolean result = clusterConnection.hashCommands().hSetEx(KEY_1_BYTES, fieldMap, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.seconds(60)); + + 
assertThat(result).isTrue(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_3_BYTES)).isTrue(); + assertThat(clusterConnection.hashCommands().hGet(KEY_1_BYTES, KEY_2_BYTES)).isEqualTo(VALUE_1_BYTES); + assertThat(clusterConnection.hashCommands().hGet(KEY_1_BYTES, KEY_3_BYTES)).isEqualTo(VALUE_2_BYTES); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfNoneExistConditionSucceedsWhenNoFieldsExist() { + + Map fieldMap = Map.of(KEY_2_BYTES, VALUE_1_BYTES, KEY_3_BYTES, VALUE_2_BYTES); + Boolean result = clusterConnection.hashCommands().hSetEx(KEY_1_BYTES, fieldMap, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(60)); + + assertThat(result).isTrue(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_3_BYTES)).isTrue(); + assertThat(clusterConnection.hashCommands().hGet(KEY_1_BYTES, KEY_2_BYTES)).isEqualTo(VALUE_1_BYTES); + assertThat(clusterConnection.hashCommands().hGet(KEY_1_BYTES, KEY_3_BYTES)).isEqualTo(VALUE_2_BYTES); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfNoneExistConditionFailsWhenSomeFieldsExist() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + Map fieldMap = Map.of(KEY_2_BYTES, VALUE_2_BYTES, KEY_3_BYTES, VALUE_2_BYTES); + Boolean result = clusterConnection.hashCommands().hSetEx(KEY_1_BYTES, fieldMap, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(60)); + + assertThat(result).isFalse(); + assertThat(clusterConnection.hashCommands().hGet(KEY_1_BYTES, KEY_2_BYTES)).isEqualTo(VALUE_1_BYTES); // unchanged + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_3_BYTES)).isFalse(); // not set + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void 
hSetExIfAllExistConditionSucceedsWhenAllFieldsExist() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + + Map fieldMap = Map.of(KEY_2_BYTES, "new-value-1".getBytes(), KEY_3_BYTES, "new-value-2".getBytes()); + Boolean result = clusterConnection.hashCommands().hSetEx(KEY_1_BYTES, fieldMap, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60)); + + assertThat(result).isTrue(); + assertThat(clusterConnection.hashCommands().hGet(KEY_1_BYTES, KEY_2_BYTES)).isEqualTo("new-value-1".getBytes()); // updated + assertThat(clusterConnection.hashCommands().hGet(KEY_1_BYTES, KEY_3_BYTES)).isEqualTo("new-value-2".getBytes()); // updated + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfAllExistConditionFailsWhenSomeFieldsMissing() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + Map fieldMap = Map.of(KEY_2_BYTES, VALUE_2_BYTES, KEY_3_BYTES, VALUE_2_BYTES); + Boolean result = clusterConnection.hashCommands().hSetEx(KEY_1_BYTES, fieldMap, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60)); + + assertThat(result).isFalse(); + assertThat(clusterConnection.hashCommands().hGet(KEY_1_BYTES, KEY_2_BYTES)).isEqualTo(VALUE_1_BYTES); // unchanged + assertThat(clusterConnection.hashCommands().hExists(KEY_1_BYTES, KEY_3_BYTES)).isFalse(); // not set + } + @Test // DATAREDIS-315 public void hValsShouldRetrieveValuesCorrectly() { diff --git a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java index 33bf6d16d3..f2bff0a0a8 100644 --- a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java +++ b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceClusterConnectionTests.java @@ -1455,6 +1455,77 @@ public void hGetExMultipleFieldsWithNonExistentFields() { 
assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); } + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExUpsertConditionSetsFieldsWithExpiration() { + + Map fieldMap = Map.of(KEY_2_BYTES, VALUE_1_BYTES, KEY_3_BYTES, VALUE_2_BYTES); + Boolean result = clusterConnection.hashCommands().hSetEx(KEY_1_BYTES, fieldMap, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.seconds(60)); + + assertThat(result).isTrue(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_3_BYTES)).isTrue(); + assertThat(clusterConnection.hGet(KEY_1_BYTES, KEY_2_BYTES)).isEqualTo(VALUE_1_BYTES); + assertThat(clusterConnection.hGet(KEY_1_BYTES, KEY_3_BYTES)).isEqualTo(VALUE_2_BYTES); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfNoneExistConditionSucceedsWhenNoFieldsExist() { + + Map fieldMap = Map.of(KEY_2_BYTES, VALUE_1_BYTES, KEY_3_BYTES, VALUE_2_BYTES); + Boolean result = clusterConnection.hashCommands().hSetEx(KEY_1_BYTES, fieldMap, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(60)); + + assertThat(result).isTrue(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_2_BYTES)).isTrue(); + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_3_BYTES)).isTrue(); + assertThat(clusterConnection.hGet(KEY_1_BYTES, KEY_2_BYTES)).isEqualTo(VALUE_1_BYTES); + assertThat(clusterConnection.hGet(KEY_1_BYTES, KEY_3_BYTES)).isEqualTo(VALUE_2_BYTES); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfNoneExistConditionFailsWhenSomeFieldsExist() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + Map fieldMap = Map.of(KEY_2_BYTES, VALUE_2_BYTES, KEY_3_BYTES, VALUE_2_BYTES); + Boolean result = clusterConnection.hashCommands().hSetEx(KEY_1_BYTES, fieldMap, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(60)); + + assertThat(result).isFalse(); + 
assertThat(clusterConnection.hGet(KEY_1_BYTES, KEY_2_BYTES)).isEqualTo(VALUE_1_BYTES); // unchanged + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_3_BYTES)).isFalse(); // not set + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfAllExistConditionSucceedsWhenAllFieldsExist() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + nativeConnection.hset(KEY_1, KEY_3, VALUE_2); + + Map fieldMap = Map.of(KEY_2_BYTES, "new-value-1".getBytes(), KEY_3_BYTES, "new-value-2".getBytes()); + Boolean result = clusterConnection.hashCommands().hSetEx(KEY_1_BYTES, fieldMap, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60)); + + assertThat(result).isTrue(); + assertThat(clusterConnection.hGet(KEY_1_BYTES, KEY_2_BYTES)).isEqualTo("new-value-1".getBytes()); // updated + assertThat(clusterConnection.hGet(KEY_1_BYTES, KEY_3_BYTES)).isEqualTo("new-value-2".getBytes()); // updated + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + public void hSetExIfAllExistConditionFailsWhenSomeFieldsMissing() { + + nativeConnection.hset(KEY_1, KEY_2, VALUE_1); + + Map fieldMap = Map.of(KEY_2_BYTES, VALUE_2_BYTES, KEY_3_BYTES, VALUE_2_BYTES); + Boolean result = clusterConnection.hashCommands().hSetEx(KEY_1_BYTES, fieldMap, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60)); + + assertThat(result).isFalse(); + assertThat(clusterConnection.hGet(KEY_1_BYTES, KEY_2_BYTES)).isEqualTo(VALUE_1_BYTES); // unchanged + assertThat(clusterConnection.hExists(KEY_1_BYTES, KEY_3_BYTES)).isFalse(); // not set + } + @Test // DATAREDIS-315 public void hValsShouldRetrieveValuesCorrectly() { diff --git a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java index 496de6ab8c..7cc46f4d88 100644 --- 
a/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/connection/lettuce/LettuceReactiveHashCommandsIntegrationTests.java @@ -17,6 +17,7 @@ import static org.assertj.core.api.Assertions.*; +import org.springframework.data.redis.connection.RedisHashCommands; import org.springframework.data.redis.core.types.Expiration; import reactor.test.StepVerifier; @@ -490,4 +491,90 @@ void hGetExShouldHandleMultipleFieldsWithNonExistent() { assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isTrue(); } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void hSetExShouldSetFieldsWithUpsertCondition() { + + Map fieldMap = Map.of(FIELD_1_BBUFFER, VALUE_1_BBUFFER, FIELD_2_BBUFFER, VALUE_2_BBUFFER); + + connection.hashCommands().hSetEx(KEY_1_BBUFFER, fieldMap, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.seconds(60)) + .as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isTrue(); + assertThat(nativeCommands.hexists(KEY_1, FIELD_2)).isTrue(); + assertThat(nativeCommands.hget(KEY_1, FIELD_1)).isEqualTo(VALUE_1); + assertThat(nativeCommands.hget(KEY_1, FIELD_2)).isEqualTo(VALUE_2); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void hSetExShouldSucceedWithIfNoneExistWhenNoFieldsExist() { + + Map fieldMap = Map.of(FIELD_1_BBUFFER, VALUE_1_BBUFFER, FIELD_2_BBUFFER, VALUE_2_BBUFFER); + + connection.hashCommands().hSetEx(KEY_1_BBUFFER, fieldMap, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(60)) + .as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + assertThat(nativeCommands.hexists(KEY_1, FIELD_1)).isTrue(); + assertThat(nativeCommands.hexists(KEY_1, FIELD_2)).isTrue(); + assertThat(nativeCommands.hget(KEY_1, FIELD_1)).isEqualTo(VALUE_1); + assertThat(nativeCommands.hget(KEY_1, FIELD_2)).isEqualTo(VALUE_2); + } + + @Test // GH-3211 + 
@EnabledOnCommand("HSETEX") + void hSetExShouldFailWithIfNoneExistWhenSomeFieldsExist() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + + Map fieldMap = Map.of(FIELD_1_BBUFFER, VALUE_2_BBUFFER, FIELD_2_BBUFFER, VALUE_2_BBUFFER); + + connection.hashCommands().hSetEx(KEY_1_BBUFFER, fieldMap, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(60)) + .as(StepVerifier::create) + .expectNext(false) + .verifyComplete(); + + assertThat(nativeCommands.hget(KEY_1, FIELD_1)).isEqualTo(VALUE_1); // unchanged + assertThat(nativeCommands.hexists(KEY_1, FIELD_2)).isFalse(); // not set + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void hSetExShouldSucceedWithIfAllExistWhenAllFieldsExist() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + nativeCommands.hset(KEY_1, FIELD_2, VALUE_2); + + Map fieldMap = Map.of(FIELD_1_BBUFFER, VALUE_3_BBUFFER, FIELD_2_BBUFFER, VALUE_3_BBUFFER); + + connection.hashCommands().hSetEx(KEY_1_BBUFFER, fieldMap, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60)) + .as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + assertThat(nativeCommands.hget(KEY_1, FIELD_1)).isEqualTo(VALUE_3); // updated + assertThat(nativeCommands.hget(KEY_1, FIELD_2)).isEqualTo(VALUE_3); // updated + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void hSetExShouldFailWithIfAllExistWhenSomeFieldsMissing() { + + nativeCommands.hset(KEY_1, FIELD_1, VALUE_1); + + Map fieldMap = Map.of(FIELD_1_BBUFFER, VALUE_2_BBUFFER, FIELD_2_BBUFFER, VALUE_2_BBUFFER); + + connection.hashCommands().hSetEx(KEY_1_BBUFFER, fieldMap, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60)) + .as(StepVerifier::create) + .expectNext(false) + .verifyComplete(); + + assertThat(nativeCommands.hget(KEY_1, FIELD_1)).isEqualTo(VALUE_1); // unchanged + assertThat(nativeCommands.hexists(KEY_1, FIELD_2)).isFalse(); // not set + } } diff --git 
a/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java b/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java index 8b2baa51ea..adc58c5847 100644 --- a/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/core/DefaultHashOperationsIntegrationTests.java @@ -34,6 +34,7 @@ import org.springframework.data.redis.RawObjectFactory; import org.springframework.data.redis.StringObjectFactory; import org.springframework.data.redis.connection.ExpirationOptions; +import org.springframework.data.redis.connection.RedisHashCommands; import org.springframework.data.redis.connection.jedis.JedisConnectionFactory; import org.springframework.data.redis.connection.jedis.extension.JedisConnectionFactoryExtension; import org.springframework.data.redis.core.ExpireChanges.ExpiryChangeState; @@ -569,4 +570,164 @@ void testGetAndExpire() { List result5 = hashOps.getAndExpire(key, Expiration.seconds(60), Collections.emptyList()); assertThat(result5).isEmpty(); } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void testPutAndExpire() { + + K key = keyFactory.instance(); + HK key1 = hashKeyFactory.instance(); + HV val1 = hashValueFactory.instance(); + HK key2 = hashKeyFactory.instance(); + HV val2 = hashValueFactory.instance(); + HK key3 = hashKeyFactory.instance(); + HV val3 = hashValueFactory.instance(); + + // Test UPSERT condition - should always set fields + Map fieldMap1 = Map.of(key1, val1, key2, val2); + Boolean result1 = hashOps.putAndExpire(key, fieldMap1, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.seconds(60)); + assertThat(result1).isTrue(); + + // Verify fields were set and exist + assertThat(hashOps.hasKey(key, key1)).isTrue(); + assertThat(hashOps.hasKey(key, key2)).isTrue(); + assertThat(hashOps.get(key, key1)).isEqualTo(val1); + assertThat(hashOps.get(key, key2)).isEqualTo(val2); + + // Test 
IF_NONE_EXIST condition - should not change existing fields + Map fieldMap2 = Map.of(key1, val3, key3, val3); + Boolean result2 = hashOps.putAndExpire(key, fieldMap2, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(120)); + assertThat(result2).isFalse(); + + // Verify original values unchanged (IF_NONE_EXIST failed because key1 exists) + assertThat(hashOps.get(key, key1)).isEqualTo(val1); + assertThat(hashOps.hasKey(key, key3)).isFalse(); + + // Test IF_ALL_EXIST condition - should succeed because all fields exist + Map fieldMap3 = Map.of(key1, val3, key2, val3); + Boolean result3 = hashOps.putAndExpire(key, fieldMap3, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(180)); + assertThat(result3).isTrue(); + + // Verify values were updated + assertThat(hashOps.get(key, key1)).isEqualTo(val3); + assertThat(hashOps.get(key, key2)).isEqualTo(val3); + + // Test IF_ALL_EXIST condition with non-existent field - should not change anything + HK nonExistentKey = hashKeyFactory.instance(); + Map fieldMap4 = Map.of(key1, val1, nonExistentKey, val1); + Boolean result4 = hashOps.putAndExpire(key, fieldMap4, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60)); + assertThat(result4).isFalse(); + + // Verify values unchanged (IF_ALL_EXIST failed because nonExistentKey doesn't exist) + assertThat(hashOps.get(key, key1)).isEqualTo(val3); + assertThat(hashOps.hasKey(key, nonExistentKey)).isFalse(); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void testBoundHashOperationsPutAndExpire() { + + K key = keyFactory.instance(); + HK key1 = hashKeyFactory.instance(); + HV val1 = hashValueFactory.instance(); + HK key2 = hashKeyFactory.instance(); + HV val2 = hashValueFactory.instance(); + HK key3 = hashKeyFactory.instance(); + HV val3 = hashValueFactory.instance(); + + BoundHashOperations boundHashOps = redisTemplate.boundHashOps(key); + + // Test UPSERT condition - should always set fields + Map fieldMap1 = 
Map.of(key1, val1, key2, val2); + boundHashOps.putAndExpire(fieldMap1, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.seconds(60)); + + // Verify fields were set and exist + assertThat(boundHashOps.hasKey(key1)).isTrue(); + assertThat(boundHashOps.hasKey(key2)).isTrue(); + assertThat(boundHashOps.get(key1)).isEqualTo(val1); + assertThat(boundHashOps.get(key2)).isEqualTo(val2); + + // Test IF_NONE_EXIST condition - should not change existing fields + Map fieldMap2 = Map.of(key1, val3, key3, val3); + boundHashOps.putAndExpire(fieldMap2, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(120)); + + // Verify original values unchanged (IF_NONE_EXIST failed because key1 exists) + assertThat(boundHashOps.get(key1)).isEqualTo(val1); + assertThat(boundHashOps.hasKey(key3)).isFalse(); + + // Test IF_ALL_EXIST condition - should succeed because all fields exist + Map fieldMap3 = Map.of(key1, val3, key2, val3); + boundHashOps.putAndExpire(fieldMap3, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(180)); + + // Verify values were updated + assertThat(boundHashOps.get(key1)).isEqualTo(val3); + assertThat(boundHashOps.get(key2)).isEqualTo(val3); + + // Test IF_ALL_EXIST condition with non-existent field - should not change anything + HK nonExistentKey = hashKeyFactory.instance(); + Map fieldMap4 = Map.of(key1, val1, nonExistentKey, val1); + boundHashOps.putAndExpire(fieldMap4, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60)); + + // Verify values unchanged (IF_ALL_EXIST failed because nonExistentKey doesn't exist) + assertThat(boundHashOps.get(key1)).isEqualTo(val3); + assertThat(boundHashOps.hasKey(nonExistentKey)).isFalse(); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void testPutAndExpireWithDifferentExpirationPolicies() { + + K key = keyFactory.instance(); + HK key1 = hashKeyFactory.instance(); + HV val1 = hashValueFactory.instance(); + HK key2 = hashKeyFactory.instance(); + HV val2 = 
hashValueFactory.instance(); + HK key3 = hashKeyFactory.instance(); + HV val3 = hashValueFactory.instance(); + HK key4 = hashKeyFactory.instance(); + HV val4 = hashValueFactory.instance(); + HK key5 = hashKeyFactory.instance(); + HV val5 = hashValueFactory.instance(); + + // Test with seconds expiration + Map fieldMap1 = Map.of(key1, val1); + Boolean result1 = hashOps.putAndExpire(key, fieldMap1, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.seconds(60)); + assertThat(result1).isTrue(); + assertThat(hashOps.hasKey(key, key1)).isTrue(); + assertThat(hashOps.get(key, key1)).isEqualTo(val1); + + // Test with milliseconds expiration + Map fieldMap2 = Map.of(key2, val2); + Boolean result2 = hashOps.putAndExpire(key, fieldMap2, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.milliseconds(120000)); + assertThat(result2).isTrue(); + assertThat(hashOps.hasKey(key, key2)).isTrue(); + assertThat(hashOps.get(key, key2)).isEqualTo(val2); + + // Test with Duration expiration + Map fieldMap3 = Map.of(key3, val3); + Boolean result3 = hashOps.putAndExpire(key, fieldMap3, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.from(Duration.ofMinutes(3))); + assertThat(result3).isTrue(); + assertThat(hashOps.hasKey(key, key3)).isTrue(); + assertThat(hashOps.get(key, key3)).isEqualTo(val3); + + // Test with unix timestamp expiration (5 minutes from now) + long futureTimestamp = System.currentTimeMillis() / 1000 + 300; // 5 minutes from now + Map fieldMap4 = Map.of(key4, val4); + Boolean result4 = hashOps.putAndExpire(key, fieldMap4, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.unixTimestamp(futureTimestamp, TimeUnit.SECONDS)); + assertThat(result4).isTrue(); + assertThat(hashOps.hasKey(key, key4)).isTrue(); + assertThat(hashOps.get(key, key4)).isEqualTo(val4); + + // Test with keepTtl expiration (should preserve existing TTL) + // First set a field with TTL, then update it with keepTtl + hashOps.put(key, key5, val5); + hashOps.expire(key, 
Duration.ofMinutes(4), Arrays.asList(key5)); // Set initial TTL + + Map fieldMap5 = Map.of(key5, val5); + Boolean result5 = hashOps.putAndExpire(key, fieldMap5, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.keepTtl()); + assertThat(result5).isTrue(); + assertThat(hashOps.hasKey(key, key5)).isTrue(); + assertThat(hashOps.get(key, key5)).isEqualTo(val5); + // TTL should be preserved (we can't easily test the exact value, but field should still exist) + } } diff --git a/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java b/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java index bd4e606acc..2279e1b07b 100644 --- a/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java +++ b/src/test/java/org/springframework/data/redis/core/DefaultReactiveHashOperationsIntegrationTests.java @@ -19,6 +19,7 @@ import static org.assertj.core.api.Assumptions.*; import static org.junit.jupiter.api.condition.OS.*; +import org.springframework.data.redis.connection.RedisHashCommands; import org.springframework.data.redis.core.types.Expiration; import reactor.test.StepVerifier; @@ -909,4 +910,225 @@ void getAndExpireNonExistentKey() { }) .verifyComplete(); } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void putAndExpireUpsert() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + HV hashvalue1 = hashValueFactory.instance(); + HK hashkey2 = hashKeyFactory.instance(); + HV hashvalue2 = hashValueFactory.instance(); + + Map fieldMap = Map.of(hashkey1, hashvalue1, hashkey2, hashvalue2); + + hashOperations.putAndExpire(key, fieldMap, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.seconds(60)) + .as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + // Verify 
fields were set + hashOperations.hasKey(key, hashkey1).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey2).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.get(key, hashkey1).as(StepVerifier::create) + .expectNext(hashvalue1) + .verifyComplete(); + + hashOperations.get(key, hashkey2).as(StepVerifier::create) + .expectNext(hashvalue2) + .verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void putAndExpireIfNoneExist() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + HV hashvalue1 = hashValueFactory.instance(); + HK hashkey2 = hashKeyFactory.instance(); + HV hashvalue2 = hashValueFactory.instance(); + HK hashkey3 = hashKeyFactory.instance(); + HV hashvalue3 = hashValueFactory.instance(); + + // Set up existing field + hashOperations.put(key, hashkey1, hashvalue1).as(StepVerifier::create).expectNext(true).verifyComplete(); + + // Try to set fields where one already exists - should fail + Map fieldMap = Map.of(hashkey1, hashvalue2, hashkey2, hashvalue2); + + hashOperations.putAndExpire(key, fieldMap, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(60)) + .as(StepVerifier::create) + .expectNext(false) + .verifyComplete(); + + // Verify original value unchanged and new field not set + hashOperations.get(key, hashkey1).as(StepVerifier::create) + .expectNext(hashvalue1) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey2).as(StepVerifier::create) + .expectNext(false) + .verifyComplete(); + + // Try with all new fields - should succeed + Map newFieldMap = Map.of(hashkey2, hashvalue2, hashkey3, hashvalue3); + + hashOperations.putAndExpire(key, newFieldMap, RedisHashCommands.HashFieldSetOption.ifNoneExist(), Expiration.seconds(120)) + .as(StepVerifier::create) + 
.expectNext(true) + .verifyComplete(); + + // Verify new fields were set + hashOperations.hasKey(key, hashkey2).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey3).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void putAndExpireIfAllExist() { + + assumeThat(hashKeyFactory instanceof StringObjectFactory && hashValueFactory instanceof StringObjectFactory) + .isTrue(); + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + HV hashvalue1 = hashValueFactory.instance(); + HK hashkey2 = hashKeyFactory.instance(); + HV hashvalue2 = hashValueFactory.instance(); + HK hashkey3 = hashKeyFactory.instance(); + HV hashvalue3 = hashValueFactory.instance(); + + // Set up existing fields + putAll(key, hashkey1, hashvalue1, hashkey2, hashvalue2); + + // Try to update existing fields - should succeed + Map fieldMap = Map.of(hashkey1, hashvalue3, hashkey2, hashvalue3); + + hashOperations.putAndExpire(key, fieldMap, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(60)) + .as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + // Verify values were updated + hashOperations.get(key, hashkey1).as(StepVerifier::create) + .expectNext(hashvalue3) + .verifyComplete(); + + hashOperations.get(key, hashkey2).as(StepVerifier::create) + .expectNext(hashvalue3) + .verifyComplete(); + + // Try with non-existent field - should fail + Map mixedFieldMap = Map.of(hashkey1, hashvalue1, hashkey3, hashvalue1); + + hashOperations.putAndExpire(key, mixedFieldMap, RedisHashCommands.HashFieldSetOption.ifAllExist(), Expiration.seconds(120)) + .as(StepVerifier::create) + .expectNext(false) + .verifyComplete(); + + // Verify values unchanged + hashOperations.get(key, hashkey1).as(StepVerifier::create) + .expectNext(hashvalue3) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey3).as(StepVerifier::create) + 
.expectNext(false) + .verifyComplete(); + } + + @Test // GH-3211 + @EnabledOnCommand("HSETEX") + void putAndExpireWithDifferentExpirationPolicies() { + + K key = keyFactory.instance(); + HK hashkey1 = hashKeyFactory.instance(); + HV hashvalue1 = hashValueFactory.instance(); + HK hashkey2 = hashKeyFactory.instance(); + HV hashvalue2 = hashValueFactory.instance(); + HK hashkey3 = hashKeyFactory.instance(); + HV hashvalue3 = hashValueFactory.instance(); + HK hashkey4 = hashKeyFactory.instance(); + HV hashvalue4 = hashValueFactory.instance(); + HK hashkey5 = hashKeyFactory.instance(); + HV hashvalue5 = hashValueFactory.instance(); + + // Test with seconds expiration + Map fieldMap1 = Map.of(hashkey1, hashvalue1); + hashOperations.putAndExpire(key, fieldMap1, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.seconds(60)) + .as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey1).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + // Test with milliseconds expiration + Map fieldMap2 = Map.of(hashkey2, hashvalue2); + hashOperations.putAndExpire(key, fieldMap2, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.milliseconds(120000)) + .as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey2).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + // Test with Duration expiration + Map fieldMap3 = Map.of(hashkey3, hashvalue3); + hashOperations.putAndExpire(key, fieldMap3, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.from(Duration.ofMinutes(3))) + .as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey3).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + // Test with unix timestamp expiration (5 minutes from now) + long futureTimestamp = System.currentTimeMillis() / 1000 + 300; // 5 minutes from now + Map fieldMap4 = Map.of(hashkey4, hashvalue4); + 
hashOperations.putAndExpire(key, fieldMap4, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.unixTimestamp(futureTimestamp, TimeUnit.SECONDS)) + .as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey4).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + // Test with keepTtl expiration + // First set a field with TTL, then update it with keepTtl + hashOperations.put(key, hashkey5, hashvalue5).as(StepVerifier::create).expectNext(true).verifyComplete(); + hashOperations.expire(key, Duration.ofMinutes(4), Arrays.asList(hashkey5)).as(StepVerifier::create).expectNextCount(1).verifyComplete(); + + Map fieldMap5 = Map.of(hashkey5, hashvalue5); + hashOperations.putAndExpire(key, fieldMap5, RedisHashCommands.HashFieldSetOption.upsert(), Expiration.keepTtl()) + .as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + + hashOperations.hasKey(key, hashkey5).as(StepVerifier::create) + .expectNext(true) + .verifyComplete(); + } }