mirror of https://codeberg.org/redict/redict.git
Commit ae020e3d56
## Move library metadata to be part of the library payload

Following the discussion on https://github.com/redis/redis/issues/10429 and the intention to add (in the future) library versioning support, we believe that the entire library metadata (like name and engine) should be part of the library payload and not provided by the `FUNCTION LOAD` command. The reasoning behind this is that the programmer who developed the library should be the one who sets those values (name, engine, and in the future also version). **It is not the responsibility of the admin who loads the library into the database.**

The PR moves all the library metadata (engine and library name) into the library payload. The metadata needs to be provided on the first line of the payload using the shebang format (`#!<engine> name=<name>`), for example:

```lua
#!lua name=test
redis.register_function('foo', function() return 1 end)
```

The above script will run on the Lua engine and will create a library called `test`.

## API Changes (compared to 7.0 rc2)

* The `FUNCTION LOAD` command was changed: it now simply takes the library payload and extracts the engine and name from the payload. In addition, the command now returns the library name, which can later be used with `FUNCTION DELETE` and `FUNCTION LIST`.
* The description field was completely removed from `FUNCTION LOAD` and `FUNCTION LIST`.

## Breaking Changes (compared to 7.0 rc2)

* The library description was removed (we can re-add it in the future, either as part of the shebang line or as an additional line).
* Loading an AOF file that was generated by either 7.0 rc1 or 7.0 rc2 will fail because the old command syntax is invalid.

## Notes

* Loading an RDB file that was generated by rc1 / rc2 **is** supported: Redis will automatically add the shebang to the library payloads (we can probably delete that code after 7.0.3 or so, since there's no need to keep supporting upgrades from an RC build).
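To make the API change concrete, here is a minimal sketch in the style of the Tcl test suite shown below. It is not part of the committed file: it assumes a connected server handle `r` and the suite's `assert_equal` helper, and the names `mylib` / `myfunc` are placeholders.

```tcl
# Hypothetical example: the engine and library name come from the shebang
# line of the payload, and FUNCTION LOAD replies with the library name.
set libname [r FUNCTION LOAD {#!lua name=mylib
    redis.register_function('myfunc', function() return 1 end)
}]
assert_equal $libname mylib          ;# the reply is the library name
assert_equal [r fcall myfunc 0] 1    ;# the registered function is callable
r FUNCTION DELETE $libname           ;# the same name works for FUNCTION DELETE
```

Because the reply is the library name taken from the shebang, a caller no longer has to pass or track the name separately from the payload.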
225 lines
7.6 KiB
Tcl
start_server {tags {"aofrw external:skip"}} {
    # Enable the AOF
    r config set appendonly yes
    r config set auto-aof-rewrite-percentage 0 ; # Disable auto-rewrite.
    waitForBgrewriteaof r

    foreach rdbpre {yes no} {
        r config set aof-use-rdb-preamble $rdbpre
        test "AOF rewrite during write load: RDB preamble=$rdbpre" {
            # Start a write load for 10 seconds
            set master [srv 0 client]
            set master_host [srv 0 host]
            set master_port [srv 0 port]
            set load_handle0 [start_write_load $master_host $master_port 10]
            set load_handle1 [start_write_load $master_host $master_port 10]
            set load_handle2 [start_write_load $master_host $master_port 10]
            set load_handle3 [start_write_load $master_host $master_port 10]
            set load_handle4 [start_write_load $master_host $master_port 10]

            # Make sure the instance is really receiving data
            wait_for_condition 50 100 {
                [r dbsize] > 0
            } else {
                fail "No write load detected."
            }

            # After 3 seconds, start a rewrite, while the write load is still
            # active.
            after 3000
            r bgrewriteaof
            waitForBgrewriteaof r

            # Let it run a bit more so that we'll append some data to the new
            # AOF.
            after 1000

            # Stop the processes generating the load if they are still active
            stop_write_load $load_handle0
            stop_write_load $load_handle1
            stop_write_load $load_handle2
            stop_write_load $load_handle3
            stop_write_load $load_handle4

            # Make sure no more commands processed, before taking debug digest
            wait_load_handlers_disconnected

            # Get the data set digest
            set d1 [debug_digest]

            # Load the AOF
            r debug loadaof
            set d2 [debug_digest]

            # Make sure they are the same
            assert {$d1 eq $d2}
        }
    }
}

start_server {tags {"aofrw external:skip"} overrides {aof-use-rdb-preamble no}} {
    test {Turning off AOF kills the background writing child if any} {
        r config set appendonly yes
        waitForBgrewriteaof r

        # start a slow AOFRW
        r set k v
        r config set rdb-key-save-delay 10000000
        r bgrewriteaof

        # disable AOF and wait for the child to be killed
        r config set appendonly no
        wait_for_condition 50 100 {
            [string match {*Killing*AOF*child*} [exec tail -5 < [srv 0 stdout]]]
        } else {
            fail "Can't find 'Killing AOF child' into recent logs"
        }
        r config set rdb-key-save-delay 0
    }

    foreach d {string int} {
        foreach e {quicklist} {
            test "AOF rewrite of list with $e encoding, $d data" {
                r flushall
                set len 1000
                for {set j 0} {$j < $len} {incr j} {
                    if {$d eq {string}} {
                        set data [randstring 0 16 alpha]
                    } else {
                        set data [randomInt 4000000000]
                    }
                    r lpush key $data
                }
                assert_equal [r object encoding key] $e
                set d1 [debug_digest]
                r bgrewriteaof
                waitForBgrewriteaof r
                r debug loadaof
                set d2 [debug_digest]
                if {$d1 ne $d2} {
                    error "assertion:$d1 is not equal to $d2"
                }
            }
        }
    }

    foreach d {string int} {
        foreach e {intset hashtable} {
            test "AOF rewrite of set with $e encoding, $d data" {
                r flushall
                if {$e eq {intset}} {set len 10} else {set len 1000}
                for {set j 0} {$j < $len} {incr j} {
                    if {$d eq {string}} {
                        set data [randstring 0 16 alpha]
                    } else {
                        set data [randomInt 4000000000]
                    }
                    r sadd key $data
                }
                if {$d ne {string}} {
                    assert_equal [r object encoding key] $e
                }
                set d1 [debug_digest]
                r bgrewriteaof
                waitForBgrewriteaof r
                r debug loadaof
                set d2 [debug_digest]
                if {$d1 ne $d2} {
                    error "assertion:$d1 is not equal to $d2"
                }
            }
        }
    }

    foreach d {string int} {
        foreach e {listpack hashtable} {
            test "AOF rewrite of hash with $e encoding, $d data" {
                r flushall
                if {$e eq {listpack}} {set len 10} else {set len 1000}
                for {set j 0} {$j < $len} {incr j} {
                    if {$d eq {string}} {
                        set data [randstring 0 16 alpha]
                    } else {
                        set data [randomInt 4000000000]
                    }
                    r hset key $data $data
                }
                assert_equal [r object encoding key] $e
                set d1 [debug_digest]
                r bgrewriteaof
                waitForBgrewriteaof r
                r debug loadaof
                set d2 [debug_digest]
                if {$d1 ne $d2} {
                    error "assertion:$d1 is not equal to $d2"
                }
            }
        }
    }

    foreach d {string int} {
        foreach e {listpack skiplist} {
            test "AOF rewrite of zset with $e encoding, $d data" {
                r flushall
                if {$e eq {listpack}} {set len 10} else {set len 1000}
                for {set j 0} {$j < $len} {incr j} {
                    if {$d eq {string}} {
                        set data [randstring 0 16 alpha]
                    } else {
                        set data [randomInt 4000000000]
                    }
                    r zadd key [expr rand()] $data
                }
                assert_equal [r object encoding key] $e
                set d1 [debug_digest]
                r bgrewriteaof
                waitForBgrewriteaof r
                r debug loadaof
                set d2 [debug_digest]
                if {$d1 ne $d2} {
                    error "assertion:$d1 is not equal to $d2"
                }
            }
        }
    }

    test "AOF rewrite functions" {
        r flushall
        # Load a library using the new shebang payload, rewrite the AOF,
        # flush all functions, then reload the AOF and verify the function
        # was preserved by the rewrite.
        r FUNCTION LOAD {#!lua name=test
            redis.register_function('test', function() return 1 end)
        }
        r bgrewriteaof
        waitForBgrewriteaof r
        r function flush
        r debug loadaof
        assert_equal [r fcall test 0] 1
        r FUNCTION LIST
    } {{library_name test engine LUA functions {{name test description {} flags {}}}}}

    test {BGREWRITEAOF is delayed if BGSAVE is in progress} {
        r flushall
        r set k v
        r config set rdb-key-save-delay 10000000
        r bgsave
        assert_match {*scheduled*} [r bgrewriteaof]
        assert_equal [s aof_rewrite_scheduled] 1
        r config set rdb-key-save-delay 0
        catch {exec kill -9 [get_child_pid 0]}
        while {[s aof_rewrite_scheduled] eq 1} {
            after 100
        }
    }

    test {BGREWRITEAOF is refused if already in progress} {
        r config set aof-use-rdb-preamble yes
        r config set rdb-key-save-delay 10000000
        catch {
            r bgrewriteaof
            r bgrewriteaof
        } e
        assert_match {*ERR*already*} $e
        r config set rdb-key-save-delay 0
        catch {exec kill -9 [get_child_pid 0]}
    }
}