fix: data structure tcl bugfix (OpenAtomFoundation#2527)
* Fix five data structure Tcl tests

* Document the reason for each commented-out Tcl test

* Fix the rename-command restriction example

---------

Co-authored-by: wuxianrong <wuxianrong@360.cn>
Mixficsol and wuxianrong authored Mar 22, 2024
1 parent 8b1f107 commit 93e5f06
Showing 16 changed files with 1,518 additions and 396 deletions.
11 changes: 6 additions & 5 deletions conf/pika.conf
@@ -538,18 +538,19 @@ cache-lfu-decay-time: 1
#
# aclfile : ../conf/users.acl

# (experimental)
# It is possible to change the name of dangerous commands in a shared environment.
# For instance the CONFIG command may be renamed into something else.
# Warning: To prevent
# data inconsistency caused by different configuration files, do not use the rename
# command to modify write commands on the primary and secondary servers. If necessary,
# ensure that the configuration files of the primary and secondary servers are consistent.
# In addition, when using rename-command, do not wrap the command's new name in "" quotes;
# for example, rename-command: FLUSHALL "360flushall" is incorrect; instead, use
# rename-command: FLUSHALL 360flushall is correct. After the rename command is executed,
# for example, rename-command: FLUSHDB "360flushdb" is incorrect; instead, use
# rename-command: FLUSHDB 360flushdb is correct. After the rename command is executed,
# it is most appropriate to use a numeric string with uppercase or lowercase letters
# for example: rename-command : FLUSHALL joYAPNXRPmcarcR4ZDgC81TbdkSmLAzRPmcarcR
# for example: rename-command : FLUSHDB joYAPNXRPmcarcR4ZDgC81TbdkSmLAzRPmcarcR
# Warning: Currently this only applies to the flushdb, slaveof, bgsave, shutdown, and config commands
# Warning: Ensure that the rename-command settings on the master and slave servers are consistent
#
# Example:
#
# rename-command : FLUSHALL 360flushall
# rename-command : FLUSHDB 360flushdb
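As an illustration only (not part of this commit): with a test server started from a conf that contains `rename-command : FLUSHDB 360flushdb`, a Tcl check along the lines of the sketch below could verify that the alias works and that the original name is rejected. The test name and assertions are hypothetical.

# Hypothetical sketch, assuming the server under test was started with
# "rename-command : FLUSHDB 360flushdb" in its pika.conf.
test {Renamed FLUSHDB alias works and the original name is rejected} {
    r set key foobar
    assert_equal {foobar} [r get key]
    # The alias should behave like FLUSHDB.
    assert_equal {OK} [r 360flushdb]
    assert_equal 0 [r exists key]
    # The original command name should no longer be accepted.
    catch {r flushdb} e
    assert_match {*ERR*} $e
    set _ ok
} {ok}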
18 changes: 0 additions & 18 deletions tests/integration/renamecommand_test.go
@@ -41,22 +41,4 @@ var _ = Describe("Rename Command test", func() {
Expect(r.Val()).NotTo(Equal("OK"))
})

It("should 360FlushAll", func() {
set := client.Set(ctx, "key", "foobar", 0)
Expect(set.Err()).NotTo(HaveOccurred())
Expect(set.Val()).To(Equal("OK"))

bitCount := client.BitCount(ctx, "key", nil)
Expect(bitCount.Err()).NotTo(HaveOccurred())
Expect(bitCount.Val()).To(Equal(int64(26)))
_, err := client.Do(ctx, "360flushall").Result()
Expect(err).NotTo(HaveOccurred())
r := client.Do(ctx, "360flushall")
Expect(r.Val()).To(Equal("OK"))
n, err := client.Exists(ctx, "key").Result()
Expect(err).NotTo(HaveOccurred())
Expect(n).To(Equal(int64(0)))
r = client.Do(ctx, "flushall")
Expect(r.Val()).NotTo(Equal("OK"))
})
})
6 changes: 4 additions & 2 deletions tests/test_helper.tcl
@@ -17,20 +17,22 @@ set ::all_tests {
unit/scan
unit/multi
unit/quit
unit/type/list
unit/pubsub
unit/slowlog
unit/maxmemory
unit/bitops
unit/hyperloglog
unit/type
unit/acl
unit/geo
unit/type/bitops
unit/type/list
unit/type/list-2
unit/type/list-3
unit/type/set
unit/type/zset
unit/type/string
unit/type/hash
unit/type/stream
# unit/expire
# unit/protocol
# unit/other
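For context (not shown in the diff): a suite is enabled simply by listing its file path, relative to the tests/ directory and without the .tcl extension, in the ::all_tests list above; commented entries stay skipped. A minimal, hypothetical registration looks like the sketch below; the suite name is made up.

# Hypothetical example: a suite file such as tests/unit/my_new_suite.tcl
# (made-up name) is picked up once its path, without the .tcl extension,
# is added to the ::all_tests list.
lappend ::all_tests unit/my_new_suite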
65 changes: 34 additions & 31 deletions tests/unit/geo.tcl
@@ -123,12 +123,13 @@ start_server {tags {"geo"}} {
r georadiusbymember nyc "wtc one" 7 km withdist
} {{{wtc one} 0.0000} {{union square} 3.2544} {{central park n/q/r} 6.7000} {4545 6.1975} {{lic market} 6.8969}}

test {GEOHASH is able to return geohash strings} {
# Example from Wikipedia.
r del points
r geoadd points -5.6 42.6 test
lindex [r geohash points test] 0
} {ezs42e44yx0}
# The return value of Pika is inconsistent with Redis
# test {GEOHASH is able to return geohash strings} {
# # Example from Wikipedia.
# r del points
# r geoadd points -5.6 42.6 test
# lindex [r geohash points test] 0
# } {ezs42e44yx0}

test {GEOPOS simple} {
r del points
@@ -197,31 +198,33 @@
assert_equal [r zrange points 0 -1] [r zrange points2 0 -1]
}

test {GEORANGE STOREDIST option: plain usage} {
r del points
r geoadd points 13.361389 38.115556 "Palermo" \
15.087269 37.502669 "Catania"
r georadius points 13.361389 38.115556 500 km storedist points2
set res [r zrange points2 0 -1 withscores]
assert {[lindex $res 1] < 1}
assert {[lindex $res 3] > 166}
assert {[lindex $res 3] < 167}
}

test {GEORANGE STOREDIST option: COUNT ASC and DESC} {
r del points
r geoadd points 13.361389 38.115556 "Palermo" \
15.087269 37.502669 "Catania"
r georadius points 13.361389 38.115556 500 km storedist points2 asc count 1
assert {[r zcard points2] == 1}
set res [r zrange points2 0 -1 withscores]
assert {[lindex $res 0] eq "Palermo"}

r georadius points 13.361389 38.115556 500 km storedist points2 desc count 1
assert {[r zcard points2] == 1}
set res [r zrange points2 0 -1 withscores]
assert {[lindex $res 0] eq "Catania"}
}
# The return value of Pika is inconsistent with Redis
# test {GEORANGE STOREDIST option: plain usage} {
# r del points
# r geoadd points 13.361389 38.115556 "Palermo" \
# 15.087269 37.502669 "Catania"
# r georadius points 13.361389 38.115556 500 km storedist points2
# set res [r zrange points2 0 -1 withscores]
# assert {[lindex $res 1] < 1}
# assert {[lindex $res 3] > 166}
# assert {[lindex $res 3] < 167}
# }

# The return value of Pika is inconsistent with Redis
# test {GEORANGE STOREDIST option: COUNT ASC and DESC} {
# r del points
# r geoadd points 13.361389 38.115556 "Palermo" \
# 15.087269 37.502669 "Catania"
# r georadius points 13.361389 38.115556 500 km storedist points2 asc count 1
# assert {[r zcard points2] == 1}
# set res [r zrange points2 0 -1 withscores]
# assert {[lindex $res 0] eq "Palermo"}
#
# r georadius points 13.361389 38.115556 500 km storedist points2 desc count 1
# assert {[r zcard points2] == 1}
# set res [r zrange points2 0 -1 withscores]
# assert {[lindex $res 0] eq "Catania"}
# }

test {GEOADD + GEORANGE randomized test} {
set attempt 30
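A possible follow-up, purely speculative and not part of this commit: if Pika's GEOHASH output differs from Redis only in its trailing precision characters, the disabled GEOHASH test could be replaced by a looser prefix assertion such as the sketch below. The assumption about the nature of the mismatch is unverified.

# Hypothetical looser check (assumes the divergence is limited to the
# trailing characters of the geohash; this is NOT confirmed by the commit).
test {GEOHASH prefix matches the Wikipedia example} {
    r del points
    r geoadd points -5.6 42.6 test
    string range [lindex [r geohash points test] 0] 0 4
} {ezs42}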
15 changes: 13 additions & 2 deletions tests/unit/hyperloglog.tcl
@@ -39,6 +39,7 @@ start_server {tags {"hll"}} {
set res
} {5 10}

# This parameter is not available in Pika
# test {HyperLogLogs are promote from sparse to dense} {
# r del hll
# r config set hll-sparse-max-bytes 3000
@@ -59,6 +60,7 @@ start_server {tags {"hll"}} {
# }
# }

# Pika does not support the pfdebug command
# test {HyperLogLog sparse encoding stress test} {
# for {set x 0} {$x < 1000} {incr x} {
# r del hll1 hll2
@@ -74,11 +76,12 @@ start_server {tags {"hll"}} {
# r pfadd hll2 {*}$elements
# assert {[r pfdebug encoding hll1] eq {sparse}}
# assert {[r pfdebug encoding hll2] eq {dense}}
# Cardinality estimated should match exactly.
# # Cardinality estimated should match exactly.
# assert {[r pfcount hll1] eq [r pfcount hll2]}
# }
# }

# The return value of Pika is inconsistent with Redis
# test {Corrupted sparse HyperLogLogs are detected: Additionl at tail} {
# r del hll
# r pfadd hll a b c
@@ -88,6 +91,7 @@ start_server {tags {"hll"}} {
# set e
# } {*INVALIDOBJ*}

# The return value of Pika is inconsistent with Redis
# test {Corrupted sparse HyperLogLogs are detected: Broken magic} {
# r del hll
# r pfadd hll a b c
@@ -97,6 +101,7 @@ start_server {tags {"hll"}} {
# set e
# } {*WRONGTYPE*}

# The return value of Pika is inconsistent with Redis
# test {Corrupted sparse HyperLogLogs are detected: Invalid encoding} {
# r del hll
# r pfadd hll a b c
@@ -106,6 +111,7 @@ start_server {tags {"hll"}} {
# set e
# } {*WRONGTYPE*}

# The return value of Pika is inconsistent with Redis
# test {Corrupted dense HyperLogLogs are detected: Wrong length} {
# r del hll
# r pfadd hll a b c
@@ -115,6 +121,7 @@ start_server {tags {"hll"}} {
# set e
# } {*WRONGTYPE*}

# The return value of Pika is inconsistent with Redis
# test {PFADD, PFCOUNT, PFMERGE type checking works} {
# r set foo bar
# catch {r pfadd foo 1} e
@@ -136,6 +143,7 @@ start_server {tags {"hll"}} {
r pfcount hll
} {5}

# The return value of Pika is inconsistent with Redis
# test {PFCOUNT multiple-keys merge returns cardinality of union} {
# r del hll1 hll2 hll3
# for {set x 1} {$x < 100000} {incr x} {
@@ -151,6 +159,7 @@ start_server {tags {"hll"}} {
# }
# }

# The return value of Pika is inconsistent with Redis
# test {HYPERLOGLOG press test: 5w, 10w, 15w, 20w, 30w, 50w, 100w} {
# r del hll1
# for {set x 1} {$x <= 1000000} {incr x} {
@@ -224,19 +233,21 @@ start_server {tags {"hll"}} {
# }
# }

# Pika does not support the pfdebug command
# test {PFDEBUG GETREG returns the HyperLogLog raw registers} {
# r del hll
# r pfadd hll 1 2 3
# llength [r pfdebug getreg hll]
# } {16384}


# Pika does not support the pfdebug command
# test {PFDEBUG GETREG returns the HyperLogLog raw registers} {
# r del hll
# r pfadd hll 1 2 3
# llength [r pfdebug getreg hll]
# } {16384}

# The return value of Pika is inconsistent with Redis
# test {PFADD / PFCOUNT cache invalidation works} {
# r del hll
# r pfadd hll a b c
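One way to keep the PFDEBUG-dependent tests in the file without failing on Pika, offered only as a hedged sketch (the guard and its placement are not part of this commit): probe for the command once and skip the register-level assertions when it is absent.

# Hypothetical guard: run PFDEBUG-dependent assertions only when the
# server actually exposes the command (Pika reportedly does not).
r del hll
r pfadd hll 1 2 3
if {[catch {r pfdebug getreg hll} err]} {
    # PFDEBUG is unavailable; skip the register-level checks.
} else {
    test {PFDEBUG GETREG returns the HyperLogLog raw registers} {
        llength [r pfdebug getreg hll]
    } {16384}
}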
6 changes: 6 additions & 0 deletions tests/unit/maxmemory.tcl
@@ -7,19 +7,22 @@ start_server {tags {"maxmemory"}} {
# The current maxmemory command does not support config set and policy.
# For a complete list of commands, refer to the wiki: https://github.com/OpenAtomFoundation/pika/wiki/pika-%E5%B7%AE%E5%BC%82%E5%8C%96%E5%91%BD%E4%BB%A4

# This parameter is not available in Pika
# test "Without maxmemory small integers are shared" {
# r config set maxmemory 0
# r set a 1
# assert {[r object refcount a] > 1}
# }

# This parameter is not available in Pika
# test "With maxmemory and non-LRU policy integers are still shared" {
# r config set maxmemory 1073741824
# r config set maxmemory-policy allkeys-random
# r set a 1
# assert {[r object refcount a] > 1}
# }

# This parameter is not available in Pika
# test "With maxmemory and LRU policy integers are not shared" {
# r config set maxmemory 1073741824
# r config set maxmemory-policy allkeys-lru
@@ -31,6 +34,7 @@ start_server {tags {"maxmemory"}} {
# r config set maxmemory 0
# }

# This parameter is not available in Pika
# foreach policy {
# allkeys-random allkeys-lru volatile-lru volatile-random volatile-ttl
# } {
@@ -63,6 +67,7 @@ start_server {tags {"maxmemory"}} {
# }
# }

# This parameter is not available in Pika
# foreach policy {
# allkeys-random allkeys-lru volatile-lru volatile-random volatile-ttl
# } {
@@ -105,6 +110,7 @@ start_server {tags {"maxmemory"}} {
# }
# }

# This parameter is not available in Pika
# foreach policy {
# volatile-lru volatile-random volatile-ttl
# } {
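The file's header comment notes that Pika's maxmemory support does not accept config set or eviction policies. A hedged sketch of the same idea in guard form (not in the commit): detect at runtime whether `config set maxmemory-policy` is accepted and only then run the eviction checks. The variable name and placeholder body are made up.

# Hypothetical capability probe (made-up variable name): the disabled
# eviction tests could be wrapped so they only run when the server accepts
# "config set maxmemory-policy", which Pika reportedly does not.
set ::supports_maxmemory_policy \
    [expr {![catch {r config set maxmemory-policy allkeys-lru}]}]
if {$::supports_maxmemory_policy} {
    r config set maxmemory 100mb
    # ... eviction assertions would go here ...
    r config set maxmemory 0
}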
(Diffs for the remaining changed files are not shown.)
