
Commit

fix(tests): Small fixes
Signed-off-by: Vladislav Oleshko <vlad@dragonflydb.io>
dranikpg committed Jan 6, 2023
1 parent 0e0efc1 commit cbccb3e
Showing 6 changed files with 18 additions and 23 deletions.
1 change: 1 addition & 0 deletions tests/README.md
@@ -15,6 +15,7 @@ You can override the location of the binary using `DRAGONFLY_PATH` environment v
### Custom arguments

- use `--gdb` to start all instances inside gdb.
- use `--df arg=val` to pass custom arguments to all dragonfly instances.
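  For example, an invocation along the lines of `pytest dragonfly --df vmodule=replica=2 --df logtostdout` (flag values are purely illustrative) would start every instance with `--vmodule=replica=2 --logtostdout`; `--df` can be repeated to pass several arguments.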

### Before you start
Please make sure that you have python 3 installed on your local host.
3 changes: 2 additions & 1 deletion tests/dragonfly/__init__.py
@@ -16,6 +16,7 @@ class DflyParams:
path: str
cwd: str
gdb: bool
args: list
env: any


@@ -56,7 +57,7 @@ def stop(self, kill=False):
proc.kill()

def _start(self):
arglist = DflyInstance.format_args(self.args)
arglist = DflyInstance.format_args(self.args) + [f"--{v}" for v in self.params.args]
print(f"Starting instance on {self.port} with arguments {arglist}")

args = [self.params.path, *arglist]
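To make the argument plumbing above concrete, here is a minimal sketch of how per-instance options and the session-wide `--df` values could be merged into one flag list; `build_arglist` and the `--key=value` formatting are assumptions for illustration, not the repository's actual `format_args` implementation.

```python
# Illustrative sketch only: the helper name and the "--key=value" formatting
# are assumptions, not the real DflyInstance.format_args implementation.
def build_arglist(instance_args: dict, df_params: list) -> list:
    formatted = [f"--{k}={v}" for k, v in instance_args.items()]  # per-instance options
    formatted += [f"--{v}" for v in df_params]                    # values passed via --df
    return formatted

# Example: one instance option plus one --df value.
print(build_arglist({"port": 1234}, ["logtostdout"]))
# ['--port=1234', '--logtostdout']
```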
4 changes: 4 additions & 0 deletions tests/dragonfly/conftest.py
@@ -44,6 +44,9 @@ def pytest_addoption(parser):
parser.addoption(
'--gdb', action='store_true', default=False, help='Run instances in gdb'
)
parser.addoption(
'--df', action='append', default=[], help='Add arguments to dragonfly'
)


@pytest.fixture(scope="session", params=[{}])
@@ -61,6 +64,7 @@ def df_factory(request, tmp_dir, test_env) -> DflyInstanceFactory:
path=path,
cwd=tmp_dir,
gdb=request.config.getoption("--gdb"),
args=request.config.getoption("--df"),
env=test_env
)

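For readers unfamiliar with `action='append'`, here is a small stand-alone sketch of the accumulation behaviour, using `argparse` as a stand-in for pytest's option parser (the behaviour for repeated flags is the same; the flag values are made up).

```python
import argparse

# Stand-in for pytest's parser.addoption: each repeated --df adds one
# entry to a list, and the default is an empty list when the flag is absent.
parser = argparse.ArgumentParser()
parser.add_argument('--df', action='append', default=[],
                    help='Add arguments to dragonfly')

opts = parser.parse_args(['--df', 'maxmemory=1gb', '--df', 'dbnum=4'])
print(opts.df)  # ['maxmemory=1gb', 'dbnum=4']

print(parser.parse_args([]).df)  # [] when --df is not given
```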
1 change: 0 additions & 1 deletion tests/dragonfly/replication_test.py
@@ -30,7 +30,6 @@
(4, [1] * 10, dict(keys=500, dbcount=2)),
]

@dfly_args({"logtostdout":""})
@pytest.mark.asyncio
@pytest.mark.parametrize("t_master, t_replicas, seeder_config", replication_cases)
async def test_replication_all(df_local_factory, t_master, t_replicas, seeder_config):
14 changes: 0 additions & 14 deletions tests/dragonfly/runner.py

This file was deleted.

18 changes: 11 additions & 7 deletions tests/dragonfly/utility.py
@@ -310,10 +310,16 @@ async def run(self, target_times=None, target_deviation=None):
for i, queue in enumerate(queues)
]

await producer
time_start = time.time()

cmdcount = await producer
for consumer in consumers:
await consumer

took = time.time() - time_start
qps = round(cmdcount * self.dbcount / took, 2)
print(f"Filling took: {took}, QPS: {qps}")

def stop(self):
"""Stop all invocations to run"""
self.stop_flag = True
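The new timing code above measures the fill phase and reports throughput as `cmdcount * dbcount / took`; below is a small self-contained sketch of that calculation (function and variable names are illustrative, not the seeder's API).

```python
import time

# Illustrative helper mirroring the QPS arithmetic from the diff:
# `cmdcount` commands were sent to each of `dbcount` databases,
# starting at `time_start`.
def report_fill_qps(cmdcount: int, dbcount: int, time_start: float) -> float:
    took = time.time() - time_start
    qps = round(cmdcount * dbcount / took, 2)
    print(f"Filling took: {took}, QPS: {qps}")
    return qps

start = time.time()
time.sleep(0.1)  # stand-in for the actual fill work
report_fill_qps(cmdcount=10_000, dbcount=2, time_start=start)
```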
@@ -371,17 +377,19 @@ def should_run():
await asyncio.gather(*(q.put(blob) for q in queues))
submitted += 1

print('.', end='', flush=True)
await asyncio.sleep(0.0)

print("cpu time", cpu_time, "batches", submitted)
print("\ncpu time", cpu_time, "batches", submitted)

await asyncio.gather(*(q.put(None) for q in queues))
for q in queues:
await q.join()

return submitted * self.gen.batch_size

async def _consumer_task(self, db, queue):
client = aioredis.Redis(port=self.port, db=db)
pipe_time = 0
while True:
cmds = await queue.get()
if cmds is None:
@@ -392,13 +400,9 @@ async def _consumer_task(self, db, queue):
for cmd in cmds:
pipe.execute_command(cmd)

s = time.time()
await pipe.execute()
pipe_time += (time.time() - s)
queue.task_done()

print("pipe time", str(pipe_time))

CAPTURE_COMMANDS = {
ValueType.STRING: lambda pipe, k: pipe.get(k),
ValueType.LIST: lambda pipe, k: pipe.lrange(k, 0, -1),
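The seeder's `run` path above follows a classic asyncio producer/consumer shape: one producer broadcasts each command batch to per-database queues, a `None` sentinel tells every consumer to stop, and `queue.join()` waits for in-flight batches before the batch count is returned. A minimal, dragonfly-independent sketch of that pattern (all names are illustrative):

```python
import asyncio

async def producer(queues, batches):
    submitted = 0
    for batch in batches:
        # Broadcast each batch to every per-database queue.
        await asyncio.gather(*(q.put(batch) for q in queues))
        submitted += 1
    # A None sentinel tells each consumer to exit its loop.
    await asyncio.gather(*(q.put(None) for q in queues))
    for q in queues:
        await q.join()  # wait until every queued batch was processed
    return submitted

async def consumer(queue):
    while True:
        batch = await queue.get()
        if batch is None:
            queue.task_done()
            break
        # The real consumer pipelines the commands to a redis client here.
        await asyncio.sleep(0)  # placeholder for the pipelined execute
        queue.task_done()

async def main():
    queues = [asyncio.Queue() for _ in range(2)]
    consumers = [asyncio.create_task(consumer(q)) for q in queues]
    submitted = await producer(queues, [["SET k1 v1"], ["SET k2 v2"]])
    await asyncio.gather(*consumers)
    print("batches submitted:", submitted)

asyncio.run(main())
```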
