Skip to content

Commit

Permalink
fix(server): Fix replication bug, add gdb opt to pytest (#513)
Browse files Browse the repository at this point in the history
fix(server): Fix small bugs, add gdb opt to pytest

Signed-off-by: Vladislav Oleshko <[email protected]>
  • Loading branch information
dranikpg authored Nov 28, 2022
1 parent e430328 commit 2493434
Show file tree
Hide file tree
Showing 7 changed files with 67 additions and 38 deletions.
2 changes: 2 additions & 0 deletions src/server/rdb_save.cc
Original file line number Diff line number Diff line change
Expand Up @@ -884,6 +884,8 @@ void RdbSaver::Impl::Cancel() {
dfly::SliceSnapshot::DbRecord rec;
while (channel_.Pop(rec)) {
}

snapshot->Join();
}

void RdbSaver::Impl::FillFreqMap(RdbTypeFreqMap* dest) const {
Expand Down
2 changes: 1 addition & 1 deletion src/server/replica.cc
Original file line number Diff line number Diff line change
Expand Up @@ -624,8 +624,8 @@ void Replica::FullSyncDflyFb(SyncBlock* sb, string eof_token) {
std::unique_lock lk(sb->mu_);
sb->flows_left--;
ran = true;
sb->cv_.notify_all();
}
sb->cv_.notify_all();
});
loader.Load(&ps);

Expand Down
20 changes: 8 additions & 12 deletions src/server/server_family.cc
Original file line number Diff line number Diff line change
Expand Up @@ -782,26 +782,25 @@ static void RunStage(bool new_version, std::function<void(unsigned)> cb) {
};

using PartialSaveOpts =
tuple<const string& /*filename*/, const string& /*path*/, absl::Time /*start*/>;
tuple<const fs::path& /*filename*/, const fs::path& /*path*/, absl::Time /*start*/>;

// Start saving a single snapshot of a multi-file dfly snapshot.
// If shard is null, then this is the summary file.
error_code DoPartialSave(PartialSaveOpts opts, const dfly::StringVec& scripts,
RdbSnapshot* snapshot, EngineShard* shard) {
auto [filename, path, now] = opts;
// Construct resulting filename.
fs::path file = filename, abs_path = path;
fs::path full_filename = filename;
if (shard == nullptr) {
ExtendFilename(now, "summary", &file);
ExtendFilename(now, "summary", &full_filename);
} else {
ExtendFilenameWithShard(now, shard->shard_id(), &file);
ExtendFilenameWithShard(now, shard->shard_id(), &full_filename);
}
abs_path /= file; // use / operator to concatenate paths.
VLOG(1) << "Saving partial file to " << abs_path;
fs::path full_path = path / full_filename; // use / operator to concatenate paths.

// Start rdb saving.
SaveMode mode = shard == nullptr ? SaveMode::SUMMARY : SaveMode::SINGLE_SHARD;
std::error_code local_ec = snapshot->Start(mode, abs_path.generic_string(), scripts);
error_code local_ec = snapshot->Start(mode, full_path.generic_string(), scripts);

if (!local_ec && mode == SaveMode::SINGLE_SHARD) {
snapshot->StartInShard(shard);
Expand Down Expand Up @@ -897,7 +896,6 @@ GenericError ServerFamily::DoSave(bool new_version, Transaction* trans) {

ExtendFilenameWithShard(start, -1, &filename);
path /= filename; // use / operator to concatenate paths.
VLOG(1) << "Saving to " << path;

snapshots[0].reset(new RdbSnapshot(fq_threadpool_.get()));
const auto lua_scripts = script_mgr_->GetLuaScripts();
Expand Down Expand Up @@ -933,13 +931,11 @@ GenericError ServerFamily::DoSave(bool new_version, Transaction* trans) {
absl::Duration dur = absl::Now() - start;
double seconds = double(absl::ToInt64Milliseconds(dur)) / 1000;

{
// Populate LastSaveInfo.
if (!ec) {
LOG(INFO) << "Saving " << path << " finished after "
<< strings::HumanReadableElapsedTime(seconds);
}

// Populate LastSaveInfo.
if (!ec) {
save_info = make_shared<LastSaveInfo>();
for (const auto& k_v : rdb_name_map) {
save_info->freq_map.emplace_back(k_v);
Expand Down
21 changes: 12 additions & 9 deletions tests/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,17 @@

The tests assume you have the "dragonfly" binary in `<root>/build-dbg` directory.
You can override the location of the binary using `DRAGONFLY_PATH` environment var.

### Important fixtures

- `df_server` is the default instance that is available for testing. Use the `dfly_args` decorator to change its default arguments.
- `client` and `async_client` are clients to the default instance. The default instance is re-used across tests with the same arguments, but each new client flushes the instance.
- `pool` and `async_pool` are client pools that are connected to the default instance

### Custom arguments

- use `--gdb` to start all instances inside gdb.

### Before you start
Please make sure that you have Python 3 installed on your local host.
If you have both Python 2 and Python 3 installed on your host, you can run the tests with the following command:
Expand All @@ -20,6 +31,7 @@ Then install all the required dependencies for the tests:
```
pip install -r dragonfly/requirements.txt
```

### Running the tests
to run pytest, run:
`pytest -xv dragonfly`
Expand All @@ -33,15 +45,6 @@ Pytest will recursively search the `tests/dragonfly` directory for files matchin

**Note**: When making a new directory in `tests/dragonfly` be sure to create an `__init__.py` file to avoid [name conflicts](https://docs.pytest.org/en/7.1.x/explanation/goodpractices.html#tests-outside-application-code)

### Interacting with Dragonfly
Pytest allows for parameters with a specific name to be automatically resolved through [fixtures](https://docs.pytest.org/en/7.1.x/explanation/fixtures.html) for any test function. The following fixtures are to be used to interact with Dragonfly when writing a test:
| Name | Type | [Scope](https://docs.pytest.org/en/7.1.x/how-to/fixtures.html?highlight=scope#scope-sharing-fixtures-across-classes-modules-packages-or-session) | Description
| ----- | ---- | ----- | ----------- |
| tmp_dir | [pathlib.Path](https://docs.python.org/3/library/pathlib.html) | Session | The temporary directory the Dragonfly binary will be running in. The environment variable `DRAGONFLY_TMP` is also set to this value |
| test_env | `dict` | Session | The environment variables used when running Dragonfly as a dictionary |
| client | [redis.Redis](https://redis-py.readthedocs.io/en/stable/connections.html#generic-client) | Class | The redis client to interact with the Dragonfly instance |
| async_client | [aioredis.Redis](https://aioredis.readthedocs.io/en/latest/api/high-level/#aioredis.client.Redis) | Class | The async redis client to interact with the Dragonfly instance |

### Passing CLI commands to Dragonfly
To pass custom flags to the Dragonfly executable two class decorators have been created. `@dfly_args` allows you to pass a list of parameters to the Dragonfly executable, similarly `@dfly_multi_test_args` allows you to specify multiple parameter configurations to test with a given test class.

Expand Down
35 changes: 24 additions & 11 deletions tests/dragonfly/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,27 +6,42 @@
import time
import subprocess

from dataclasses import dataclass


@dataclass
class DflyParams:
    """Launch-time parameters shared by every Dragonfly instance in a test session."""
    # Filesystem path to the dragonfly binary to execute.
    path: str
    # Working directory the server process is started in.
    cwd: str
    # When True, instances are launched under gdb instead of directly.
    gdb: bool
    # Environment mapping used to expand string-valued instance arguments.
    # NOTE(review): `any` here is the builtin function, not typing.Any — works at
    # runtime (dataclass annotations are not evaluated) but is likely a typo; confirm.
    env: any


class DflyInstance:
"""
Represents a runnable and stoppable Dragonfly instance
with fixed arguments.
"""

def __init__(self, path, args, cwd):
self.path = path
def __init__(self, params: DflyParams, args):
self.args = args
self.cwd = cwd
self.params = params
self.proc = None

def start(self):
arglist = DflyInstance.format_args(self.args)

print(f"Starting instance on {self.port} with arguments {arglist}")
self.proc = subprocess.Popen([self.path, *arglist], cwd=self.cwd)

args = [self.params.path, *arglist]
if self.params.gdb:
args = ["gdb", "--ex", "r", "--args"] + args

self.proc = subprocess.Popen(args, cwd=self.params.cwd)

# Give Dragonfly time to start and detect possible failure causes
time.sleep(0.3)
# Gdb starts slowly
time.sleep(0.4 if not self.params.gdb else 3.0)

return_code = self.proc.poll()
if return_code is not None:
Expand Down Expand Up @@ -70,19 +85,17 @@ class DflyInstanceFactory:
A factory for creating dragonfly instances with pre-supplied arguments.
"""

def __init__(self, env, cwd, path, args):
self.env = env
self.cwd = cwd
self.path = path
def __init__(self, params: DflyParams, args):
self.args = args
self.params = params
self.instances = []

def create(self, **kwargs) -> DflyInstance:
args = {**self.args, **kwargs}
for k, v in args.items():
args[k] = v.format(**self.env) if isinstance(v, str) else v
args[k] = v.format(**self.params.env) if isinstance(v, str) else v

instance = DflyInstance(self.path, args, self.cwd)
instance = DflyInstance(self.params, args)
self.instances.append(instance)
return instance

Expand Down
21 changes: 17 additions & 4 deletions tests/dragonfly/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
from pathlib import Path
from tempfile import TemporaryDirectory

from . import DflyInstance, DflyInstanceFactory
from . import DflyInstance, DflyInstanceFactory, DflyParams

DATABASE_INDEX = 1

Expand Down Expand Up @@ -40,6 +40,12 @@ def test_env(tmp_dir: Path):
return env


def pytest_addoption(parser):
    """Register the custom --gdb flag (off by default) for running instances under gdb."""
    parser.addoption('--gdb', action='store_true', default=False,
                     help='Run instances in gdb')


@pytest.fixture(scope="session", params=[{}])
def df_factory(request, tmp_dir, test_env) -> DflyInstanceFactory:
"""
Expand All @@ -50,15 +56,22 @@ def df_factory(request, tmp_dir, test_env) -> DflyInstanceFactory:
scripts_dir, '../../build-dbg/dragonfly'))

args = request.param if request.param else {}
factory = DflyInstanceFactory(test_env, tmp_dir, path=path, args=args)

params = DflyParams(
path=path,
cwd=tmp_dir,
gdb=request.config.getoption("--gdb"),
env=test_env
)

factory = DflyInstanceFactory(params, args)
yield factory
factory.stop_all()


@pytest.fixture(scope="function")
def df_local_factory(df_factory: DflyInstanceFactory):
factory = DflyInstanceFactory(
df_factory.env, df_factory.cwd, df_factory.path, df_factory.args)
factory = DflyInstanceFactory(df_factory.params, df_factory.args)
yield factory
factory.stop_all()

Expand Down
4 changes: 3 additions & 1 deletion tests/dragonfly/replication_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -205,7 +205,7 @@ async def stable_sync(replica, c_replica, crash_type):
def check_gen(): return gen_test_data(n_keys//5, seed=0)

await batch_fill_data_async(c_master, check_gen())
await asyncio.sleep(0.1)
await asyncio.sleep(1.0)
for _, c_replica, _ in replicas_of_type(lambda t: t > 1):
await batch_check_data_async(c_replica, check_gen())

Expand All @@ -217,6 +217,8 @@ async def disconnect(replica, c_replica, crash_type):
await asyncio.gather(*(disconnect(*args) for args
in replicas_of_type(lambda t: t == 2)))

await asyncio.sleep(0.5)

# Check phase 3 replica survived
for _, c_replica, _ in replicas_of_type(lambda t: t == 2):
assert await c_replica.ping()
Expand Down

0 comments on commit 2493434

Please sign in to comment.