diff --git a/docs/command_line_reference.rst b/docs/command_line_reference.rst index 1dd05ec9a..7aa45f439 100644 --- a/docs/command_line_reference.rst +++ b/docs/command_line_reference.rst @@ -437,7 +437,7 @@ This enables the HTTP exporter of `X-Pack Monitoring ` that Rally should run. -Rally can autodetect the pipeline in most cases. If you specify ``--distribution-version`` it will auto-select the pipeline ``from-distribution`` otherwise it will use ``from-sources-complete``. +Rally can autodetect the pipeline in most cases. If you specify ``--distribution-version`` it will auto-select the pipeline ``from-distribution`` otherwise it will use ``from-sources``. .. _clr_enable_driver_profiling: diff --git a/docs/migrate.rst b/docs/migrate.rst index db8a8043d..390f579bd 100644 --- a/docs/migrate.rst +++ b/docs/migrate.rst @@ -1,6 +1,15 @@ Migration Guide =============== +Migrating to Rally 2.0.1 +------------------------ + +Pipelines from-sources-complete and from-sources-skip-build are deprecated +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Rally 2.0.1 caches source artifacts automatically in ``~/.rally/benchmarks/distributions/src``. Therefore, it is not necessary anymore to explicitly skip the build with ``--pipeline=from-sources-skip-build``. Specify ``--pipeline=from-sources`` instead. See the :doc:`pipeline reference documentation ` for more details. + + Migrating to Rally 2.0.0 ------------------------ diff --git a/docs/pipelines.rst b/docs/pipelines.rst index c587179c2..d426ec7e8 100644 --- a/docs/pipelines.rst +++ b/docs/pipelines.rst @@ -11,10 +11,11 @@ You can get a list of all pipelines with ``esrally list pipelines``:: Name Description ----------------------- --------------------------------------------------------------------------------------------- + from-sources Builds and provisions Elasticsearch, runs a benchmark and reports results. 
+ from-sources-complete Builds and provisions Elasticsearch, runs a benchmark and reports results [deprecated]. + from-sources-skip-build Provisions Elasticsearch (skips the build), runs a benchmark and reports results [deprecated]. from-distribution Downloads an Elasticsearch distribution, provisions it, runs a benchmark and reports results. - from-sources-complete Builds and provisions Elasticsearch, runs a benchmark and reports results. benchmark-only Assumes an already running Elasticsearch instance, runs a benchmark and reports results - from-sources-skip-build Provisions Elasticsearch (skips the build), runs a benchmark and reports results. benchmark-only ~~~~~~~~~~~~~~ @@ -45,16 +46,14 @@ However, this feature is mainly intended for continuous integration environments This pipeline is just mentioned for completeness but Rally will autoselect it for you. All you need to do is to define the ``--distribution-version`` flag. -.. _pipelines_from-sources-complete: - -from-sources-complete -~~~~~~~~~~~~~~~~~~~~~ +from-sources +~~~~~~~~~~~~ You should use this pipeline when you want to build and benchmark Elasticsearch from sources. This pipeline will only work from Elasticsearch 5.0 onwards because Elasticsearch switched from Maven to Gradle and Rally only supports one build tool in the interest of maintainability. Remember that you also need git installed. If that's not the case you'll get an error and have to run ``esrally configure`` first. An example invocation:: - esrally --pipeline=from-sources-complete --revision=latest + esrally --pipeline=from-sources --revision=latest You have to specify a :ref:`revision `. @@ -62,11 +61,17 @@ You have to specify a :ref:`revision `. This pipeline is just mentioned for completeness but Rally will automatically select it for you. All you need to do is to define the ``--revision`` flag. 
-To enable artifact caching for source builds, set ``cache`` to ``true`` in the section ``source`` in the configuration file in ``~/.rally/rally.ini``. Source builds will then be cached in ``~/.rally/benchmarks/distributions`` but artifacts will not be evicted automatically. +Artifacts are cached for seven days by default in ``~/.rally/benchmarks/distributions/src``. Artifact caching can be configured with the following settings in the section ``source`` in the configuration file in ``~/.rally/rally.ini``: + +* ``cache`` (default: ``True``): Set to ``False`` to disable artifact caching. +* ``cache.days`` (default: ``7``): The maximum age in days of an artifact before it gets evicted from the artifact cache. + +from-sources-complete +~~~~~~~~~~~~~~~~~~~~~ + +This deprecated pipeline is an alias for ``from-sources`` and is only provided for backwards-compatibility. Use ``from-sources`` instead. from-sources-skip-build ~~~~~~~~~~~~~~~~~~~~~~~ -This pipeline is similar to ``from-sources-complete`` except that it assumes you have built the binary once. It saves time if you want to run a benchmark twice for the exact same version of Elasticsearch. Obviously it doesn't make sense to provide a revision: It is always the previously built revision. An example invocation:: - - esrally --pipeline=from-sources-skip-build +This deprecated pipeline is similar to ``from-sources-complete`` except that it assumes you have built the binary once. Use ``from-sources`` instead, which caches built artifacts automatically. diff --git a/esrally/mechanic/supplier.py b/esrally/mechanic/supplier.py index f73988843..dd8e1ac82 100644 --- a/esrally/mechanic/supplier.py +++ b/esrally/mechanic/supplier.py @@ -15,6 +15,7 @@ # specific language governing permissions and limitations # under the License. 
+import datetime import glob import logging import os @@ -34,6 +35,7 @@ def create(cfg, sources, distribution, build, car, plugins=None): logger = logging.getLogger(__name__) if plugins is None: plugins = [] + caching_enabled = cfg.opts("source", "cache", mandatory=False, default_value=True) revisions = _extract_revisions(cfg.opts("mechanic", "source.revision")) distribution_version = cfg.opts("mechanic", "distribution.version", mandatory=False) supply_requirements = _supply_requirements(sources, distribution, build, plugins, revisions, distribution_version) @@ -63,6 +65,18 @@ def create(cfg, sources, distribution, build, car, plugins=None): # ... but the user can override it in rally.ini dist_cfg.update(cfg.all_opts("distributions")) + if caching_enabled: + logger.info("Enabling source artifact caching.") + max_age_days = int(cfg.opts("source", "cache.days", mandatory=False, default_value=7)) + if max_age_days <= 0: + raise exceptions.SystemSetupError(f"cache.days must be a positive number but is {max_age_days}") + + source_distributions_root = os.path.join(distributions_root, "src") + _prune(source_distributions_root, max_age_days) + else: + logger.info("Disabling source artifact caching.") + source_distributions_root = None + if es_supplier_type == "source": es_src_dir = os.path.join(_src_dir(cfg), _config_value(src_config, "elasticsearch.src.subdir")) @@ -73,12 +87,12 @@ def create(cfg, sources, distribution, build, car, plugins=None): builder=builder, template_renderer=template_renderer) - if cfg.opts("source", "cache", mandatory=False, default_value=False): - logger.info("Enabling source artifact caching.") - source_supplier = CachedElasticsearchSourceSupplier(distributions_root, - source_supplier, - dist_cfg, - template_renderer) + if caching_enabled: + es_file_resolver = ElasticsearchFileNameResolver(dist_cfg, template_renderer) + source_supplier = CachedSourceSupplier(source_distributions_root, + source_supplier, + es_file_resolver) + 
suppliers.append(source_supplier) repo = None else: @@ -94,14 +108,21 @@ def create(cfg, sources, distribution, build, car, plugins=None): if supplier_type == "source": if CorePluginSourceSupplier.can_handle(plugin): logger.info("Adding core plugin source supplier for [%s].", plugin.name) - assert es_src_dir is not None, "Cannot build core plugin %s when Elasticsearch is not built from source." % plugin.name - suppliers.append(CorePluginSourceSupplier(plugin, es_src_dir, builder)) + assert es_src_dir is not None, f"Cannot build core plugin {plugin.name} when Elasticsearch is not built from source." + plugin_supplier = CorePluginSourceSupplier(plugin, es_src_dir, builder) elif ExternalPluginSourceSupplier.can_handle(plugin): logger.info("Adding external plugin source supplier for [%s].", plugin.name) - suppliers.append(ExternalPluginSourceSupplier(plugin, plugin_version, _src_dir(cfg, mandatory=False), src_config, builder)) + plugin_supplier = ExternalPluginSourceSupplier(plugin, plugin_version, _src_dir(cfg, mandatory=False), src_config, builder) else: raise exceptions.RallyError("Plugin %s can neither be treated as core nor as external plugin. Requirements: %s" % (plugin.name, supply_requirements[plugin.name])) + + if caching_enabled: + plugin_file_resolver = PluginFileNameResolver(plugin.name) + plugin_supplier = CachedSourceSupplier(source_distributions_root, + plugin_supplier, + plugin_file_resolver) + suppliers.append(plugin_supplier) else: logger.info("Adding plugin distribution supplier for [%s].", plugin.name) assert repo is not None, "Cannot benchmark plugin %s from a distribution version but Elasticsearch from sources" % plugin.name @@ -192,6 +213,35 @@ def _src_dir(cfg, mandatory=True): " all prerequisites and reconfigure Rally with %s configure" % PROGRAM_NAME) +def _prune(root_path, max_age_days): + """ + Removes files that are older than ``max_age_days`` from ``root_path``. Subdirectories are not traversed. 
+ + :param root_path: A directory which should be checked. + :param max_age_days: Files that have been created more than ``max_age_days`` ago are deleted. + """ + logger = logging.getLogger(__name__) + if not os.path.exists(root_path): + logger.info("[%s] does not exist. Skipping pruning.", root_path) + return + + for f in os.listdir(root_path): + artifact = os.path.join(root_path, f) + if os.path.isfile(artifact): + max_age = datetime.datetime.now() - datetime.timedelta(days=max_age_days) + try: + created_at = datetime.datetime.fromtimestamp(os.lstat(artifact).st_ctime) + if created_at < max_age: + logger.info("Deleting [%s] from artifact cache (reached max age).", f) + os.remove(artifact) + else: + logger.debug("Keeping [%s] (max age not yet reached)", f) + except OSError: + logger.exception("Could not check whether [%s] needs to be deleted from artifact cache.", artifact) + else: + logger.info("Skipping [%s] (not a file).", artifact) + + class TemplateRenderer: def __init__(self, version, os_name=None, arch=None): self.version = version @@ -231,16 +281,19 @@ def __call__(self, *args, **kwargs): return binaries -class CachedElasticsearchSourceSupplier: - def __init__(self, distributions_root, source_supplier, distribution_config, template_renderer): - self.distributions_root = distributions_root - self.source_supplier = source_supplier - self.template_renderer = template_renderer +class ElasticsearchFileNameResolver: + def __init__(self, distribution_config, template_renderer): self.cfg = distribution_config self.runtime_jdk_bundled = convert.to_bool(self.cfg.get("runtime.jdk.bundled", False)) - self.revision = None - self.cached_path = None - self.logger = logging.getLogger(__name__) + self.template_renderer = template_renderer + + @property + def revision(self): + return self.template_renderer.version + + @revision.setter + def revision(self, revision): + self.template_renderer.version = revision @property def file_name(self): @@ -251,6 +304,29 @@ def 
file_name(self): url = self.template_renderer.render(self.cfg[url_key]) return url[url.rfind("/") + 1:] + @property + def artifact_key(self): + return "elasticsearch" + + def to_artifact_path(self, file_system_path): + return file_system_path + + def to_file_system_path(self, artifact_path): + return artifact_path + + +class CachedSourceSupplier: + def __init__(self, distributions_root, source_supplier, file_resolver): + self.distributions_root = distributions_root + self.source_supplier = source_supplier + self.file_resolver = file_resolver + self.cached_path = None + self.logger = logging.getLogger(__name__) + + @property + def file_name(self): + return self.file_resolver.file_name + @property def cached(self): return self.cached_path is not None and os.path.exists(self.cached_path) @@ -259,7 +335,7 @@ def fetch(self): resolved_revision = self.source_supplier.fetch() if resolved_revision: # ensure we use the resolved revision for rendering the artifact - self.template_renderer.version = resolved_revision + self.file_resolver.revision = resolved_revision self.cached_path = os.path.join(self.distributions_root, self.file_name) def prepare(self): @@ -269,10 +345,10 @@ def prepare(self): def add(self, binaries): if self.cached: self.logger.info("Using cached artifact in [%s]", self.cached_path) - binaries["elasticsearch"] = self.cached_path + binaries[self.file_resolver.artifact_key] = self.file_resolver.to_artifact_path(self.cached_path) else: self.source_supplier.add(binaries) - original_path = binaries["elasticsearch"] + original_path = self.file_resolver.to_file_system_path(binaries[self.file_resolver.artifact_key]) # this can be None if the Elasticsearch does not reside in a git repo and the user has only # copied all source files. In that case, we cannot resolve a revision hash and thus we cannot cache. 
if self.cached_path: @@ -280,7 +356,7 @@ def add(self, binaries): io.ensure_dir(io.dirname(self.cached_path)) shutil.copy(original_path, self.cached_path) self.logger.info("Caching artifact in [%s]", self.cached_path) - binaries["elasticsearch"] = self.cached_path + binaries[self.file_resolver.artifact_key] = self.file_resolver.to_artifact_path(self.cached_path) except OSError: self.logger.exception("Not caching [%s].", original_path) else: @@ -318,6 +394,26 @@ def resolve_binary(self): raise SystemSetupError("Couldn't find a tar.gz distribution. Please run Rally with the pipeline 'from-sources-complete'.") +class PluginFileNameResolver: + def __init__(self, plugin_name): + self.plugin_name = plugin_name + self.revision = None + + @property + def file_name(self): + return f"{self.plugin_name}-{self.revision}.zip" + + @property + def artifact_key(self): + return self.plugin_name + + def to_artifact_path(self, file_system_path): + return f"file://{file_system_path}" + + def to_file_system_path(self, artifact_path): + return artifact_path[len("file://"):] + + class ExternalPluginSourceSupplier: def __init__(self, plugin, revision, src_dir, src_config, builder): assert not plugin.core_plugin, "Plugin %s is a core plugin" % plugin.name @@ -348,7 +444,7 @@ def can_handle(plugin): def fetch(self): # optional (but then source code is assumed to be available locally) plugin_remote_url = self.src_config.get("plugin.%s.remote.repo.url" % self.plugin.name) - SourceRepository(self.plugin.name, plugin_remote_url, self.plugin_src_dir).fetch(self.revision) + return SourceRepository(self.plugin.name, plugin_remote_url, self.plugin_src_dir).fetch(self.revision) def prepare(self): if self.builder: @@ -380,7 +476,8 @@ def can_handle(plugin): return plugin.core_plugin def fetch(self): - pass + # Just retrieve the current revision *number* and assume that Elasticsearch has prepared the source tree. 
+ return SourceRepository("Elasticsearch", None, self.es_src_dir).fetch(revision="current") def prepare(self): if self.builder: diff --git a/esrally/mechanic/team.py b/esrally/mechanic/team.py index 0c505ccf3..fdee50597 100644 --- a/esrally/mechanic/team.py +++ b/esrally/mechanic/team.py @@ -353,12 +353,17 @@ def load_plugin(self, name, config_names, plugin_params=None): self.logger.info("Loading plugin [%s] with default configuration.", name) root_path = self._plugin_root_path(name) + # used to determine whether this is a core plugin + core_plugin = self._core_plugin(name) if not config_names: # maybe we only have a config folder but nothing else (e.g. if there is only an install hook) if io.exists(root_path): - return PluginDescriptor(name=name, config=config_names, root_path=root_path, variables=plugin_params) + return PluginDescriptor(name=name, + core_plugin=core_plugin is not None, + config=config_names, + root_path=root_path, + variables=plugin_params) else: - core_plugin = self._core_plugin(name, plugin_params) if core_plugin: return core_plugin # If we just have a plugin name then we assume that this is a community plugin and the user has specified a download URL @@ -371,8 +376,6 @@ def load_plugin(self, name, config_names, plugin_params=None): config_paths = [] # used for deduplication known_config_bases = set() - # used to determine whether this is a core plugin - core_plugin = self._core_plugin(name) for config_name in config_names: config_file = self._plugin_file(name, config_name) diff --git a/esrally/racecontrol.py b/esrally/racecontrol.py index 13718963e..0216fb0b6 100644 --- a/esrally/racecontrol.py +++ b/esrally/racecontrol.py @@ -273,13 +273,20 @@ def set_default_hosts(cfg, host="127.0.0.1", port=9200): # Poor man's curry -def from_sources_complete(cfg): +def from_sources(cfg): port = cfg.opts("provisioning", "node.http.port") set_default_hosts(cfg, port=port) return race(cfg, sources=True, build=True) +def from_sources_complete(cfg): + 
console.warn("The pipeline from-sources-complete is deprecated. Use the pipeline \"from-sources\" instead.") + return from_sources(cfg) + + def from_sources_skip_build(cfg): + console.warn("The pipeline from-sources-skip-build is deprecated. Rally caches artifacts now automatically. " + "Use the pipeline \"from-sources\" instead") port = cfg.opts("provisioning", "node.http.port") set_default_hosts(cfg, port=port) return race(cfg, sources=True, build=False) @@ -303,11 +310,14 @@ def docker(cfg): return race(cfg, docker=True) +Pipeline("from-sources", + "Builds and provisions Elasticsearch, runs a benchmark and reports results.", from_sources) + Pipeline("from-sources-complete", - "Builds and provisions Elasticsearch, runs a benchmark and reports results.", from_sources_complete) + "Builds and provisions Elasticsearch, runs a benchmark and reports results [deprecated].", from_sources_complete) Pipeline("from-sources-skip-build", - "Provisions Elasticsearch (skips the build), runs a benchmark and reports results.", from_sources_skip_build) + "Provisions Elasticsearch (skips the build), runs a benchmark and reports results [deprecated].", from_sources_skip_build) Pipeline("from-distribution", "Downloads an Elasticsearch distribution, provisions it, runs a benchmark and reports results.", from_distribution) @@ -338,7 +348,7 @@ def run(cfg): if cfg.exists("mechanic", "distribution.version"): name = "from-distribution" else: - name = "from-sources-complete" + name = "from-sources" logger.info("User specified no pipeline. Automatically derived pipeline [%s].", name) cfg.add(config.Scope.applicationOverride, "race", "pipeline", name) else: diff --git a/tests/mechanic/data/plugins/v1/core-plugins.txt b/tests/mechanic/data/plugins/v1/core-plugins.txt index 1aa7b34e6..5c46d9c52 100644 --- a/tests/mechanic/data/plugins/v1/core-plugins.txt +++ b/tests/mechanic/data/plugins/v1/core-plugins.txt @@ -1,4 +1,5 @@ # look, lines starting with a hash are ignored. 
#some-ignored-plugin my-analysis-plugin -my-ingest-plugin \ No newline at end of file +my-ingest-plugin +my-core-plugin-with-config diff --git a/tests/mechanic/data/plugins/v1/my_core_plugin_with_config/.gitkeep b/tests/mechanic/data/plugins/v1/my_core_plugin_with_config/.gitkeep new file mode 100644 index 000000000..e69de29bb diff --git a/tests/mechanic/supplier_test.py b/tests/mechanic/supplier_test.py index dad2d5fd9..cdc0787eb 100644 --- a/tests/mechanic/supplier_test.py +++ b/tests/mechanic/supplier_test.py @@ -15,6 +15,8 @@ # specific language governing permissions and limitations # under the License. +import collections +import datetime import unittest.mock as mock from unittest import TestCase @@ -195,10 +197,12 @@ def add_es_artifact(binaries): renderer = supplier.TemplateRenderer(version=None, os_name="linux", arch="x86_64") - cached_supplier = supplier.CachedElasticsearchSourceSupplier(distributions_root="/tmp", - source_supplier=es, - distribution_config={}, - template_renderer=renderer) + cached_supplier = supplier.CachedSourceSupplier(distributions_root="/tmp", + source_supplier=es, + file_resolver=supplier.ElasticsearchFileNameResolver( + distribution_config={}, + template_renderer=renderer + )) cached_supplier.fetch() cached_supplier.prepare() @@ -231,10 +235,12 @@ def add_es_artifact(binaries): "jdk.bundled.release_url": "https://elstc.co/elasticsearch-{{VERSION}}-{{OSNAME}}-{{ARCH}}.tar.gz" } - cached_supplier = supplier.CachedElasticsearchSourceSupplier(distributions_root="/tmp", - source_supplier=es, - distribution_config=dist_cfg, - template_renderer=renderer) + cached_supplier = supplier.CachedSourceSupplier(distributions_root="/tmp", + source_supplier=es, + file_resolver=supplier.ElasticsearchFileNameResolver( + distribution_config=dist_cfg, + template_renderer=renderer + )) cached_supplier.fetch() cached_supplier.prepare() @@ -282,10 +288,12 @@ def add_es_artifact(binaries): "jdk.bundled.release_url": 
"https://elstc.co/elasticsearch-{{VERSION}}-{{OSNAME}}-{{ARCH}}.tar.gz" } - cached_supplier = supplier.CachedElasticsearchSourceSupplier(distributions_root="/tmp", - source_supplier=es, - distribution_config=dist_cfg, - template_renderer=renderer) + cached_supplier = supplier.CachedSourceSupplier(distributions_root="/tmp", + source_supplier=es, + file_resolver=supplier.ElasticsearchFileNameResolver( + distribution_config=dist_cfg, + template_renderer=renderer + )) cached_supplier.fetch() cached_supplier.prepare() @@ -301,6 +309,99 @@ def add_es_artifact(binaries): self.assertEqual("/path/to/artifact.tar.gz", binaries["elasticsearch"]) +class ElasticsearchFileNameResolverTests(TestCase): + def setUp(self): + super().setUp() + renderer = supplier.TemplateRenderer(version="8.0.0-SNAPSHOT", os_name="linux", arch="x86_64") + + dist_cfg = { + "runtime.jdk.bundled": "true", + "jdk.bundled.release_url": "https://elstc.co/elasticsearch-{{VERSION}}-{{OSNAME}}-{{ARCH}}.tar.gz" + } + + self.resolver = supplier.ElasticsearchFileNameResolver( + distribution_config=dist_cfg, + template_renderer=renderer + ) + + def test_resolve(self): + self.resolver.revision = "abc123" + self.assertEqual("elasticsearch-abc123-linux-x86_64.tar.gz", self.resolver.file_name) + + def test_artifact_key(self): + self.assertEqual("elasticsearch", self.resolver.artifact_key) + + def test_to_artifact_path(self): + file_system_path = "/tmp/test" + self.assertEqual(file_system_path, self.resolver.to_artifact_path(file_system_path)) + + def test_to_file_system_path(self): + artifact_path = "/tmp/test" + self.assertEqual(artifact_path, self.resolver.to_file_system_path(artifact_path)) + + +class PluginFileNameResolverTests(TestCase): + def setUp(self): + super().setUp() + self.resolver = supplier.PluginFileNameResolver("test-plugin") + + def test_resolve(self): + self.resolver.revision = "abc123" + self.assertEqual("test-plugin-abc123.zip", self.resolver.file_name) + + def test_artifact_key(self): + 
self.assertEqual("test-plugin", self.resolver.artifact_key) + + def test_to_artifact_path(self): + file_system_path = "/tmp/test" + self.assertEqual(f"file://{file_system_path}", self.resolver.to_artifact_path(file_system_path)) + + def test_to_file_system_path(self): + file_system_path = "/tmp/test" + self.assertEqual(file_system_path, self.resolver.to_file_system_path(f"file://{file_system_path}")) + + +class PruneTests(TestCase): + LStat = collections.namedtuple("LStat", "st_ctime") + + @mock.patch("os.path.exists") + @mock.patch("os.listdir") + @mock.patch("os.path.isfile") + @mock.patch("os.lstat") + @mock.patch("os.remove") + def test_does_not_touch_nonexisting_directory(self, rm, lstat, isfile, listdir, exists): + exists.return_value = False + + supplier._prune(root_path="/tmp/test", max_age_days=7) + + self.assertEqual(0, listdir.call_count, "attempted to list a non-existing directory") + + @mock.patch("os.path.exists") + @mock.patch("os.listdir") + @mock.patch("os.path.isfile") + @mock.patch("os.lstat") + @mock.patch("os.remove") + def test_prunes_old_files(self, rm, lstat, isfile, listdir, exists): + exists.return_value = True + listdir.return_value = ["elasticsearch-6.8.0.tar.gz", "some-subdir", "elasticsearch-7.3.0-darwin-x86_64.tar.gz"] + isfile.side_effect = [True, False, True] + + now = datetime.datetime.now(tz=datetime.timezone.utc) + ten_days_ago = now - datetime.timedelta(days=10) + one_day_ago = now - datetime.timedelta(days=1) + + lstat.side_effect = [ + # elasticsearch-6.8.0.tar.gz + PruneTests.LStat(st_ctime=int(ten_days_ago.timestamp())), + # elasticsearch-7.3.0-darwin-x86_64.tar.gz + PruneTests.LStat(st_ctime=int(one_day_ago.timestamp())) + ] + + supplier._prune(root_path="/tmp/test", max_age_days=7) + + rm.assert_called_with("/tmp/test/elasticsearch-6.8.0.tar.gz") + + class ElasticsearchSourceSupplierTests(TestCase): def test_no_build(self): car = team.Car("default", root_path=None, config_paths=[], variables={ @@ -581,9 +682,9 @@ def 
test_create_suppliers_for_es_distribution_plugin_source_skip(self): self.assertIsInstance(composite_supplier.suppliers[0], supplier.ElasticsearchDistributionSupplier) self.assertIsInstance(composite_supplier.suppliers[1], supplier.PluginDistributionSupplier) self.assertEqual(core_plugin, composite_supplier.suppliers[1].plugin) - self.assertIsInstance(composite_supplier.suppliers[2], supplier.ExternalPluginSourceSupplier) - self.assertEqual(external_plugin, composite_supplier.suppliers[2].plugin) - self.assertIsNone(composite_supplier.suppliers[2].builder) + self.assertIsInstance(composite_supplier.suppliers[2].source_supplier, supplier.ExternalPluginSourceSupplier) + self.assertEqual(external_plugin, composite_supplier.suppliers[2].source_supplier.plugin) + self.assertIsNone(composite_supplier.suppliers[2].source_supplier.builder) def test_create_suppliers_for_es_missing_distribution_plugin_source_skip(self): cfg = config.Config() @@ -639,9 +740,9 @@ def test_create_suppliers_for_es_distribution_plugin_source_build(self): self.assertIsInstance(composite_supplier.suppliers[0], supplier.ElasticsearchDistributionSupplier) self.assertIsInstance(composite_supplier.suppliers[1], supplier.PluginDistributionSupplier) self.assertEqual(core_plugin, composite_supplier.suppliers[1].plugin) - self.assertIsInstance(composite_supplier.suppliers[2], supplier.ExternalPluginSourceSupplier) - self.assertEqual(external_plugin, composite_supplier.suppliers[2].plugin) - self.assertIsNotNone(composite_supplier.suppliers[2].builder) + self.assertIsInstance(composite_supplier.suppliers[2].source_supplier, supplier.ExternalPluginSourceSupplier) + self.assertEqual(external_plugin, composite_supplier.suppliers[2].source_supplier.plugin) + self.assertIsNotNone(composite_supplier.suppliers[2].source_supplier.builder) @mock.patch("esrally.utils.jvm.resolve_path", lambda v: (v, "/opt/java/java{}".format(v))) def test_create_suppliers_for_es_and_plugin_source_build(self): @@ -672,12 +773,12 @@ def 
test_create_suppliers_for_es_and_plugin_source_build(self): ]) self.assertEqual(3, len(composite_supplier.suppliers)) - self.assertIsInstance(composite_supplier.suppliers[0], supplier.ElasticsearchSourceSupplier) - self.assertIsInstance(composite_supplier.suppliers[1], supplier.CorePluginSourceSupplier) - self.assertEqual(core_plugin, composite_supplier.suppliers[1].plugin) - self.assertIsInstance(composite_supplier.suppliers[2], supplier.ExternalPluginSourceSupplier) - self.assertEqual(external_plugin, composite_supplier.suppliers[2].plugin) - self.assertIsNotNone(composite_supplier.suppliers[2].builder) + self.assertIsInstance(composite_supplier.suppliers[0].source_supplier, supplier.ElasticsearchSourceSupplier) + self.assertIsInstance(composite_supplier.suppliers[1].source_supplier, supplier.CorePluginSourceSupplier) + self.assertEqual(core_plugin, composite_supplier.suppliers[1].source_supplier.plugin) + self.assertIsInstance(composite_supplier.suppliers[2].source_supplier, supplier.ExternalPluginSourceSupplier) + self.assertEqual(external_plugin, composite_supplier.suppliers[2].source_supplier.plugin) + self.assertIsNotNone(composite_supplier.suppliers[2].source_supplier.builder) class DistributionRepositoryTests(TestCase): diff --git a/tests/mechanic/team_test.py b/tests/mechanic/team_test.py index ec3571e3f..53e0ff323 100644 --- a/tests/mechanic/team_test.py +++ b/tests/mechanic/team_test.py @@ -150,13 +150,29 @@ def test_lists_plugins(self): team.PluginDescriptor(name="complex-plugin", config="config-a"), team.PluginDescriptor(name="complex-plugin", config="config-b"), team.PluginDescriptor(name="my-analysis-plugin", core_plugin=True), - team.PluginDescriptor(name="my-ingest-plugin", core_plugin=True) + team.PluginDescriptor(name="my-ingest-plugin", core_plugin=True), + team.PluginDescriptor(name="my-core-plugin-with-config", core_plugin=True) ], self.loader.plugins()) def test_loads_core_plugin(self): 
self.assertEqual(team.PluginDescriptor(name="my-analysis-plugin", core_plugin=True, variables={"dbg": True}), self.loader.load_plugin("my-analysis-plugin", config_names=None, plugin_params={"dbg": True})) + def test_loads_core_plugin_with_config(self): + plugin = self.loader.load_plugin("my-core-plugin-with-config", config_names=None, plugin_params={"dbg": True}) + self.assertEqual("my-core-plugin-with-config", plugin.name) + self.assertTrue(plugin.core_plugin) + + expected_root_path = os.path.join(current_dir, "data", "plugins", "v1", "my_core_plugin_with_config") + + self.assertEqual(expected_root_path, plugin.root_path) + self.assertEqual(0, len(plugin.config_paths)) + + self.assertEqual({ + # from plugin params + "dbg": True + }, plugin.variables) + def test_cannot_load_plugin_with_missing_config(self): with self.assertRaises(exceptions.SystemSetupError) as ctx: self.loader.load_plugin("my-analysis-plugin", ["missing-config"]) @@ -176,6 +192,7 @@ def test_cannot_load_community_plugin_with_missing_config(self): def test_loads_configured_plugin(self): plugin = self.loader.load_plugin("complex-plugin", ["config-a", "config-b"], plugin_params={"dbg": True}) self.assertEqual("complex-plugin", plugin.name) + self.assertFalse(plugin.core_plugin) self.assertCountEqual(["config-a", "config-b"], plugin.config) expected_root_path = os.path.join(current_dir, "data", "plugins", "v1", "complex_plugin") diff --git a/tests/racecontrol_test.py b/tests/racecontrol_test.py index 48124bf3b..aa91a858d 100644 --- a/tests/racecontrol_test.py +++ b/tests/racecontrol_test.py @@ -25,8 +25,9 @@ class RaceControlTests(TestCase): def test_finds_available_pipelines(self): expected = [ - ["from-sources-complete", "Builds and provisions Elasticsearch, runs a benchmark and reports results."], - ["from-sources-skip-build", "Provisions Elasticsearch (skips the build), runs a benchmark and reports results."], + ["from-sources", "Builds and provisions Elasticsearch, runs a benchmark and reports 
results."], + ["from-sources-complete", "Builds and provisions Elasticsearch, runs a benchmark and reports results [deprecated]."], + ["from-sources-skip-build", "Provisions Elasticsearch (skips the build), runs a benchmark and reports results [deprecated]."], ["from-distribution", "Downloads an Elasticsearch distribution, provisions it, runs a benchmark and reports results."], ["benchmark-only", "Assumes an already running Elasticsearch instance, runs a benchmark and reports results"], ]