[Merge] ~bjornt/maas:perf-test-no-custom-runner into maas:master

 

Björn Tillenius has proposed merging ~bjornt/maas:perf-test-no-custom-runner into maas:master.

Commit message:
Remove the custom bin/test.perf runner.

There's no reason to have a custom test runner. It's better to
use pytest directly, so that it's clear how the tests are run.

I also improved the way we set the random seed: it is now done
per-test rather than per-session, and the values that were used
are printed on test failures.

I kept bin/test.perf as a symlink, in case anyone is used to
invoking it that way.
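
For reviewers, a minimal sketch of the updated test style with the
session-scoped `perf` fixture (the test body is an illustrative
placeholder; the real tests are in the diff below):

    from maastesting.perftest import perf_test


    @perf_test(db_only=True)
    def test_perf_example(perf):
        # The timed block is now explicit in the test body instead of being
        # wrapped around the whole test by the perf_test decorator.
        with perf.record("test_perf_example"):
            sum(range(10**6))  # stand-in for the code being measured

Running the suite then reduces to the plain bin/pytest src/maasperf/ call
used by the updated Makefile targets below.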


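And a condensed sketch of the per-test seeding mentioned above; the real
implementation is the new src/maastesting/pytest.py added in the diff:

    import os
    import random
    import time

    import pytest


    @pytest.fixture(scope="session")
    def configure_seeds():
        # Seeds come from the environment when set, otherwise they are
        # generated once per session so the whole run can be reproduced.
        maas_rand_seed = os.environ.get("MAAS_RAND_SEED", time.time_ns())
        python_hash_seed = os.environ.get(
            "PYTHONHASHSEED", random.randint(1, 4294967295)
        )
        os.environ["PYTHONHASHSEED"] = str(python_hash_seed)
        return maas_rand_seed, python_hash_seed


    @pytest.fixture(autouse=True)
    def random_seed(configure_seeds):
        maas_rand_seed, python_hash_seed = configure_seeds
        random.seed(maas_rand_seed)  # re-seed before every test
        yield
        # Printed after every test; pytest only shows captured output for
        # failing tests, so the seeds surface on failures.
        print(
            f"MAAS_RAND_SEED={maas_rand_seed} "
            f"PYTHONHASHSEED={python_hash_seed}"
        )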

Requested reviews:
  MAAS Maintainers (maas-maintainers)

For more details, see:
https://code.launchpad.net/~bjornt/maas/+git/maas/+merge/433518
-- 
Your team MAAS Committers is subscribed to branch maas:master.
diff --git a/Makefile b/Makefile
index 6ddf017..6f826b7 100644
--- a/Makefile
+++ b/Makefile
@@ -48,7 +48,6 @@ bin/subunit-1to2 \
 bin/subunit2junitxml \
 bin/subunit2pyunit \
 bin/test.parallel \
-bin/test.perf \
 bin/test.rack \
 bin/test.region \
 bin/test.region.legacy
@@ -62,7 +61,8 @@ build: \
   .run \
   $(VENV) \
   $(BIN_SCRIPTS) \
-  bin/py
+  bin/py \
+  bin/test.perf
 .PHONY: build
 
 all: build ui go-bins doc
@@ -100,6 +100,9 @@ $(BIN_SCRIPTS): $(VENV) bin
 bin/py: $(VENV) bin
 	ln -sf ../$(VENV)/bin/ipython3 $@
 
+bin/test.perf: $(VENV) bin
+	ln -sf ./pytest $@
+
 bin/database: bin/postgresfixture
 	ln -sf $(notdir $<) $@
 
@@ -127,16 +130,16 @@ test-py: bin/test.parallel bin/subunit-1to2 bin/subunit2junitxml bin/subunit2pyu
 	@utilities/run-py-tests-ci
 .PHONY: test-py
 
-test-perf: bin/test.perf
+test-perf: bin/pytest
 	GIT_BRANCH=$(shell git rev-parse --abbrev-ref HEAD) \
 	GIT_HASH=$(shell git rev-parse HEAD) \
-	bin/test.perf
+	bin/pytest src/maasperf/
 .PHONY: test-perf
 
 test-perf-quiet: bin/test.perf
 	GIT_BRANCH=$(shell git rev-parse --abbrev-ref HEAD) \
 	GIT_HASH=$(shell git rev-parse HEAD) \
-	bin/test.perf -q --disable-warnings --show-capture=no --no-header --no-summary
+	bin/pytest -q --disable-warnings --show-capture=no --no-header --no-summary src/maasperf/
 .PHONY: test-perf-quiet
 
 update-initial-sql: bin/database bin/maas-region cleandb
diff --git a/setup.cfg b/setup.cfg
index a849f7f..402ce94 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -37,7 +37,6 @@ console_scripts =
   test.region.legacy = maastesting.scripts:run_region_legacy
   test.rack = maastesting.scripts:run_rack
   test.parallel = maastesting.scripts:run_parallel
-  test.perf = maastesting.scripts:run_perf
 
 [options.packages.find]
 where = src
diff --git a/src/maasperf/tests/cli/test_machines.py b/src/maasperf/tests/cli/test_machines.py
index 5e3fa46..7d7c01b 100644
--- a/src/maasperf/tests/cli/test_machines.py
+++ b/src/maasperf/tests/cli/test_machines.py
@@ -8,12 +8,12 @@ from httplib2 import Response
 from maascli import api
 from maascli.config import ProfileConfig
 from maascli.parser import get_deepest_subparser, prepare_parser
-from maastesting.perftest import perf_test, perf_tester
+from maastesting.perftest import perf_test
 
 
 @perf_test()
 def test_perf_list_machines_CLI(
-    cli_profile, monkeypatch, cli_machines_api_response
+    perf, cli_profile, monkeypatch, cli_machines_api_response
 ):
     @contextmanager
     def mock_ProfileConfig_enter(*args):
@@ -35,12 +35,13 @@ def test_perf_list_machines_CLI(
     monkeypatch.setattr(api, "http_request", mock_http_response)
 
     args = ["maas", cli_profile["name"], "machines", "read"]
-    parser = prepare_parser(args)
-    with perf_tester.record("test_perf_list_machines_CLI.parse"):
-        options = parser.parse_args(args[1:])
-    if hasattr(options, "execute"):
-        with perf_tester.record("test_perf_list_machines_CLI.execute"):
-            options.execute(options)
-    else:
-        sub_parser = get_deepest_subparser(parser, args[1:])
-        sub_parser.error("too few arguments")
+    with perf.record("test_perf_list_machines_CLI"):
+        parser = prepare_parser(args)
+        with perf.record("test_perf_list_machines_CLI.parse"):
+            options = parser.parse_args(args[1:])
+        if hasattr(options, "execute"):
+            with perf.record("test_perf_list_machines_CLI.execute"):
+                options.execute(options)
+        else:
+            sub_parser = get_deepest_subparser(parser, args[1:])
+            sub_parser.error("too few arguments")
diff --git a/src/maasperf/tests/conftest.py b/src/maasperf/tests/conftest.py
index f44495e..02a24bb 100644
--- a/src/maasperf/tests/conftest.py
+++ b/src/maasperf/tests/conftest.py
@@ -6,6 +6,19 @@ from pytest import fixture
 from maasserver.models.user import get_auth_tokens
 from maasserver.testing.factory import factory as maasserver_factory
 from maasserver.testing.testclient import MAASSensibleOAuthClient
+from maastesting.perftest import perf
+from maastesting.pytest import configure_seeds, random_seed
+
+__all__ = [
+    "admin_api_client",
+    "api_client",
+    "configure_seeds",
+    "django_db_setup",
+    "factory",
+    "maas_user",
+    "perf",
+    "random_seed",
+]
 
 
 # override pytest-django's db setup
diff --git a/src/maasperf/tests/maasserver/api/test_machines.py b/src/maasperf/tests/maasserver/api/test_machines.py
index dfc2c36..c2acf71 100644
--- a/src/maasperf/tests/maasserver/api/test_machines.py
+++ b/src/maasperf/tests/maasserver/api/test_machines.py
@@ -7,7 +7,7 @@ from piston3.handler import typemapper
 
 from maasserver.api.machines import MachinesHandler
 from maastesting.http import make_HttpRequest
-from maastesting.perftest import perf_test, profile
+from maastesting.perftest import perf_test
 
 
 class DummyEmitter(Emitter):
@@ -16,18 +16,20 @@ class DummyEmitter(Emitter):
 
 
 @perf_test()
-def test_perf_list_machines_MachineHandler_api_endpoint(admin_api_client):
-    with profile("test_perf_list_machines_MachineHandler_api_endpoint"):
+def test_perf_list_machines_MachineHandler_api_endpoint(
+    perf, admin_api_client
+):
+    with perf.record("test_perf_list_machines_MachineHandler_api_endpoint"):
         admin_api_client.get(reverse("machines_handler"))
 
 
 @perf_test(db_only=True)
-def test_perf_list_machines_MachinesHander_direct_call(admin):
+def test_perf_list_machines_MachinesHander_direct_call(perf, admin):
     handler = MachinesHandler()
     request = make_HttpRequest()
     request.user = admin
 
-    with profile("test_perf_list_machines_MachinesHander_direct_call"):
+    with perf.record("test_perf_list_machines_MachinesHander_direct_call"):
         emitter = DummyEmitter(
             handler.read(request),
             typemapper,
@@ -39,10 +41,10 @@ def test_perf_list_machines_MachinesHander_direct_call(admin):
 
 
 @perf_test(db_only=True)
-def test_perf_list_machines_MachinesHander_only_objects(admin):
+def test_perf_list_machines_MachinesHander_only_objects(perf, admin):
     handler = MachinesHandler()
     request = make_HttpRequest()
     request.user = admin
 
-    with profile("test_perf_list_machines_MachinesHander_only_objects"):
+    with perf.record("test_perf_list_machines_MachinesHander_only_objects"):
         list(handler.read(request))
diff --git a/src/maasperf/tests/maasserver/models/test_machine.py b/src/maasperf/tests/maasserver/models/test_machine.py
index 5175d1f..4a183c1 100644
--- a/src/maasperf/tests/maasserver/models/test_machine.py
+++ b/src/maasperf/tests/maasserver/models/test_machine.py
@@ -6,13 +6,14 @@ from maastesting.perftest import perf_test
 
 
 @perf_test(commit_transaction=True, db_only=True)
-def test_perf_create_machines(factory):
+def test_perf_create_machines(perf, factory):
     # TODO use create machines script
-    for _ in range(30):
-        factory.make_Machine()
+    with perf.record("test_perf_create_machines"):
+        for _ in range(30):
+            factory.make_Machine()
 
 
 @perf_test(db_only=True)
-def test_perf_list_machines():
-
-    list(Machine.objects.all())
+def test_perf_list_machines(perf):
+    with perf.record("test_perf_list_machines"):
+        list(Machine.objects.all())
diff --git a/src/maasperf/tests/maasserver/websockets/test_machines.py b/src/maasperf/tests/maasserver/websockets/test_machines.py
index 7fcf8b5..a642214 100644
--- a/src/maasperf/tests/maasserver/websockets/test_machines.py
+++ b/src/maasperf/tests/maasserver/websockets/test_machines.py
@@ -3,14 +3,14 @@
 
 from maasserver.models import Machine
 from maasserver.websockets.handlers.machine import MachineHandler
-from maastesting.perftest import perf_test, profile
+from maastesting.perftest import perf_test
 
 
 @perf_test(db_only=True)
-def test_perf_list_machines_Websocket_endpoint(admin):
+def test_perf_list_machines_Websocket_endpoint(perf, admin):
     # This should test the websocket calls that are used to load
     # the machine listing page on the initial page load.
-    with profile("test_perf_list_machines_Websocket_endpoint"):
+    with perf.record("test_perf_list_machines_Websocket_endpoint"):
         ws_handler = MachineHandler(admin, {}, None)
         # Extracted from a clean load of labmaas with empty local
         # storage
@@ -27,11 +27,11 @@ def test_perf_list_machines_Websocket_endpoint(admin):
 
 
 @perf_test(db_only=True)
-def test_perf_list_machines_Websocket_endpoint_all(admin):
+def test_perf_list_machines_Websocket_endpoint_all(perf, admin):
     # How long would it take to list all the machines using the
     # websocket without any pagination.
     machine_count = Machine.objects.all().count()
-    with profile("test_perf_list_machines_Websocket_endpoint_all"):
+    with perf.record("test_perf_list_machines_Websocket_endpoint_all"):
         ws_handler = MachineHandler(admin, {}, None)
         # Extracted from a clean load of labmaas with empty local
         # storage
diff --git a/src/maastesting/perftest.py b/src/maastesting/perftest.py
index 666018e..01d1063 100644
--- a/src/maastesting/perftest.py
+++ b/src/maastesting/perftest.py
@@ -4,18 +4,15 @@
 """Performance testing related classes and functions for MAAS and its applications"""
 
 
-from contextlib import contextmanager
+from contextlib import contextmanager, ExitStack
 from cProfile import Profile
-from datetime import datetime
 from functools import wraps
 import json
 import os
-import random
 import sys
+import time
 
-from pytest import fixture
-from pytest import main as pytest_main
-from pytest import mark, skip
+from pytest import fixture, mark, skip
 
 from maastesting.fixtures import MAASDataFixture, MAASRootFixture
 
@@ -36,7 +33,37 @@ def maas_data():
     return None
 
 
-perf_tester = None
+@fixture(scope="session")
+def perf():
+    perf_tester = PerfTester(
+        os.environ.get("GIT_BRANCH"), os.environ.get("GIT_HASH")
+    )
+    yield perf_tester
+    output = os.environ.get("OUTPUT_FILE")
+    if output:
+        with open(output, "w") as f:
+            perf_tester.finish_build(f)
+    else:
+        perf_tester.finish_build(sys.stdout)
+
+
+class Timing:
+    duration = None
+
+    def __init__(self):
+        self.start = time.monotonic()
+
+    def stop(self):
+        assert self.duration is None, "Can't call stop() twice."
+        end = time.monotonic()
+        self.duration = end - self.start
+
+
+@contextmanager
+def measure_time():
+    timing = Timing()
+    yield timing
+    timing.stop()
 
 
 class PerfTester:
@@ -45,18 +72,15 @@ class PerfTester:
     def __init__(self, git_branch, git_hash):
         self.results = {"branch": git_branch, "commit": git_hash, "tests": {}}
 
-    def _record(self, name, start, end):
-        delta = (end - start).total_seconds()
-        self.results["tests"][name] = {"duration": delta}
-
     @contextmanager
     def record(self, name):
-        start = datetime.utcnow()
-        try:
+        with ExitStack() as stack:
+            profiling_tag = os.environ.get("MAAS_PROFILING")
+            if profiling_tag:
+                stack.enter_context(profile(name, profiling_tag))
+            timing = stack.enter_context(measure_time())
             yield
-        finally:
-            end = datetime.utcnow()
-            self._record(name, start, end)
+        self.results["tests"][name] = {"duration": timing.duration}
 
     def finish_build(self, output):
         json.dump(self.results, output)
@@ -80,8 +104,7 @@ def perf_test(commit_transaction=False, db_only=False):
             if django_loaded:
                 save_point = transaction.savepoint()
 
-            with perf_tester.record(fn.__name__):
-                fn(*args, **kwargs)
+            fn(*args, **kwargs)
 
             if save_point and commit_transaction:
                 transaction.savepoint_commit(save_point)
@@ -93,33 +116,8 @@ def perf_test(commit_transaction=False, db_only=False):
     return inner
 
 
-def perf_test_finish(output):
-    if output:
-        with open(output, "w") as f:
-            perf_tester.finish_build(f)
-    else:
-        perf_tester.finish_build(sys.stdout)
-
-
-def run_perf_tests(env):
-    global perf_tester
-
-    rand_seed = os.environ.get("MAAS_RAND_SEED")
-    random.seed(rand_seed)
-
-    try:
-        cmd_args = sys.argv[1:]
-        perf_tester = PerfTester(env.get("GIT_BRANCH"), env.get("GIT_HASH"))
-
-        pytest_main(
-            args=cmd_args,
-        )
-    finally:
-        perf_test_finish(env.get("OUTPUT_FILE"))
-
-
 @contextmanager
-def profile(testname: str):
+def profile(testname: str, profiling_tag: str):
     """Produces profiling info for tests
 
     This functions uses cProfile module to provide deterministic
@@ -139,11 +137,8 @@ def profile(testname: str):
         with perftest.profile("my_test_case"):
             <<block being profiled>>
     """
-    if profiling_tag := os.environ.get("MAAS_PROFILING"):
-        with Profile() as profiler:
-            yield
-        filename = f"{testname}.{profiling_tag}.profile"
-        profiler.dump_stats(filename)
-        print(f"Dumped stats to {filename}")
-    else:
+    with Profile() as profiler:
         yield
+    filename = f"{testname}.{profiling_tag}.profile"
+    profiler.dump_stats(filename)
+    print(f"Dumped stats to {filename}")
diff --git a/src/maastesting/pytest.py b/src/maastesting/pytest.py
new file mode 100644
index 0000000..60e01cc
--- /dev/null
+++ b/src/maastesting/pytest.py
@@ -0,0 +1,30 @@
+# Copyright 2022 Canonical Ltd.  This software is licensed under the
+# GNU Affero General Public License version 3 (see the file LICENSE).
+import os
+import random
+import time
+
+import pytest
+
+
+@pytest.fixture(scope="session")
+def configure_seeds():
+    maas_rand_seed = os.environ.get("MAAS_RAND_SEED")
+    if maas_rand_seed is None:
+        maas_rand_seed = time.time_ns()
+    python_hash_seed = os.environ.get("PYTHONHASHSEED")
+    if python_hash_seed is None:
+        python_hash_seed = random.randint(1, 4294967295)
+        os.environ["PYTHONHASHSEED"] = str(python_hash_seed)
+    return maas_rand_seed, python_hash_seed
+
+
+@pytest.fixture(autouse=True, scope="function")
+def random_seed(configure_seeds):
+    maas_rand_seed, python_hash_seed = configure_seeds
+    random.seed(maas_rand_seed)
+    yield
+    print(
+        f"MAAS_RAND_SEED={maas_rand_seed} "
+        f"PYTHONHASHSEED={python_hash_seed}",
+    )
diff --git a/utilities/run-perf-tests-ci b/utilities/run-perf-tests-ci
index 1fa17fb..2ddc8ab 100755
--- a/utilities/run-perf-tests-ci
+++ b/utilities/run-perf-tests-ci
@@ -21,7 +21,7 @@ echo "MAAS_RAND_SEED=${MAAS_RAND_SEED}"
 echo "PYTHONHASHSEED=${PYTHONHASHSEED}"
 
 bin/database --preserve run make syncdb || exit 1
-exec bin/database --preserve run -- bin/test.perf \
+exec bin/database --preserve run -- bin/pytest \
     -q \
     --disable-warnings \
     --show-capture=no \
