Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Selftest refactoring to fix masked tests #5822

Merged
merged 4 commits into from
Dec 13, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 1 addition & 3 deletions avocado/core/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -209,9 +209,7 @@ def __exit__(self, _exc_type, _exc_value, _traceback):
def __start_job_logging(self):
# Enable test logger
full_log = os.path.join(self.logdir, "full.log")
fmt = (
"%(asctime)s %(module)-16.16s L%(lineno)-.4d %(levelname)-5.5s| %(message)s"
)
fmt = "%(asctime)s %(name)s %(module)-16.16s L%(lineno)-.4d %(levelname)-5.5s| %(message)s"
output.add_log_handler(
LOG_JOB,
logging.FileHandler,
Expand Down
2 changes: 1 addition & 1 deletion avocado/core/utils/messages.py
Original file line number Diff line number Diff line change
Expand Up @@ -249,7 +249,7 @@ def start_logging(config, queue):
log_handler = RunnerLogHandler(queue, "log")
stdout_handler = RunnerLogHandler(queue, "stdout")
stderr_handler = RunnerLogHandler(queue, "stderr")
fmt = "%(asctime)s %(module)-16.16s L%(lineno)-.4d %(levelname)-5.5s| %(message)s"
fmt = "%(asctime)s %(name)s %(module)-16.16s L%(lineno)-.4d %(levelname)-5.5s| %(message)s"
formatter = logging.Formatter(fmt=fmt)
log_handler.setFormatter(formatter)
stdout_handler.setFormatter(formatter)
Expand Down
7 changes: 7 additions & 0 deletions docs/source/guides/writer/chapters/writing.rst
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,13 @@ leveraging its API power.
As can be seen in the example above, an Avocado test is a method that starts
with ``test`` in a class that inherits from :class:`avocado.Test`.

.. warning:: Note that combining unittests and avocado-instrumented tests within
the same file is not feasible. If a class inherits from :class:`avocado.Test`,
and another class inherits from :class:`unittest.TestCase` in the same file,
the unittest class will be excluded from testing. In such instances, it is
advisable to segregate these tests into separate files.


.. note:: Avocado also supports coroutines as tests. Simply declare
your test method using the ``async def`` syntax, and Avocado
will run it inside an asyncio loop.
Expand Down
53 changes: 53 additions & 0 deletions selftests/check.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,29 @@
from avocado.utils import process
from selftests.utils import python_module_available

# Expected number of tests in each selftest suite, keyed by suite name.
# After a job run, each suite's actual size is compared against this
# table, and a mismatch is reported as an error (see main()).
# NOTE(review): some entries are adjusted at runtime — e.g.
# "nrunner-interface" grows per optional runner plugin found, and
# "optional-plugins" accumulates the "optional-plugins-*" entries for
# whichever optional plugins are installed.
# If you add or remove selftests, update the matching entry here.
TEST_SIZE = {
    "static-checks": 7,
    "job-api-1": 1,
    "job-api-2": 1,
    "job-api-3": 2,
    "job-api-4": 9,
    "job-api-5": 12,
    "job-api-6": 4,
    "job-api-7": 1,
    "nrunner-interface": 70,
    "nrunner-requirement": 12,
    "unit": 667,
    "jobs": 11,
    "functional-parallel": 297,
    "functional-serial": 4,
    # Starts at 0; incremented with the per-plugin counts below for each
    # optional plugin that is actually available at runtime.
    "optional-plugins": 0,
    "optional-plugins-golang": 2,
    "optional-plugins-html": 3,
    "optional-plugins-robot": 3,
    "optional-plugins-varianter_cit": 40,
    "optional-plugins-varianter_yaml_to_mux": 50,
}


class JobAPIFeaturesTest(Test):
def check_directory_exists(self, path=None):
Expand Down Expand Up @@ -579,6 +602,7 @@ def create_suites(args): # pylint: disable=W0621
# ========================================================================
# Run nrunner interface checks for all available runners
# ========================================================================
nrunner_interface_size = 10
config_nrunner_interface = {
"resolver.references": ["selftests/functional/nrunner_interface.py"],
"run.dict_variants.variant_id_keys": ["runner"],
Expand Down Expand Up @@ -616,6 +640,7 @@ def create_suites(args): # pylint: disable=W0621
"runner": "avocado-runner-golang",
}
)
TEST_SIZE["nrunner-interface"] += nrunner_interface_size

if (
python_module_available("avocado-framework-plugin-robot")
Expand All @@ -626,6 +651,7 @@ def create_suites(args): # pylint: disable=W0621
"runner": "avocado-runner-robot",
}
)
TEST_SIZE["nrunner-interface"] += nrunner_interface_size

if (
python_module_available("avocado-framework-plugin-ansible")
Expand All @@ -636,6 +662,7 @@ def create_suites(args): # pylint: disable=W0621
"runner": "avocado-runner-ansible-module",
}
)
TEST_SIZE["nrunner-interface"] += nrunner_interface_size

if args.dict_tests["nrunner-interface"]:
suites.append(
Expand Down Expand Up @@ -727,6 +754,19 @@ def main(args): # pylint: disable=W0621
"optional-plugins": False,
}

if python_module_available("avocado-framework-plugin-golang"):
TEST_SIZE["optional-plugins"] += TEST_SIZE["optional-plugins-golang"]
if python_module_available("avocado-framework-plugin-result-html"):
TEST_SIZE["optional-plugins"] += TEST_SIZE["optional-plugins-html"]
if python_module_available("avocado-framework-plugin-robot"):
TEST_SIZE["optional-plugins"] += TEST_SIZE["optional-plugins-robot"]
if python_module_available("avocado-framework-plugin-varianter-cit"):
TEST_SIZE["optional-plugins"] += TEST_SIZE["optional-plugins-varianter_cit"]
if python_module_available("avocado-framework-plugin-varianter-yaml-to-mux"):
TEST_SIZE["optional-plugins"] += TEST_SIZE[
"optional-plugins-varianter_yaml_to_mux"
]

# Make a list of strings instead of a list with a single string
if len(args.disable_plugin_checks) > 0:
args.disable_plugin_checks = args.disable_plugin_checks[0].split(",")
Expand Down Expand Up @@ -809,6 +849,19 @@ def main(args): # pylint: disable=W0621
print("check.py didn't clean test results.")
print("uncleaned directories:")
print(post_job_test_result_dirs.difference(pre_job_test_result_dirs))
for suite in j.test_suites:
if suite.size != TEST_SIZE[suite.name]:
if exit_code == 0:
exit_code = 1
print(
f"suite {suite.name} doesn't have {TEST_SIZE[suite.name]} tests"
f" it has {suite.size}."
)
print(
"If you made some changes into selftests please update `TEST_SIZE`"
" variable in `check.py`. If you haven't done any changes to"
" selftests this behavior is an ERROR, and it needs to be fixed."
)

# tmp dirs clean up check
process.run(f"{sys.executable} selftests/check_tmp_dirs")
Expand Down
27 changes: 11 additions & 16 deletions selftests/functional/basic.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@
import xml.dom.minidom
import zipfile

from avocado import Test
from avocado.core import exit_codes
from avocado.utils import path as utils_path
from avocado.utils import process, script
Expand Down Expand Up @@ -507,7 +506,7 @@ def test_runner_timeout(self):
expected_rc,
f"Avocado did not return rc {expected_rc}:\n{result}",
)
self.assertIn("timeout", result_json["tests"][0]["fail_reason"])
self.assertIn("Timeout reached", result_json["tests"][0]["fail_reason"])
# Ensure no test aborted error messages show up
self.assertNotIn(b"TestAbortError: Test aborted unexpectedly", output)

Expand Down Expand Up @@ -553,9 +552,7 @@ def test_show_user_stream(self):
)
result = process.run(cmd_line, ignore_status=True)
self.assertEqual(result.exit_status, exit_codes.AVOCADO_ALL_OK)
self.assertIn(
b"Plant.test_plant_organic: preparing soil on row 0", result.stdout
)
self.assertIn(b"preparing soil on row 0", result.stdout)
lines = result.stdout.split(b"\n")
self.assertEqual(
len(lines), len(set(lines)), "The --show option has duplicities"
Expand Down Expand Up @@ -783,9 +780,7 @@ def test_store_logging_stream(self):
with open(progress_info, encoding="utf-8") as file:
stream_line = file.readline()
self.assertIn(
"avocado.test.progress INFO | "
"1-examples/tests/logging_streams.py:Plant.test_plant_organic: "
"preparing soil on row 0",
"logging_streams L0017 INFO | preparing soil on row 0",
stream_line,
)

Expand All @@ -805,10 +800,10 @@ def test_full_log(self):
self.assertTrue(os.path.exists(progress_info))
with open(progress_info, encoding="utf-8") as file:
stream = file.read()
self.assertIn("avocado.job", stream)
self.assertIn("avocado.core", stream)
self.assertIn("avocado.test", stream)
self.assertIn("avocado.app", stream)
self.assertIn("INFO | Avocado config:", stream)
self.assertIn("requested -> triagin", stream)
self.assertIn("preparing soil on row 0", stream)
self.assertIn("INFO | RESULTS : PASS 1 |", stream)

@unittest.skipUnless(
os.getenv("CI"),
Expand All @@ -824,7 +819,7 @@ def check_matplotlib_logs(file_path):
self.assertTrue(os.path.exists(file_path))
with open(file_path, encoding="utf-8") as file:
stream = file.read()
self.assertIn("matplotlib DEBUG|", stream)
self.assertIn("matplotlib __init__ L0337 DEBUG|", stream)

log_dir = os.path.join(self.tmpdir.name, "latest")
test_log_dir = os.path.join(
Expand Down Expand Up @@ -891,7 +886,7 @@ def test_store_logging_stream_level(self):
with open(progress_info, encoding="utf-8") as file:
stream_line = file.readline()
self.assertIn(
"avocado.test.progress ERROR| Avocados are Gone",
"logging_streams L0037 ERROR| Avocados are Gone",
stream_line,
)
progress_info = os.path.join(
Expand All @@ -903,12 +898,12 @@ def test_store_logging_stream_level(self):
with open(progress_info, encoding="utf-8") as file:
stream_line = file.readline()
self.assertIn(
"avocado.test.progress ERROR| 1-examples/tests/logging_streams.py:Plant.test_plant_organic: Avocados are Gone",
"logging_streams L0037 ERROR| Avocados are Gone",
stream_line,
)


class DryRunTest(Test):
class DryRunTest(unittest.TestCase):
def test_dry_run(self):
examples_path = os.path.join("examples", "tests")
passtest = os.path.join(examples_path, "passtest.py")
Expand Down
Loading