diff --git a/poetry.lock b/poetry.lock index 46715ebc2c8..196cadc40f0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,3 +1,11 @@ +[[package]] +category = "main" +description = "apipkg: namespace control and lazy-import mechanism" +name = "apipkg" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.5" + [[package]] category = "main" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." @@ -82,6 +90,28 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" version = "0.4.3" +[[package]] +category = "main" +description = "execnet: rapid multi-Python deployment" +name = "execnet" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.7.1" + +[package.dependencies] +apipkg = ">=1.4" + +[package.extras] +testing = ["pre-commit"] + +[[package]] +category = "main" +description = "A platform independent file lock." +name = "filelock" +optional = false +python-versions = "*" +version = "3.0.12" + [[package]] category = "main" description = "the modular source code checker: pep8 pyflakes and co" @@ -103,6 +133,14 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" version = "2.8" +[[package]] +category = "main" +description = "iniconfig: brain-dead simple config-ini parsing" +name = "iniconfig" +optional = false +python-versions = "*" +version = "1.0.1" + [[package]] category = "main" description = "Pythonic task execution" @@ -215,22 +253,50 @@ description = "pytest: simple powerful testing with Python" name = "pytest" optional = false python-versions = ">=3.5" -version = "5.3.4" +version = "6.0.2" [package.dependencies] atomicwrites = ">=1.0" attrs = ">=17.4.0" colorama = "*" +iniconfig = "*" more-itertools = ">=4.0.0" packaging = "*" pluggy = ">=0.12,<1.0" -py = ">=1.5.0" -wcwidth = "*" +py = ">=1.8.2" +toml = "*" [package.extras] -checkqa-mypy = ["mypy (v0.761)"] 
+checkqa_mypy = ["mypy (0.780)"] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +[[package]] +category = "main" +description = "run tests in isolated forked subprocesses" +name = "pytest-forked" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "1.3.0" + +[package.dependencies] +py = "*" +pytest = ">=3.10" + +[[package]] +category = "main" +description = "pytest-httpserver is a httpserver for pytest" +name = "pytest-httpserver" +optional = false +python-versions = ">=3.4" +version = "0.3.5" + +[package.dependencies] +werkzeug = "*" + +[package.extras] +dev = ["autopep8", "coverage", "flake8", "ipdb", "pytest", "pytest-cov", "reno", "requests", "rope", "sphinx", "sphinx-rtd-theme", "wheel"] +test = ["coverage", "pytest", "pytest-cov", "requests"] + [[package]] category = "main" description = "py.test plugin to abort hanging tests" @@ -242,6 +308,23 @@ version = "1.3.4" [package.dependencies] pytest = ">=3.6.0" +[[package]] +category = "main" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +name = "pytest-xdist" +optional = false +python-versions = ">=3.5" +version = "2.1.0" + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=6.0.0" +pytest-forked = "*" + +[package.extras] +psutil = ["psutil (>=3.0)"] +testing = ["filelock"] + [[package]] category = "main" description = "YAML parser and emitter for Python" @@ -331,17 +414,25 @@ socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] [[package]] category = "main" -description = "Measures the displayed width of unicode strings in a terminal" -name = "wcwidth" +description = "The comprehensive WSGI web application library." 
+name = "werkzeug" optional = false -python-versions = "*" -version = "0.2.5" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "1.0.1" + +[package.extras] +dev = ["pytest", "pytest-timeout", "coverage", "tox", "sphinx", "pallets-sphinx-themes", "sphinx-issues"] +watchdog = ["watchdog"] [metadata] -content-hash = "f28e76c2cd84157e665d2d1bcd6c12e83d9deecc196fcc5e5e74e27fa072fd4d" +content-hash = "3500e684ec97eabc4ffd38614f8e9af23630e1650f5ff095ad031396a8b2acf2" python-versions = "^3.8" [metadata.files] +apipkg = [ + {file = "apipkg-1.5-py2.py3-none-any.whl", hash = "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"}, + {file = "apipkg-1.5.tar.gz", hash = "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6"}, +] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, @@ -374,6 +465,14 @@ colorama = [ {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, ] +execnet = [ + {file = "execnet-1.7.1-py2.py3-none-any.whl", hash = "sha256:d4efd397930c46415f62f8a31388d6be4f27a91d7550eb79bc64a756e0056547"}, + {file = "execnet-1.7.1.tar.gz", hash = "sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50"}, +] +filelock = [ + {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, + {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, +] flake8 = [ {file = "flake8-3.8.3-py2.py3-none-any.whl", hash = 
"sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c"}, {file = "flake8-3.8.3.tar.gz", hash = "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"}, @@ -382,6 +481,10 @@ idna = [ {file = "idna-2.8-py2.py3-none-any.whl", hash = "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"}, {file = "idna-2.8.tar.gz", hash = "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407"}, ] +iniconfig = [ + {file = "iniconfig-1.0.1-py3-none-any.whl", hash = "sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437"}, + {file = "iniconfig-1.0.1.tar.gz", hash = "sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"}, +] invoke = [ {file = "invoke-1.4.1-py2-none-any.whl", hash = "sha256:93e12876d88130c8e0d7fd6618dd5387d6b36da55ad541481dfa5e001656f134"}, {file = "invoke-1.4.1-py3-none-any.whl", hash = "sha256:87b3ef9d72a1667e104f89b159eaf8a514dbf2f3576885b2bbdefe74c3fb2132"}, @@ -431,13 +534,25 @@ pyserial = [ {file = "pyserial-3.4.tar.gz", hash = "sha256:6e2d401fdee0eab996cf734e67773a0143b932772ca8b42451440cfed942c627"}, ] pytest = [ - {file = "pytest-5.3.4-py3-none-any.whl", hash = "sha256:c13d1943c63e599b98cf118fcb9703e4d7bde7caa9a432567bcdcae4bf512d20"}, - {file = "pytest-5.3.4.tar.gz", hash = "sha256:1d122e8be54d1a709e56f82e2d85dcba3018313d64647f38a91aec88c239b600"}, + {file = "pytest-6.0.2-py3-none-any.whl", hash = "sha256:0e37f61339c4578776e090c3b8f6b16ce4db333889d65d0efb305243ec544b40"}, + {file = "pytest-6.0.2.tar.gz", hash = "sha256:c8f57c2a30983f469bf03e68cdfa74dc474ce56b8f280ddcb080dfd91df01043"}, +] +pytest-forked = [ + {file = "pytest-forked-1.3.0.tar.gz", hash = "sha256:6aa9ac7e00ad1a539c41bec6d21011332de671e938c7637378ec9710204e37ca"}, + {file = "pytest_forked-1.3.0-py2.py3-none-any.whl", hash = "sha256:dc4147784048e70ef5d437951728825a131b81714b398d5d52f17c7c144d8815"}, +] +pytest-httpserver = [ + {file = "pytest_httpserver-0.3.5-py3-none-any.whl", hash = 
"sha256:c55289404b7604bfd1c103d243824b441add501828e05807795c2b851b9b0747"}, + {file = "pytest_httpserver-0.3.5.tar.gz", hash = "sha256:1d553dc0fb27483d00a1e25ddcfc38bd3438336ed60f862cad8c50b11621a11a"}, ] pytest-timeout = [ {file = "pytest-timeout-1.3.4.tar.gz", hash = "sha256:80faa19cd245a42b87a51699d640c00d937c02b749052bfca6bae8bdbe12c48e"}, {file = "pytest_timeout-1.3.4-py2.py3-none-any.whl", hash = "sha256:95ca727d4a1dace6ec5f0534d2940b8417ff8b782f7eef0ea09240bdd94d95c2"}, ] +pytest-xdist = [ + {file = "pytest-xdist-2.1.0.tar.gz", hash = "sha256:82d938f1a24186520e2d9d3a64ef7d9ac7ecdf1a0659e095d18e596b8cbd0672"}, + {file = "pytest_xdist-2.1.0-py3-none-any.whl", hash = "sha256:7c629016b3bb006b88ac68e2b31551e7becf173c76b977768848e2bbed594d90"}, +] pyyaml = [ {file = "PyYAML-5.3-cp27-cp27m-win32.whl", hash = "sha256:940532b111b1952befd7db542c370887a8611660d2b9becff75d39355303d82d"}, {file = "PyYAML-5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:059b2ee3194d718896c0ad077dd8c043e5e909d9180f387ce42012662a4946d6"}, @@ -547,7 +662,7 @@ urllib3 = [ {file = "urllib3-1.25.9-py2.py3-none-any.whl", hash = "sha256:88206b0eb87e6d677d424843ac5209e3fb9d0190d0ee169599165ec25e9d9115"}, {file = "urllib3-1.25.9.tar.gz", hash = "sha256:3018294ebefce6572a474f0604c2021e33b3fd8006ecd11d62107a5d2a963527"}, ] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +werkzeug = [ + {file = "Werkzeug-1.0.1-py2.py3-none-any.whl", hash = "sha256:2de2a5db0baeae7b2d2664949077c2ac63fbd16d98da0ff71837f7d1dea3fd43"}, + {file = "Werkzeug-1.0.1.tar.gz", hash = "sha256:6c80b1e5ad3665290ea39320b91e1be1e0d5f60652b964a3070216de83d2e47c"}, ] diff --git a/pyproject.toml b/pyproject.toml index 210d24101b3..0af3e431de3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ authors = [] 
[tool.poetry.dependencies] python = "^3.8" -pytest = "5.3.4" +pytest = "6.0.2" simplejson = "3.17.0" semver = "2.9.0" pyserial = "3.4" @@ -17,6 +17,9 @@ pytest-timeout = "1.3.4" invoke = "1.4.1" flake8 = "^3.8.3" black = { version = "^19.10b0", allow-prereleases = true } +filelock = "^3.0.12" +pytest-xdist = "^2.1.0" +pytest_httpserver = "^0.3.5" [tool.black] line-length = 120 diff --git a/test/common.py b/test/common.py index 4f1e74db694..a7500612583 100644 --- a/test/common.py +++ b/test/common.py @@ -14,6 +14,7 @@ # a commercial license, send an email to license@arduino.cc. import os import collections +import json Board = collections.namedtuple("Board", "address fqbn package architecture id core") @@ -25,3 +26,12 @@ def running_on_ci(): """ val = os.getenv("APPVEYOR") or os.getenv("DRONE") or os.getenv("GITHUB_WORKFLOW") return val is not None + + +def parse_json_traces(log_json_lines): + trace_entries = [] + for entry in log_json_lines: + entry = json.loads(entry) + if entry.get("level") == "trace": + trace_entries.append(entry.get("msg")) + return trace_entries diff --git a/test/conftest.py b/test/conftest.py index 48ae5aacc04..aafc0390aee 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -16,6 +16,7 @@ import platform import signal import shutil +import time from pathlib import Path import pytest @@ -23,6 +24,7 @@ from invoke import Local from invoke.context import Context import tempfile +from filelock import FileLock from .common import Board @@ -54,17 +56,32 @@ def data_dir(tmpdir_factory): if platform.system() == "Windows": with tempfile.TemporaryDirectory() as tmp: yield tmp + # shutil.rmtree(tmp, ignore_errors=True) else: - yield str(tmpdir_factory.mktemp("ArduinoTest")) + data = tmpdir_factory.mktemp("ArduinoTest") + yield str(data) + # shutil.rmtree(data, ignore_errors=True) @pytest.fixture(scope="session") -def downloads_dir(tmpdir_factory): +def downloads_dir(tmpdir_factory, worker_id): """ To save time and bandwidth, all the tests will 
access the same download cache folder. """ - return str(tmpdir_factory.mktemp("ArduinoTest")) + download_dir = tmpdir_factory.mktemp("ArduinoTest", numbered=False) + + # This folder should be created only once per session, if we're running + # tests in parallel using multiple processes we need to make sure + # this fixture is executed only once, thus the use of the lockfile + if not worker_id == "master": + lock = Path(download_dir / "lock") + with FileLock(lock): + if not lock.is_file(): + lock.touch() + + yield str(download_dir) + # shutil.rmtree(download_dir, ignore_errors=True) @@ -74,7 +91,9 @@ def working_dir(tmpdir_factory): """ will be created before running each test and deleted at the end, this way all the tests work in isolation. """ - return str(tmpdir_factory.mktemp("ArduinoTestWork")) + work_dir = tmpdir_factory.mktemp("ArduinoTestWork") + yield str(work_dir) + # shutil.rmtree(work_dir, ignore_errors=True) @@ -95,9 +114,12 @@ def run_command(pytestconfig, data_dir, downloads_dir, working_dir): } (Path(data_dir) / "packages").mkdir() - def _run(cmd_string, custom_working_dir=None): + def _run(cmd_string, custom_working_dir=None, custom_env=None): + if not custom_working_dir: custom_working_dir = working_dir + if not custom_env: + custom_env = env cli_full_line = '"{}" {}'.format(cli_path, cmd_string) run_context = Context() # It might happen that we need to change directories between drives on Windows, @@ -109,7 +131,7 @@ def _run(cmd_string, custom_working_dir=None): # It escapes spaces in the path using "\ " but it doesn't always work, # wrapping the path in quotation marks is the safest approach with run_context.prefix(f'{cd_command} "{custom_working_dir}"'): - return run_context.run(cli_full_line, echo=False, hide=True, warn=True, env=env) + return run_context.run(cli_full_line, echo=False, hide=True, warn=True, env=custom_env) return _run @@ -195,3 +217,19 @@ def 
copy_sketch(working_dir): test_sketch_path = Path(working_dir) / "sketch_simple" shutil.copytree(sketch_path, test_sketch_path) yield str(test_sketch_path) + + +@pytest.fixture(scope="function") +def wait_for_board(run_command): + def _waiter(seconds=10): + # Waits for the specified amount of seconds for a board to be visible. + # This is necessary since it might happen that a board is not immediately + # available after a test upload and subsequent tests might consequently fail. + time_end = time.time() + seconds + while time.time() < time_end: + result = run_command("board list --format json") + ports = json.loads(result.stdout) + if len([p.get("boards", []) for p in ports]) > 0: + break + + return _waiter diff --git a/test/pytest.ini b/test/pytest.ini index 957be6d9fae..ea573791a7b 100644 --- a/test/pytest.ini +++ b/test/pytest.ini @@ -7,5 +7,11 @@ filterwarnings = markers = slow: marks tests as slow (deselect with '-m "not slow"') -# atm some tests depend on each other, better to exit at first failure (-x) -addopts = -x -s --verbose --tb=short \ No newline at end of file +# -x to exit at first failure +# -s to disable per-test capture +# --verbose is what it says it is +# --tb=long sets the length of the traceback in case of failures +# -n=auto sets the number of parallel processes to use +# --dist=loadfile distributes the tests in the parallel processes dividing them per file +# See https://pypi.org/project/pytest-xdist/#parallelization for more info on parallelization +addopts = -x -s --verbose --tb=long -n=auto --dist=loadfile diff --git a/test/test_board.py b/test/test_board.py index bfca50bb5b1..e62ba079358 100644 --- a/test/test_board.py +++ b/test/test_board.py @@ -390,8 +390,7 @@ def test_board_list(run_command): - result = run_command("core update-index") - assert result.ok + run_command("core update-index") result = run_command("board list --format json") assert result.ok # check is a valid json and contains a list of ports @@ -404,18 +403,16 @@ def 
test_board_list(run_command): @pytest.mark.skipif(running_on_ci(), reason="VMs have no serial ports") def test_board_listall(run_command): - assert run_command("core update-index") + run_command("core update-index") result = run_command("board listall") assert result.ok assert ["Board", "Name", "FQBN"] == result.stdout.splitlines()[0].strip().split() def test_board_details(run_command): - result = run_command("core update-index") - assert result.ok + run_command("core update-index") # Download samd core pinned to 1.8.6 - result = run_command("core install arduino:samd@1.8.6") - assert result.ok + run_command("core install arduino:samd@1.8.6") # Test board listall with and without showing hidden elements result = run_command("board listall MIPS --format json") @@ -448,11 +445,9 @@ def test_board_details(run_command): # old `arduino-cli board details` did not need -b flag to work def test_board_details_old(run_command): - result = run_command("core update-index") - assert result.ok + run_command("core update-index") # Download samd core pinned to 1.8.6 - result = run_command("core install arduino:samd@1.8.6") - assert result.ok + run_command("core install arduino:samd@1.8.6") result = run_command("board details arduino:samd:nano_33_iot --format json") assert result.ok # Sort everything before compare @@ -473,11 +468,9 @@ def test_board_details_old(run_command): def test_board_details_no_flags(run_command): - result = run_command("core update-index") - assert result.ok + run_command("core update-index") # Download samd core pinned to 1.8.6 - result = run_command("core install arduino:samd@1.8.6") - assert result.ok + run_command("core install arduino:samd@1.8.6") result = run_command("board details") assert not result.ok assert "Error getting board details: parsing fqbn: invalid fqbn:" in result.stderr @@ -485,11 +478,9 @@ def test_board_details_no_flags(run_command): def test_board_details_list_programmers_without_flag(run_command): - result = run_command("core 
update-index") - assert result.ok + run_command("core update-index") # Download samd core pinned to 1.8.6 - result = run_command("core install arduino:samd@1.8.6") - assert result.ok + run_command("core install arduino:samd@1.8.6") result = run_command("board details -b arduino:samd:nano_33_iot") assert result.ok lines = [l.strip() for l in result.stdout.splitlines()] @@ -503,11 +494,9 @@ def test_board_details_list_programmers_without_flag(run_command): def test_board_details_list_programmers_flag(run_command): - result = run_command("core update-index") - assert result.ok + run_command("core update-index") # Download samd core pinned to 1.8.6 - result = run_command("core install arduino:samd@1.8.6") - assert result.ok + run_command("core install arduino:samd@1.8.6") result = run_command("board details -b arduino:samd:nano_33_iot --list-programmers") assert result.ok diff --git a/test/test_compile.py b/test/test_compile.py index 341779b383a..8b1b9c9f78e 100644 --- a/test/test_compile.py +++ b/test/test_compile.py @@ -12,23 +12,20 @@ # otherwise use the software for commercial activities involving the Arduino # software without disclosing the source code of your own applications. To purchase # a commercial license, send an email to license@arduino.cc. 
-import json import os import platform import pytest -from .common import running_on_ci +from .common import running_on_ci, parse_json_traces def test_compile_without_fqbn(run_command): # Init the environment explicitly - result = run_command("core update-index") - assert result.ok + run_command("core update-index") # Install Arduino AVR Boards - result = run_command("core install arduino:avr@1.8.3") - assert result.ok + run_command("core install arduino:avr@1.8.3") # Build sketch without FQBN result = run_command("compile") @@ -37,12 +34,10 @@ def test_compile_without_fqbn(run_command): def test_compile_with_simple_sketch(run_command, data_dir, working_dir): # Init the environment explicitly - result = run_command("core update-index") - assert result.ok + run_command("core update-index") # Download latest AVR - result = run_command("core install arduino:avr") - assert result.ok + run_command("core install arduino:avr") sketch_name = "CompileIntegrationTest" sketch_path = os.path.join(data_dir, sketch_name) @@ -65,12 +60,9 @@ def test_compile_with_simple_sketch(run_command, data_dir, working_dir): # let's test from the logs if the hex file produced by successful compile is moved to our sketch folder log_json = open(log_file_path, "r") - json_log_lines = log_json.readlines() - expected_trace_sequence = [ - "Compile {sketch} for {fqbn} started".format(sketch=sketch_path, fqbn=fqbn), - "Compile {sketch} for {fqbn} successful".format(sketch=sketch_name, fqbn=fqbn), - ] - assert is_message_sequence_in_json_log_traces(expected_trace_sequence, json_log_lines) + traces = parse_json_traces(log_json.readlines()) + assert f"Compile {sketch_path} for {fqbn} started" in traces + assert f"Compile {sketch_name} for {fqbn} successful" in traces # Test the --output-dir flag with absolute path target = os.path.join(data_dir, "test_dir") @@ -89,12 +81,10 @@ def test_compile_with_simple_sketch(run_command, data_dir, working_dir): ) def test_output_flag_default_path(run_command, 
data_dir, working_dir): # Init the environment explicitly - result = run_command("core update-index") - assert result.ok + run_command("core update-index") # Install Arduino AVR Boards - result = run_command("core install arduino:avr@1.8.3") - assert result.ok + run_command("core install arduino:avr@1.8.3") # Create a test sketch sketch_path = os.path.join(data_dir, "test_output_flag_default_path") @@ -111,12 +101,10 @@ def test_output_flag_default_path(run_command, data_dir, working_dir): def test_compile_with_sketch_with_symlink_selfloop(run_command, data_dir): # Init the environment explicitly - result = run_command("core update-index") - assert result.ok + run_command("core update-index") # Install Arduino AVR Boards - result = run_command("core install arduino:avr@1.8.3") - assert result.ok + run_command("core install arduino:avr@1.8.3") sketch_name = "CompileIntegrationTestSymlinkSelfLoop" sketch_path = os.path.join(data_dir, sketch_name) @@ -161,76 +149,16 @@ def test_compile_with_sketch_with_symlink_selfloop(run_command, data_dir): assert not result.ok -@pytest.mark.skipif(running_on_ci(), reason="VMs have no serial ports") -def test_compile_and_upload_combo(run_command, data_dir, detected_boards): - # Init the environment explicitly - result = run_command("core update-index") - assert result.ok - - # Install required core(s) - result = run_command("core install arduino:avr@1.8.3") - result = run_command("core install arduino:samd@1.8.7") - assert result.ok - - # Create a test sketch - sketch_name = "CompileAndUploadIntegrationTest" - sketch_path = os.path.join(data_dir, sketch_name) - sketch_main_file = os.path.join(sketch_path, sketch_name + ".ino") - result = run_command("sketch new {}".format(sketch_path)) - assert result.ok - assert "Sketch created in: {}".format(sketch_path) in result.stdout - - # Build sketch for each detected board - for board in detected_boards: - log_file_name = "{fqbn}-compile.log".format(fqbn=board.fqbn.replace(":", "-")) - 
log_file_path = os.path.join(data_dir, log_file_name) - command_log_flags = "--log-format json --log-file {} --log-level trace".format(log_file_path) - - def run_test(s): - result = run_command( - "compile -b {fqbn} --upload -p {address} {sketch_path} {log_flags}".format( - fqbn=board.fqbn, address=board.address, sketch_path=s, log_flags=command_log_flags, - ) - ) - assert result.ok - - # check from the logs if the bin file were uploaded on the current board - log_json = open(log_file_path, "r") - json_log_lines = log_json.readlines() - expected_trace_sequence = [ - "Compile {sketch} for {fqbn} started".format(sketch=sketch_path, fqbn=board.fqbn), - "Compile {sketch} for {fqbn} successful".format(sketch=sketch_name, fqbn=board.fqbn), - "Upload {sketch} on {fqbn} started".format(sketch=sketch_path, fqbn=board.fqbn), - "Upload {sketch} on {fqbn} successful".format(sketch=sketch_name, fqbn=board.fqbn), - ] - assert is_message_sequence_in_json_log_traces(expected_trace_sequence, json_log_lines) - - run_test(sketch_path) - run_test(sketch_main_file) - - -def is_message_sequence_in_json_log_traces(message_sequence, log_json_lines): - trace_entries = [] - for entry in log_json_lines: - entry = json.loads(entry) - if entry.get("level") == "trace": - if entry.get("msg") in message_sequence: - trace_entries.append(entry.get("msg")) - return message_sequence == trace_entries - - def test_compile_blacklisted_sketchname(run_command, data_dir): """ Compile should ignore folders named `RCS`, `.git` and the likes, but it should be ok for a sketch to be named like RCS.ino """ # Init the environment explicitly - result = run_command("core update-index") - assert result.ok + run_command("core update-index") # Install Arduino AVR Boards - result = run_command("core install arduino:avr@1.8.3") - assert result.ok + run_command("core install arduino:avr@1.8.3") sketch_name = "RCS" sketch_path = os.path.join(data_dir, sketch_name) @@ -246,6 +174,7 @@ def 
test_compile_blacklisted_sketchname(run_command, data_dir): assert result.ok +@pytest.mark.skip() def test_compile_without_precompiled_libraries(run_command, data_dir): # Init the environment explicitly url = "https://adafruit.github.io/arduino-board-index/package_adafruit_index.json" diff --git a/test/test_core.py b/test/test_core.py index ffc0ac70b30..4091822e6cc 100644 --- a/test/test_core.py +++ b/test/test_core.py @@ -16,11 +16,16 @@ import platform import pytest import simplejson as json +from pathlib import Path -def test_core_search(run_command): - url = "https://raw.githubusercontent.com/arduino/arduino-cli/master/test/testdata/test_index.json" - assert run_command("core update-index --additional-urls={}".format(url)) +def test_core_search(run_command, httpserver): + # Set up the server to serve our custom index file + test_index = Path(__file__).parent / "testdata" / "test_index.json" + httpserver.expect_request("/test_index.json").respond_with_data(test_index.read_text()) + + url = httpserver.url_for("/test_index.json") + assert run_command(f"core update-index --additional-urls={url}") # search a specific core result = run_command("core search avr") assert result.ok @@ -41,15 +46,19 @@ def test_core_search(run_command): assert 2 == len(data) -def test_core_search_no_args(run_command): +def test_core_search_no_args(run_command, httpserver): """ This tests `core search` with and without additional URLs in case no args are passed (i.e. all results are shown). 
""" + # Set up the server to serve our custom index file + test_index = Path(__file__).parent / "testdata" / "test_index.json" + httpserver.expect_request("/test_index.json").respond_with_data(test_index.read_text()) + # update custom index and install test core (installed cores affect `core search`) - url = "https://raw.githubusercontent.com/arduino/arduino-cli/master/test/testdata/test_index.json" - assert run_command("core update-index --additional-urls={}".format(url)) - assert run_command("core install test:x86 --additional-urls={}".format(url)) + url = httpserver.url_for("/test_index.json") + assert run_command(f"core update-index --additional-urls={url}") + assert run_command(f"core install test:x86 --additional-urls={url}") # list all with no additional urls, ensure the test core won't show up result = run_command("core search") @@ -69,7 +78,7 @@ def test_core_search_no_args(run_command): assert len(platforms) == num_platforms # list all with additional urls, check the test core is there - result = run_command("core search --additional-urls={}".format(url)) + result = run_command(f"core search --additional-urls={url}") assert result.ok num_platforms = 0 found = False @@ -81,7 +90,7 @@ def test_core_search_no_args(run_command): assert found # same thing in JSON format, also check the number of platforms found is the same - result = run_command("core search --format json --additional-urls={}".format(url)) + result = run_command(f"core search --format json --additional-urls={url}") assert result.ok found = False platforms = json.loads(result.stdout) diff --git a/test/test_lib.py b/test/test_lib.py index a91696ef091..e7c0a5e5d91 100644 --- a/test/test_lib.py +++ b/test/test_lib.py @@ -17,7 +17,7 @@ def test_list(run_command): # Init the environment explicitly - assert run_command("core update-index") + run_command("core update-index") # When output is empty, nothing is printed out, no matter the output format result = run_command("lib list") diff --git 
a/test/test_outdated.py b/test/test_outdated.py index 99b83ee9271..d9ef406851c 100644 --- a/test/test_outdated.py +++ b/test/test_outdated.py @@ -16,15 +16,15 @@ def test_outdated(run_command): # Updates index for cores and libraries - assert run_command("core update-index") - assert run_command("lib update-index") + run_command("core update-index") + run_command("lib update-index") # Installs an outdated core and library - assert run_command("core install arduino:avr@1.6.3") + run_command("core install arduino:avr@1.6.3") assert run_command("lib install USBHost@1.0.0") # Installs latest version of a core and a library - assert run_command("core install arduino:samd") + run_command("core install arduino:samd") assert run_command("lib install ArduinoJson") # Verifies only outdated cores and libraries are returned diff --git a/test/test_update.py b/test/test_update.py index d08580e7666..34df7d57d1a 100644 --- a/test/test_update.py +++ b/test/test_update.py @@ -26,15 +26,15 @@ def test_update(run_command): def test_update_showing_outdated(run_command): # Updates index for cores and libraries - assert run_command("core update-index") - assert run_command("lib update-index") + run_command("core update-index") + run_command("lib update-index") # Installs an outdated core and library - assert run_command("core install arduino:avr@1.6.3") + run_command("core install arduino:avr@1.6.3") assert run_command("lib install USBHost@1.0.0") # Installs latest version of a core and a library - assert run_command("core install arduino:samd") + run_command("core install arduino:samd") assert run_command("lib install ArduinoJson") # Verifies outdated cores and libraries are printed after updating indexes diff --git a/test/test_upgrade.py b/test/test_upgrade.py index f90a5356520..e4c67678c9e 100644 --- a/test/test_upgrade.py +++ b/test/test_upgrade.py @@ -16,15 +16,15 @@ def test_upgrade(run_command): # Updates index for cores and libraries - assert run_command("core update-index") - 
assert run_command("lib update-index") + run_command("core update-index") + run_command("lib update-index") # Installs an outdated core and library - assert run_command("core install arduino:avr@1.6.3") + run_command("core install arduino:avr@1.6.3") assert run_command("lib install USBHost@1.0.0") # Installs latest version of a core and a library - assert run_command("core install arduino:samd") + run_command("core install arduino:samd") assert run_command("lib install ArduinoJson") # Verifies outdated core and libraries are shown diff --git a/test/test_upload.py b/test/test_upload.py index 52354092ef5..32cf98071ab 100644 --- a/test/test_upload.py +++ b/test/test_upload.py @@ -17,7 +17,7 @@ import pytest -from .common import running_on_ci +from .common import running_on_ci, parse_json_traces # Skip this module when running in CI environments pytestmark = pytest.mark.skipif(running_on_ci(), reason="VMs have no serial ports") @@ -25,55 +25,48 @@ def test_upload(run_command, data_dir, detected_boards): # Init the environment explicitly - assert run_command("core update-index") + run_command("core update-index") for board in detected_boards: # Download core - assert run_command("core install {}".format(board.core)) + run_command(f"core install {board.core}") # Create a sketch sketch_name = "foo" sketch_path = os.path.join(data_dir, sketch_name) - assert run_command("sketch new {}".format(sketch_path)) + fqbn = board.fqbn + address = board.address + assert run_command(f"sketch new {sketch_path}") # Build sketch - assert run_command("compile -b {fqbn} {sketch_path}".format(fqbn=board.fqbn, sketch_path=sketch_path)) + assert run_command(f"compile -b {fqbn} {sketch_path}") # Upload without port must fail - result = run_command("upload -b {fqbn} {sketch_path}".format(sketch_path=sketch_path, fqbn=board.fqbn)) + result = run_command(f"upload -b {fqbn} {sketch_path}") assert result.failed # Upload - assert run_command( - "upload -b {fqbn} -p {port} {sketch_path}".format( - 
sketch_path=sketch_path, fqbn=board.fqbn, port=board.address - ) - ) + res = run_command(f"upload -b {fqbn} -p {address} {sketch_path}") + print(res.stderr) + assert res # multiple uploads requires some pauses time.sleep(2) # Upload using --input-dir reusing standard sketch "build" folder artifacts - assert run_command( - "upload -b {fqbn} -p {port} --input-dir {sketch_path}/build/{fqbn_path} {sketch_path}".format( - sketch_path=sketch_path, fqbn=board.fqbn, port=board.address, - fqbn_path=board.fqbn.replace(":", ".") - ) - ) + fqbn_path = fqbn.replace(":", ".") + assert run_command(f"upload -b {fqbn} -p {address} --input-dir {sketch_path}/build/{fqbn_path} {sketch_path}") # multiple uploads requires some pauses time.sleep(2) # Upload using --input-file reusing standard sketch "build" folder artifacts assert run_command( - "upload -b {fqbn} -p {port} --input-file {sketch_path}/build/{fqbn_path}/{sketch_name}.ino.bin".format( - sketch_path=sketch_path, fqbn=board.fqbn, port=board.address, sketch_name=sketch_name, - fqbn_path=board.fqbn.replace(":", ".") - ) + f"upload -b {fqbn} -p {address} --input-file {sketch_path}/build/{fqbn_path}/{sketch_name}.ino.bin" ) def test_upload_after_attach(run_command, data_dir, detected_boards): # Init the environment explicitly - assert run_command("core update-index") + run_command("core update-index") for board in detected_boards: # Download core - assert run_command("core install {}".format(board.core)) + run_command(f"core install {board.core}") # Create a sketch sketch_path = os.path.join(data_dir, "foo") assert run_command("sketch new {}".format(sketch_path)) @@ -84,3 +77,43 @@ def test_upload_after_attach(run_command, data_dir, detected_boards): assert run_command("compile {sketch_path}".format(sketch_path=sketch_path)) # Upload assert run_command("upload {sketch_path}".format(sketch_path=sketch_path)) + + +def test_compile_and_upload_combo(run_command, data_dir, detected_boards, wait_for_board): + # Init the environment 
explicitly + run_command("core update-index") + + # Install required core(s) + run_command("core install arduino:avr@1.8.3") + run_command("core install arduino:samd@1.8.6") + + # Create a test sketch + sketch_name = "CompileAndUploadIntegrationTest" + sketch_path = os.path.join(data_dir, sketch_name) + sketch_main_file = os.path.join(sketch_path, sketch_name + ".ino") + result = run_command("sketch new {}".format(sketch_path)) + assert result.ok + assert "Sketch created in: {}".format(sketch_path) in result.stdout + + # Build sketch for each detected board + for board in detected_boards: + log_file_name = "{fqbn}-compile.log".format(fqbn=board.fqbn.replace(":", "-")) + log_file_path = os.path.join(data_dir, log_file_name) + command_log_flags = "--log-format json --log-file {} --log-level trace".format(log_file_path) + + def run_test(s): + wait_for_board() + result = run_command(f"compile -b {board.fqbn} --upload -p {board.address} {s} {command_log_flags}") + print(result.stderr) + assert result.ok + + # check from the logs if the bin file was uploaded on the current board + log_json = open(log_file_path, "r") + traces = parse_json_traces(log_json.readlines()) + assert f"Compile {sketch_path} for {board.fqbn} started" in traces + assert f"Compile {sketch_name} for {board.fqbn} successful" in traces + assert f"Upload {sketch_path} on {board.fqbn} started" in traces + assert "Upload successful" in traces + + run_test(sketch_path) + run_test(sketch_main_file)