granicus.if.org Git - esp-idf/commitdiff
idf.py: Add support for action specific options
authorSergei Silnov <sergei.silnov@espressif.com>
Wed, 10 Apr 2019 16:06:52 +0000 (18:06 +0200)
committerSergei Silnov <sergei.silnov@espressif.com>
Mon, 3 Jun 2019 11:07:02 +0000 (13:07 +0200)
Changes the argument parsing mechanism from argparse to a new one that provides better support for extensions and for options that are only applicable to specific subcommands.

Breaking changes:

1. All global options should go before subcommands, i.e. `idf.py build -C ~/some/project` will no longer work; only `idf.py -C ~/some/project build` is accepted
2. To provide multiple values to an option like `--define-cache-entry`, it is necessary to repeat the option for each value, i.e. `idf.py -D entry1 entry2 entry3` will not work; the right way is: `idf.py -D entry1 -D entry2 -D entry3`
At the moment there are three such options: `--define-cache-entry` in the base list, and `--test-components` and `--test-exclude-components` in the unit test extensions
3. Drops the `defconfig` and `bootloader-clean` subcommands

Closes https://github.com/espressif/esp-idf/issues/3570
Closes https://github.com/espressif/esp-idf/issues/3571

examples/system/unit_test/test/CMakeLists.txt
requirements.txt
tools/ci/test_build_system_cmake.sh
tools/idf.py
tools/unit-test-app/README.md
tools/unit-test-app/idf_ext.py

index 778a2b907bb2a4dd154da3d4c70dff916b271a4a..78ee94b4f00f9f149add863302d075e6a7763f45 100644 (file)
@@ -1,4 +1,4 @@
-# This is the project CMakeLists.txt file for the test subproject 
+# This is the project CMakeLists.txt file for the test subproject
 cmake_minimum_required(VERSION 3.5)
 
 # Include the components directory of the main application:
@@ -8,7 +8,7 @@ set(EXTRA_COMPONENT_DIRS "../components")
 # Set the components to include the tests for.
 # This can be overriden from CMake cache:
 # - when invoking CMake directly: cmake -D TEST_COMPONENTS="xxxxx" ..
-# - when using idf.py: idf.py build -T xxxxx
+# - when using idf.py: idf.py -T xxxxx build
 #
 set(TEST_COMPONENTS "testable" CACHE STRING "List of components to test")
 
index 6846eac8ade034d0a41341929592c0fbd327dd6a..a43c6d4ae63873db21be1d44861ada8acfd3c561 100644 (file)
@@ -5,6 +5,7 @@ setuptools
 # The setuptools package is required to install source distributions and on some systems is not installed by default.
 # Please keep it as the first item of this list.
 #
+click>=5.0
 pyserial>=3.0
 future>=0.15.2
 cryptography>=2.1.4
index 133afe2ab80ae5d28fbbca5aecf6721324984625..3587efd9d5c6a7fe4366a5042a278f4c5d0dcb8e 100755 (executable)
@@ -348,7 +348,7 @@ function run_tests()
     echo "CONFIG_ESP32_SPIRAM_SUPPORT=y" >> sdkconfig.defaults
     echo "CONFIG_SPIRAM_CACHE_WORKAROUND=y" >> sdkconfig.defaults
     # note: we do 'reconfigure' here, as we just need to run cmake
-    idf.py -C $IDF_PATH/examples/build_system/cmake/import_lib -B `pwd`/build reconfigure -D SDKCONFIG_DEFAULTS="`pwd`/sdkconfig.defaults"
+    idf.py -C $IDF_PATH/examples/build_system/cmake/import_lib -B `pwd`/build -D SDKCONFIG_DEFAULTS="`pwd`/sdkconfig.defaults" reconfigure
     grep -q '"command"' build/compile_commands.json || failure "compile_commands.json missing or has no no 'commands' in it"
     (grep '"command"' build/compile_commands.json | grep -v mfix-esp32-psram-cache-issue) && failure "All commands in compile_commands.json should use PSRAM cache workaround"
     rm -r sdkconfig.defaults build
@@ -411,7 +411,7 @@ endmenu\n" >> ${IDF_PATH}/Kconfig;
     print_status "Check ccache is used to build when present"
     touch ccache && chmod +x ccache  # make sure that ccache is present for this test
     (export PATH=$PWD:$PATH && idf.py reconfigure | grep "ccache will be used for faster builds") || failure "ccache should be used when present"
-    (export PATH=$PWD:$PATH && idf.py reconfigure --no-ccache | grep -c "ccache will be used for faster builds" | grep -wq 0) \
+    (export PATH=$PWD:$PATH && idf.py  --no-ccache reconfigure| grep -c "ccache will be used for faster builds" | grep -wq 0) \
         || failure "ccache should not be used even when present if --no-ccache is specified"
     rm -f ccache
 
index c9a3e464eecbe64999e2ef1d88b4ba5054f63f73..7605280df8e364a21ab5db45d14e7ca006414009 100755 (executable)
@@ -7,7 +7,7 @@
 #
 #
 #
-# Copyright 2018 Espressif Systems (Shanghai) PTE LTD
+# Copyright 2019 Espressif Systems (Shanghai) PTE LTD
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # check_environment() function below. If possible, avoid importing
 # any external libraries here - put in external script, or import in
 # their specific function instead.
-import sys
-import argparse
+import codecs
+import json
+import locale
+import multiprocessing
 import os
 import os.path
-import subprocess
-import multiprocessing
 import re
 import shutil
-import json
+import subprocess
+import sys
 
 
 class FatalError(RuntimeError):
     """
     Wrapper class for runtime errors that aren't caused by bugs in idf.py or the build proces.s
     """
+
     pass
 
 
@@ -53,25 +55,29 @@ os.environ["PYTHON"] = sys.executable
 
 # Name of the program, normally 'idf.py'.
 # Can be overridden from idf.bat using IDF_PY_PROGRAM_NAME
-PROG = os.getenv('IDF_PY_PROGRAM_NAME', sys.argv[0])
+PROG = os.getenv("IDF_PY_PROGRAM_NAME", sys.argv[0])
 
 # Make flavors, across the various kinds of Windows environments & POSIX...
 if "MSYSTEM" in os.environ:  # MSYS
     MAKE_CMD = "make"
     MAKE_GENERATOR = "MSYS Makefiles"
-elif os.name == 'nt':  # other Windows
+elif os.name == "nt":  # other Windows
     MAKE_CMD = "mingw32-make"
     MAKE_GENERATOR = "MinGW Makefiles"
 else:
     MAKE_CMD = "make"
     MAKE_GENERATOR = "Unix Makefiles"
 
-GENERATORS = \
-    [
-        # ('generator name', 'build command line', 'version command line', 'verbose flag')
-        ("Ninja", ["ninja"], ["ninja", "--version"], "-v"),
-        (MAKE_GENERATOR, [MAKE_CMD, "-j", str(multiprocessing.cpu_count() + 2)], [MAKE_CMD, "--version"], "VERBOSE=1"),
-    ]
+GENERATORS = [
+    # ('generator name', 'build command line', 'version command line', 'verbose flag')
+    ("Ninja", ["ninja"], ["ninja", "--version"], "-v"),
+    (
+        MAKE_GENERATOR,
+        [MAKE_CMD, "-j", str(multiprocessing.cpu_count() + 2)],
+        [MAKE_CMD, "--version"],
+        "VERBOSE=1",
+    ),
+]
 GENERATOR_CMDS = dict((a[0], a[1]) for a in GENERATORS)
 GENERATOR_VERBOSE = dict((a[0], a[3]) for a in GENERATORS)
 
@@ -82,6 +88,7 @@ def _run_tool(tool_name, args, cwd):
         if " " in arg and not (arg.startswith('"') or arg.startswith("'")):
             return "'" + arg + "'"
         return arg
+
     display_args = " ".join(quote_arg(arg) for arg in args)
     print("Running %s in directory %s" % (tool_name, quote_arg(cwd)))
     print('Executing "%s"...' % str(display_args))
@@ -115,9 +122,11 @@ def check_environment():
     if "IDF_PATH" in os.environ:
         set_idf_path = _realpath(os.environ["IDF_PATH"])
         if set_idf_path != detected_idf_path:
-            print("WARNING: IDF_PATH environment variable is set to %s but %s path indicates IDF directory %s. "
-                  "Using the environment variable directory, but results may be unexpected..."
-                  % (set_idf_path, PROG, detected_idf_path))
+            print(
+                "WARNING: IDF_PATH environment variable is set to %s but %s path indicates IDF directory %s. "
+                "Using the environment variable directory, but results may be unexpected..."
+                % (set_idf_path, PROG, detected_idf_path)
+            )
     else:
         print("Setting IDF_PATH environment variable: %s" % detected_idf_path)
         os.environ["IDF_PATH"] = detected_idf_path
@@ -125,9 +134,15 @@ def check_environment():
     # check Python dependencies
     print("Checking Python dependencies...")
     try:
-        subprocess.check_call([os.environ["PYTHON"],
-                               os.path.join(os.environ["IDF_PATH"], "tools", "check_python_dependencies.py")],
-                              env=os.environ)
+        subprocess.check_call(
+            [
+                os.environ["PYTHON"],
+                os.path.join(
+                    os.environ["IDF_PATH"], "tools", "check_python_dependencies.py"
+                ),
+            ],
+            env=os.environ,
+        )
     except subprocess.CalledProcessError:
         raise SystemExit(1)
 
@@ -147,7 +162,10 @@ def detect_cmake_generator():
     for (generator, _, version_check, _) in GENERATORS:
         if executable_exists(version_check):
             return generator
-    raise FatalError("To use %s, either the 'ninja' or 'GNU make' build tool must be available in the PATH" % PROG)
+    raise FatalError(
+        "To use %s, either the 'ninja' or 'GNU make' build tool must be available in the PATH"
+        % PROG
+    )
 
 
 def _ensure_build_directory(args, always_run_cmake=False):
@@ -168,7 +186,9 @@ def _ensure_build_directory(args, always_run_cmake=False):
         else:
             raise FatalError("%s must be a project directory" % project_dir)
     if not os.path.exists(os.path.join(project_dir, "CMakeLists.txt")):
-        raise FatalError("CMakeLists.txt not found in project directory %s" % project_dir)
+        raise FatalError(
+            "CMakeLists.txt not found in project directory %s" % project_dir
+        )
 
     # Verify/create the build directory
     build_dir = args.build_dir
@@ -179,7 +199,13 @@ def _ensure_build_directory(args, always_run_cmake=False):
         if args.generator is None:
             args.generator = detect_cmake_generator()
         try:
-            cmake_args = ["cmake", "-G", args.generator, "-DPYTHON_DEPS_CHECKED=1", "-DESP_PLATFORM=1"]
+            cmake_args = [
+                "cmake",
+                "-G",
+                args.generator,
+                "-DPYTHON_DEPS_CHECKED=1",
+                "-DESP_PLATFORM=1",
+            ]
             if not args.no_warnings:
                 cmake_args += ["--warn-uninitialized"]
             if args.no_ccache:
@@ -203,16 +229,22 @@ def _ensure_build_directory(args, always_run_cmake=False):
     except KeyError:
         generator = detect_cmake_generator()
     if args.generator is None:
-        args.generator = generator  # reuse the previously configured generator, if none was given
+        args.generator = (
+            generator
+        )  # reuse the previously configured generator, if none was given
     if generator != args.generator:
-        raise FatalError("Build is configured for generator '%s' not '%s'. Run '%s fullclean' to start again."
-                         % (generator, args.generator, PROG))
+        raise FatalError(
+            "Build is configured for generator '%s' not '%s'. Run '%s fullclean' to start again."
+            % (generator, args.generator, PROG)
+        )
 
     try:
         home_dir = cache["CMAKE_HOME_DIRECTORY"]
         if _realpath(home_dir) != _realpath(project_dir):
-            raise FatalError("Build directory '%s' configured for project '%s' not '%s'. Run '%s fullclean' to start again."
-                             % (build_dir, _realpath(home_dir), _realpath(project_dir), PROG))
+            raise FatalError(
+                "Build directory '%s' configured for project '%s' not '%s'. Run '%s fullclean' to start again."
+                % (build_dir, _realpath(home_dir), _realpath(project_dir), PROG)
+            )
     except KeyError:
         pass  # if cmake failed part way, CMAKE_HOME_DIRECTORY may not be set yet
 
@@ -236,7 +268,7 @@ def parse_cmakecache(path):
     return result
 
 
-def build_target(target_name, args):
+def build_target(target_name, ctx, args):
     """
     Execute the target build system to build target 'target_name'
 
@@ -262,7 +294,9 @@ def build_target(target_name, args):
 
 
 def _get_esptool_args(args):
-    esptool_path = os.path.join(os.environ["IDF_PATH"], "components/esptool_py/esptool/esptool.py")
+    esptool_path = os.path.join(
+        os.environ["IDF_PATH"], "components/esptool_py/esptool/esptool.py"
+    )
     if args.port is None:
         args.port = get_default_serial_port()
     result = [PYTHON, esptool_path]
@@ -277,28 +311,30 @@ def _get_esptool_args(args):
     return result
 
 
-def flash(action, args):
+def flash(action, ctx, args):
     """
     Run esptool to flash the entire project, from an argfile generated by the build system
     """
     flasher_args_path = {  # action -> name of flasher args file generated by build system
-        "bootloader-flash":      "flash_bootloader_args",
+        "bootloader-flash": "flash_bootloader_args",
         "partition_table-flash": "flash_partition_table_args",
-        "app-flash":             "flash_app_args",
-        "flash":                 "flash_project_args",
-    }[action]
+        "app-flash": "flash_app_args",
+        "flash": "flash_project_args",
+    }[
+        action
+    ]
     esptool_args = _get_esptool_args(args)
     esptool_args += ["write_flash", "@" + flasher_args_path]
     _run_tool("esptool.py", esptool_args, args.build_dir)
 
 
-def erase_flash(action, args):
+def erase_flash(action, ctx, args):
     esptool_args = _get_esptool_args(args)
     esptool_args += ["erase_flash"]
     _run_tool("esptool.py", esptool_args, args.build_dir)
 
 
-def monitor(action, args):
+def monitor(action, ctx, args):
     """
     Run idf_monitor.py to watch build output
     """
@@ -312,9 +348,11 @@ def monitor(action, args):
 
     elf_file = os.path.join(args.build_dir, project_desc["app_elf"])
     if not os.path.exists(elf_file):
-        raise FatalError("ELF file '%s' not found. You need to build & flash the project before running 'monitor', "
-                         "and the binary on the device must match the one in the build directory exactly. "
-                         "Try '%s flash monitor'." % (elf_file, PROG))
+        raise FatalError(
+            "ELF file '%s' not found. You need to build & flash the project before running 'monitor', "
+            "and the binary on the device must match the one in the build directory exactly. "
+            "Try '%s flash monitor'." % (elf_file, PROG)
+        )
     idf_monitor = os.path.join(os.environ["IDF_PATH"], "tools/idf_monitor.py")
     monitor_args = [PYTHON, idf_monitor]
     if args.port is not None:
@@ -322,7 +360,7 @@ def monitor(action, args):
     monitor_args += ["-b", project_desc["monitor_baud"]]
     monitor_args += [elf_file]
 
-    idf_py = [PYTHON] + get_commandline_options()  # commands to re-run idf.py
+    idf_py = [PYTHON] + get_commandline_options(ctx)  # commands to re-run idf.py
     monitor_args += ["-m", " ".join("'%s'" % a for a in idf_py)]
 
     if "MSYSTEM" in os.environ:
@@ -330,14 +368,14 @@ def monitor(action, args):
     _run_tool("idf_monitor", monitor_args, args.project_dir)
 
 
-def clean(action, args):
+def clean(action, ctx, args):
     if not os.path.isdir(args.build_dir):
         print("Build directory '%s' not found. Nothing to clean." % args.build_dir)
         return
-    build_target("clean", args)
+    build_target("clean", ctx, args)
 
 
-def reconfigure(action, args):
+def reconfigure(action, ctx, args):
     _ensure_build_directory(args, True)
 
 
@@ -346,13 +384,14 @@ def _delete_windows_symlinks(directory):
     It deletes symlinks recursively on Windows. It is useful for Python 2 which doesn't detect symlinks on Windows.
     """
     deleted_paths = []
-    if os.name == 'nt':
+    if os.name == "nt":
         import ctypes
-        for root, dirnames, filenames in os.walk(directory):
+
+        for root, dirnames, _filenames in os.walk(directory):
             for d in dirnames:
                 full_path = os.path.join(root, d)
                 try:
-                    full_path = full_path.decode('utf-8')
+                    full_path = full_path.decode("utf-8")
                 except Exception:
                     pass
                 if ctypes.windll.kernel32.GetFileAttributesW(full_path) & 0x0400:
@@ -361,7 +400,7 @@ def _delete_windows_symlinks(directory):
     return deleted_paths
 
 
-def fullclean(action, args):
+def fullclean(action, ctx, args):
     build_dir = args.build_dir
     if not os.path.isdir(build_dir):
         print("Build directory '%s' not found. Nothing to clean." % build_dir)
@@ -371,24 +410,35 @@ def fullclean(action, args):
         return
 
     if not os.path.exists(os.path.join(build_dir, "CMakeCache.txt")):
-        raise FatalError("Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically "
-                         "delete files in this directory. Delete the directory manually to 'clean' it." % build_dir)
+        raise FatalError(
+            "Directory '%s' doesn't seem to be a CMake build directory. Refusing to automatically "
+            "delete files in this directory. Delete the directory manually to 'clean' it."
+            % build_dir
+        )
     red_flags = ["CMakeLists.txt", ".git", ".svn"]
     for red in red_flags:
         red = os.path.join(build_dir, red)
         if os.path.exists(red):
-            raise FatalError("Refusing to automatically delete files in directory containing '%s'. Delete files manually if you're sure." % red)
+            raise FatalError(
+                "Refusing to automatically delete files in directory containing '%s'. Delete files manually if you're sure."
+                % red
+            )
     # OK, delete everything in the build directory...
     # Note: Python 2.7 doesn't detect symlinks on Windows (it is supported form 3.2). Tools promising to not
     # follow symlinks will actually follow them. Deleting the build directory with symlinks deletes also items
     # outside of this directory.
     deleted_symlinks = _delete_windows_symlinks(build_dir)
     if args.verbose and len(deleted_symlinks) > 1:
-        print('The following symlinks were identified and removed:\n%s' % "\n".join(deleted_symlinks))
-    for f in os.listdir(build_dir):  # TODO: once we are Python 3 only, this can be os.scandir()
+        print(
+            "The following symlinks were identified and removed:\n%s"
+            % "\n".join(deleted_symlinks)
+        )
+    for f in os.listdir(
+        build_dir
+    ):  # TODO: once we are Python 3 only, this can be os.scandir()
         f = os.path.join(build_dir, f)
         if args.verbose:
-            print('Removing: %s' % f)
+            print("Removing: %s" % f)
         if os.path.isdir(f):
             shutil.rmtree(f)
         else:
@@ -406,96 +456,17 @@ def _safe_relpath(path, start=None):
         return os.path.abspath(path)
 
 
-def print_closing_message(args):
-    # print a closing message of some kind
-    #
-    if "flash" in str(args.actions):
-        print("Done")
-        return
-
-    # Otherwise, if we built any binaries print a message about
-    # how to flash them
-    def print_flashing_message(title, key):
-        print("\n%s build complete. To flash, run this command:" % title)
-
-        with open(os.path.join(args.build_dir, "flasher_args.json")) as f:
-            flasher_args = json.load(f)
-
-        def flasher_path(f):
-            return _safe_relpath(os.path.join(args.build_dir, f))
-
-        if key != "project":  # flashing a single item
-            cmd = ""
-            if key == "bootloader":  # bootloader needs --flash-mode, etc to be passed in
-                cmd = " ".join(flasher_args["write_flash_args"]) + " "
-
-            cmd += flasher_args[key]["offset"] + " "
-            cmd += flasher_path(flasher_args[key]["file"])
-        else:  # flashing the whole project
-            cmd = " ".join(flasher_args["write_flash_args"]) + " "
-            flash_items = sorted(((o,f) for (o,f) in flasher_args["flash_files"].items() if len(o) > 0),
-                                 key=lambda x: int(x[0], 0))
-            for o,f in flash_items:
-                cmd += o + " " + flasher_path(f) + " "
-
-        print("%s -p %s -b %s --after %s write_flash %s" % (
-            _safe_relpath("%s/components/esptool_py/esptool/esptool.py" % os.environ["IDF_PATH"]),
-            args.port or "(PORT)",
-            args.baud,
-            flasher_args["extra_esptool_args"]["after"],
-            cmd.strip()))
-        print("or run '%s -p %s %s'" % (PROG, args.port or "(PORT)", key + "-flash" if key != "project" else "flash",))
-
-    if "all" in args.actions or "build" in args.actions:
-        print_flashing_message("Project", "project")
-    else:
-        if "app" in args.actions:
-            print_flashing_message("App", "app")
-        if "partition_table" in args.actions:
-            print_flashing_message("Partition Table", "partition_table")
-        if "bootloader" in args.actions:
-            print_flashing_message("Bootloader", "bootloader")
-
-
-ACTIONS = {
-    # action name : ( function (or alias), dependencies, order-only dependencies )
-    "all":                   (build_target, [], ["reconfigure", "menuconfig", "clean", "fullclean"]),
-    "build":                 ("all",        [], []),  # build is same as 'all' target
-    "clean":                 (clean,        [], ["fullclean"]),
-    "fullclean":             (fullclean,    [], []),
-    "reconfigure":           (reconfigure,  [], ["menuconfig"]),
-    "menuconfig":            (build_target, [], []),
-    "defconfig":             (build_target, [], []),
-    "confserver":            (build_target, [], []),
-    "size":                  (build_target, ["app"], []),
-    "size-components":       (build_target, ["app"], []),
-    "size-files":            (build_target, ["app"], []),
-    "bootloader":            (build_target, [], []),
-    "bootloader-clean":      (build_target, [], []),
-    "bootloader-flash":      (flash,        ["bootloader"], ["erase_flash"]),
-    "app":                   (build_target, [], ["clean", "fullclean", "reconfigure"]),
-    "app-flash":             (flash,        ["app"], ["erase_flash"]),
-    "efuse_common_table":    (build_target, [], ["reconfigure"]),
-    "efuse_custom_table":    (build_target, [], ["reconfigure"]),
-    "show_efuse_table":      (build_target, [], ["reconfigure"]),
-    "partition_table":       (build_target, [], ["reconfigure"]),
-    "partition_table-flash": (flash,        ["partition_table"], ["erase_flash"]),
-    "flash":                 (flash,        ["all"], ["erase_flash"]),
-    "erase_flash":           (erase_flash,  [], []),
-    "monitor":               (monitor,      [], ["flash", "partition_table-flash", "bootloader-flash", "app-flash"]),
-    "erase_otadata":         (build_target, [], []),
-    "read_otadata":          (build_target, [], []),
-}
-
-
-def get_commandline_options():
-    """ Return all the command line options up to but not including the action """
+def get_commandline_options(ctx):
+    """ Return all the command line options up to first action """
+    # This approach ignores argument parsing done Click
     result = []
-    for a in sys.argv:
-        if a in ACTIONS.keys():
+
+    for arg in sys.argv:
+        if arg in ctx.command.commands_with_aliases:
             break
-        else:
-            result.append(a)
+
+        result.append(arg)
+
     return result
 
 
@@ -508,107 +479,602 @@ def get_default_serial_port():
     # Import is done here in order to move it after the check_environment() ensured that pyserial has been installed
     import serial.tools.list_ports
 
-    ports = list(reversed(sorted(
-        p.device for p in serial.tools.list_ports.comports())))
+    ports = list(reversed(sorted(p.device for p in serial.tools.list_ports.comports())))
     try:
-        print("Choosing default port %s (use '-p PORT' option to set a specific serial port)" % ports[0].encode('ascii', 'ignore'))
+        print(
+            "Choosing default port %s (use '-p PORT' option to set a specific serial port)"
+            % ports[0].encode("ascii", "ignore")
+        )
         return ports[0]
     except IndexError:
-        raise RuntimeError("No serial ports found. Connect a device, or use '-p PORT' option to set a specific port.")
-
-
-# Import the actions, arguments extension file
-if os.path.exists(os.path.join(os.getcwd(), "idf_ext.py")):
-    sys.path.append(os.getcwd())
-    try:
-        from idf_ext import add_action_extensions, add_argument_extensions
-    except ImportError:
-        print("Error importing extension file idf_ext.py. Skipping.")
-        print("Please make sure that it contains implementations (even if they're empty implementations) of")
-        print("add_action_extensions and add_argument_extensions.")
-
-
-def main():
-    if sys.version_info[0] != 2 or sys.version_info[1] != 7:
-        print("Note: You are using Python %d.%d.%d. Python 3 support is new, please report any problems "
-              "you encounter. Search for 'Setting the Python Interpreter' in the ESP-IDF docs if you want to use "
-              "Python 2.7." % sys.version_info[:3])
+        raise RuntimeError(
+            "No serial ports found. Connect a device, or use '-p PORT' option to set a specific port."
+        )
+
+
+class PropertyDict(dict):
+    def __init__(self, *args, **kwargs):
+        super(PropertyDict, self).__init__(*args, **kwargs)
+        self.__dict__ = self
+
+
+def init_cli():
+    # Click is imported here to run it after check_environment()
+    import click
+
+    class Task(object):
+        def __init__(
+            self, callback, name, aliases, dependencies, order_dependencies, action_args
+        ):
+            self.callback = callback
+            self.name = name
+            self.dependencies = dependencies
+            self.order_dependencies = order_dependencies
+            self.action_args = action_args
+            self.aliases = aliases
+
+        def run(self, context, global_args):
+            self.callback(self.name, context, global_args, **self.action_args)
+
+    class Action(click.Command):
+        def __init__(
+            self,
+            name=None,
+            aliases=None,
+            dependencies=None,
+            order_dependencies=None,
+            **kwargs
+        ):
+            super(Action, self).__init__(name, **kwargs)
+
+            self.name = self.name or self.callback.__name__
+
+            if aliases is None:
+                aliases = []
+            self.aliases = aliases
+
+            self.help = self.help or self.callback.__doc__
+            if self.help is None:
+                self.help = ""
+
+            if dependencies is None:
+                dependencies = []
+
+            if order_dependencies is None:
+                order_dependencies = []
+
+            # Show first line of help if short help is missing
+            self.short_help = self.short_help or self.help.split("\n")[0]
+
+            # Add aliases to help string
+            if aliases:
+                aliases_help = "Aliases: %s." % ", ".join(aliases)
+
+                self.help = "\n".join([self.help, aliases_help])
+                self.short_help = " ".join([aliases_help, self.short_help])
+
+            if self.callback is not None:
+                callback = self.callback
+
+                def wrapped_callback(**action_args):
+                    return Task(
+                        callback=callback,
+                        name=self.name,
+                        dependencies=dependencies,
+                        order_dependencies=order_dependencies,
+                        action_args=action_args,
+                        aliases=self.aliases,
+                    )
+
+                self.callback = wrapped_callback
+
+    class CLI(click.MultiCommand):
+        """Action list contains all actions with options available for CLI"""
+
+        def __init__(self, action_lists=None, help=None):
+            super(CLI, self).__init__(
+                chain=True,
+                invoke_without_command=True,
+                result_callback=self.execute_tasks,
+                context_settings={"max_content_width": 140},
+                help=help,
+            )
+            self._actions = {}
+            self.global_action_callbacks = []
+            self.commands_with_aliases = {}
+
+            if action_lists is None:
+                action_lists = []
+
+            for action_list in action_lists:
+                # Global options
+                for option_args in action_list.get("global_options", []):
+                    option_args["param_decls"] = option_args.pop("names")
+                    self.params.append(click.Option(**option_args))
+
+                # Global options validators
+                self.global_action_callbacks.extend(
+                    action_list.get("global_action_callbacks", [])
+                )
+
+                # Actions
+                for name, action in action_list.get("actions", {}).items():
+                    options = action.pop("options", [])
+
+                    if options is None:
+                        options = []
+
+                    self._actions[name] = Action(name=name, **action)
+                    for alias in [name] + action.get("aliases", []):
+                        self.commands_with_aliases[alias] = name
+
+                    for option_args in options:
+                        option_args["param_decls"] = option_args.pop("names")
+                        self._actions[name].params.append(click.Option(**option_args))
+
+        def list_commands(self, ctx):
+            return sorted(self._actions)
+
+        def get_command(self, ctx, name):
+            return self._actions.get(self.commands_with_aliases.get(name))
+
+        def _print_closing_message(self, args, actions):
+            # print a closing message of some kind
+            #
+            if "flash" in str(actions):
+                print("Done")
+                return
+
+            # Otherwise, if we built any binaries print a message about
+            # how to flash them
+            def print_flashing_message(title, key):
+                print("\n%s build complete. To flash, run this command:" % title)
+
+                with open(os.path.join(args.build_dir, "flasher_args.json")) as f:
+                    flasher_args = json.load(f)
+
+                def flasher_path(f):
+                    return _safe_relpath(os.path.join(args.build_dir, f))
+
+                if key != "project":  # flashing a single item
+                    cmd = ""
+                    if (
+                        key == "bootloader"
+                    ):  # bootloader needs --flash-mode, etc to be passed in
+                        cmd = " ".join(flasher_args["write_flash_args"]) + " "
+
+                    cmd += flasher_args[key]["offset"] + " "
+                    cmd += flasher_path(flasher_args[key]["file"])
+                else:  # flashing the whole project
+                    cmd = " ".join(flasher_args["write_flash_args"]) + " "
+                    flash_items = sorted(
+                        (
+                            (o, f)
+                            for (o, f) in flasher_args["flash_files"].items()
+                            if len(o) > 0
+                        ),
+                        key=lambda x: int(x[0], 0),
+                    )
+                    for o, f in flash_items:
+                        cmd += o + " " + flasher_path(f) + " "
+
+                print(
+                    "%s -p %s -b %s --after %s write_flash %s"
+                    % (
+                        _safe_relpath(
+                            "%s/components/esptool_py/esptool/esptool.py"
+                            % os.environ["IDF_PATH"]
+                        ),
+                        args.port or "(PORT)",
+                        args.baud,
+                        flasher_args["extra_esptool_args"]["after"],
+                        cmd.strip(),
+                    )
+                )
+                print(
+                    "or run 'idf.py -p %s %s'"
+                    % (
+                        args.port or "(PORT)",
+                        key + "-flash" if key != "project" else "flash",
+                    )
+                )
+
+            if "all" in actions or "build" in actions:
+                print_flashing_message("Project", "project")
+            else:
+                if "app" in actions:
+                    print_flashing_message("App", "app")
+                if "partition_table" in actions:
+                    print_flashing_message("Partition Table", "partition_table")
+                if "bootloader" in actions:
+                    print_flashing_message("Bootloader", "bootloader")
+
+        def execute_tasks(self, tasks, **kwargs):
+            ctx = click.get_current_context()
+
+            # Validate global arguments
+            global_args = PropertyDict(ctx.params)
+
+            for action_callback in ctx.command.global_action_callbacks:
+                action_callback(ctx, global_args, tasks)
+
+            # very simple dependency management
+            completed_tasks = set()
+
+            if not tasks:
+                print(ctx.get_help())
+                ctx.exit()
+
+            while tasks:
+                task = tasks[0]
+                tasks_dict = dict([(t.name, t) for t in tasks])
+
+                name_with_aliases = task.name
+                if task.aliases:
+                    name_with_aliases += " (aliases: %s)" % ", ".join(task.aliases)
+
+                ready_to_run = True
+                for dep in task.dependencies:
+                    if dep not in completed_tasks:
+                        print(
+                            'Adding %s\'s dependency "%s" to list of actions'
+                            % (task.name, dep)
+                        )
+                        dep_task = ctx.invoke(ctx.command.get_command(ctx, dep))
+                        tasks.insert(0, dep_task)
+                        ready_to_run = False
+
+                for dep in task.order_dependencies:
+                    if dep in tasks_dict.keys() and dep not in completed_tasks:
+                        tasks.insert(0, tasks.pop(tasks.index(tasks_dict[dep])))
+                        ready_to_run = False
+
+                if ready_to_run:
+                    tasks.pop(0)
+
+                    if task.name in completed_tasks:
+                        print(
+                            "Skipping action that is already done: %s"
+                            % name_with_aliases
+                        )
+                    else:
+                        print("Executing action: %s" % name_with_aliases)
+                        task.run(ctx, global_args)
+
+                    completed_tasks.add(task.name)
+
+            self._print_closing_message(global_args, completed_tasks)
+
+        @staticmethod
+        def merge_action_lists(*action_lists):
+            merged_actions = {
+                "global_options": [],
+                "actions": {},
+                "global_action_callbacks": [],
+            }
+            for action_list in action_lists:
+                merged_actions["global_options"].extend(
+                    action_list.get("global_options", [])
+                )
+                merged_actions["actions"].update(action_list.get("actions", {}))
+                merged_actions["global_action_callbacks"].extend(
+                    action_list.get("global_action_callbacks", [])
+                )
+            return merged_actions
+
+    # This is a tiny parser that parses project-dir even before constructing the
+    # fully featured click parser, to be sure that extensions are loaded from the right place
+    @click.command(
+        add_help_option=False,
+        context_settings={"allow_extra_args": True, "ignore_unknown_options": True},
+    )
+    @click.option("-C", "--project-dir", default=os.getcwd())
+    def parse_project_dir(project_dir):
+        return _realpath(project_dir)
+
+    project_dir = parse_project_dir(standalone_mode=False)
+
+    # Load base idf commands
+    def validate_root_options(ctx, args, tasks):
+        args.project_dir = _realpath(args.project_dir)
+        if args.build_dir is not None and args.project_dir == _realpath(args.build_dir):
+            raise FatalError(
+                "Setting the build directory to the project directory is not supported. Suggest dropping "
+                "--build-dir option, the default is a 'build' subdirectory inside the project directory."
+            )
+        if args.build_dir is None:
+            args.build_dir = os.path.join(args.project_dir, "build")
+        args.build_dir = _realpath(args.build_dir)
+
+    # Possible keys for action dict are: global_options, actions and global_action_callbacks
+    root_options = {
+        "global_options": [
+            {
+                "names": ["-C", "--project-dir"],
+                "help": "Project directory",
+                "type": click.Path(),
+                "default": os.getcwd(),
+            },
+            {
+                "names": ["-B", "--build-dir"],
+                "help": "Build directory",
+                "type": click.Path(),
+                "default": None,
+            },
+            {
+                "names": ["-n", "--no-warnings"],
+                "help": "Disable CMake warnings",
+                "is_flag": True,
+                "default": False,
+            },
+            {
+                "names": ["-v", "--verbose"],
+                "help": "Verbose build output",
+                "is_flag": True,
+                "default": False,
+            },
+            {
+                "names": ["-D", "--define-cache-entry"],
+                "help": "Create a cmake cache entry",
+                "multiple": True,
+            },
+            {
+                "names": ["--no-ccache"],
+                "help": "Disable ccache. Otherwise, if ccache is available on the PATH then it will be used for faster builds.",
+                "is_flag": True,
+                "default": False,
+            },
+            {
+                "names": ["-G", "--generator"],
+                "help": "CMake generator",
+                "type": click.Choice(GENERATOR_CMDS.keys()),
+            },
+        ],
+        "global_action_callbacks": [validate_root_options],
+    }
+
+    build_actions = {
+        "actions": {
+            "all": {
+                "aliases": ["build"],
+                "callback": build_target,
+                "short_help": "Build the project.",
+                "help": "Build the project. This can involve multiple steps:\n\n"
+                + "1. Create the build directory if needed. The sub-directory 'build' is used to hold build output, "
+                + "although this can be changed with the -B option.\n\n"
+                + "2. Run CMake as necessary to configure the project and generate build files for the main build tool.\n\n"
+                + "3. Run the main build tool (Ninja or GNU Make). By default, the build tool is automatically detected "
+                + "but it can be explicitly set by passing the -G option to idf.py.\n\n",
+                "order_dependencies": [
+                    "reconfigure",
+                    "menuconfig",
+                    "clean",
+                    "fullclean",
+                ],
+            },
+            "menuconfig": {
+                "callback": build_target,
+                "help": 'Run "menuconfig" project configuration tool.',
+            },
+            "confserver": {
+                "callback": build_target,
+                "help": "Run JSON configuration server.",
+            },
+            "size": {
+                "callback": build_target,
+                "help": "Print basic size information about the app.",
+                "dependencies": ["app"],
+            },
+            "size-components": {
+                "callback": build_target,
+                "help": "Print per-component size information.",
+                "dependencies": ["app"],
+            },
+            "size-files": {
+                "callback": build_target,
+                "help": "Print per-source-file size information.",
+                "dependencies": ["app"],
+            },
+            "bootloader": {"callback": build_target, "help": "Build only bootloader."},
+            "app": {
+                "callback": build_target,
+                "help": "Build only the app.",
+                "order_dependencies": ["clean", "fullclean", "reconfigure"],
+            },
+            "efuse_common_table": {
+                "callback": build_target,
+                "help": "Generate C-source for IDF's eFuse fields.",
+                "order_dependencies": ["reconfigure"],
+            },
+            "efuse_custom_table": {
+                "callback": build_target,
+                "help": "Generate C-source for user's eFuse fields.",
+                "order_dependencies": ["reconfigure"],
+            },
+            "show_efuse_table": {
+                "callback": build_target,
+                "help": "Print eFuse table",
+                "order_dependencies": ["reconfigure"],
+            },
+            "partition_table": {
+                "callback": build_target,
+                "help": "Build only partition table.",
+                "order_dependencies": ["reconfigure"],
+            },
+            "erase_otadata": {
+                "callback": build_target,
+                "help": "Erase otadata partition.",
+            },
+            "read_otadata": {
+                "callback": build_target,
+                "help": "Read otadata partition.",
+            },
+        }
+    }
+
+    clean_actions = {
+        "actions": {
+            "reconfigure": {
+                "callback": reconfigure,
+                "short_help": "Re-run CMake.",
+                "help": "Re-run CMake even if it doesn't seem to need re-running. This isn't necessary during normal usage, "
+                + "but can be useful after adding/removing files from the source tree, or when modifying CMake cache variables. "
+                + "For example, \"idf.py -DNAME='VALUE' reconfigure\" "
+                + 'can be used to set variable "NAME" in CMake cache to value "VALUE".',
+                "order_dependencies": ["menuconfig"],
+            },
+            "clean": {
+                "callback": clean,
+                "short_help": "Delete build output files from the build directory.",
+                "help": "Delete build output files from the build directory, forcing a 'full rebuild' the next time "
+                + "the project is built. Cleaning doesn't delete CMake configuration output and some other files",
+                "order_dependencies": ["fullclean"],
+            },
+            "fullclean": {
+                "callback": fullclean,
+                "short_help": "Delete the entire build directory contents.",
+                "help": "Delete the entire build directory contents. This includes all CMake configuration output. "
+                + "The next time the project is built, CMake will configure it from scratch. "
+                + "Note that this option recursively deletes all files in the build directory, so use with care. "
+                + "Project configuration is not deleted.",
+            },
+        }
+    }
+
+    serial_actions = {
+        "global_options": [
+            {
+                "names": ["-p", "--port"],
+                "help": "Serial port",
+                "envvar": "ESPPORT",
+                "default": None,
+            },
+            {
+                "names": ["-b", "--baud"],
+                "help": "Baud rate",
+                "envvar": "ESPBAUD",
+                "default": 460800,
+            },
+        ],
+        "actions": {
+            "flash": {
+                "callback": flash,
+                "help": "Flash the project",
+                "dependencies": ["all"],
+                "order_dependencies": ["erase_flash"],
+            },
+            "erase_flash": {
+                "callback": erase_flash,
+                "help": "Erase entire flash chip.",
+            },
+            "monitor": {
+                "callback": monitor,
+                "help": "Display serial output.",
+                "order_dependencies": [
+                    "flash",
+                    "partition_table-flash",
+                    "bootloader-flash",
+                    "app-flash",
+                ],
+            },
+            "partition_table-flash": {
+                "callback": flash,
+                "help": "Flash partition table only.",
+                "dependencies": ["partition_table"],
+                "order_dependencies": ["erase_flash"],
+            },
+            "bootloader-flash": {
+                "callback": flash,
+                "help": "Flash bootloader only.",
+                "dependencies": ["bootloader"],
+                "order_dependencies": ["erase_flash"],
+            },
+            "app-flash": {
+                "callback": flash,
+                "help": "Flash the app only.",
+                "dependencies": ["app"],
+                "order_dependencies": ["erase_flash"],
+            },
+        },
+    }
+
+    base_actions = CLI.merge_action_lists(
+        root_options, build_actions, clean_actions, serial_actions
+    )
+    all_actions = [base_actions]
+
+    # Load extensions
+    if os.path.exists(os.path.join(project_dir, "idf_ext.py")):
+        sys.path.append(project_dir)
+        try:
+            from idf_ext import action_extensions
+        except ImportError:
+            print("Error importing extension file idf_ext.py. Skipping.")
+            print(
+                "Please make sure that it contains implementation (even if it's empty) of action_extensions"
+            )
 
     # Add actions extensions
-    try:
-        add_action_extensions({
-            "build_target": build_target,
-            "reconfigure": reconfigure,
-            "flash": flash,
-            "monitor": monitor,
-            "clean": clean,
-            "fullclean": fullclean
-        }, ACTIONS)
-    except NameError:
-        pass
 
-    parser = argparse.ArgumentParser(prog=PROG, description='ESP-IDF build management tool')
-    parser.add_argument('-p', '--port', help="Serial port",
-                        default=os.environ.get('ESPPORT', None))
-    parser.add_argument('-b', '--baud', help="Baud rate",
-                        default=os.environ.get('ESPBAUD', 460800))
-    parser.add_argument('-C', '--project-dir', help="Project directory", default=os.getcwd())
-    parser.add_argument('-B', '--build-dir', help="Build directory", default=None)
-    parser.add_argument('-G', '--generator', help="Cmake generator", choices=GENERATOR_CMDS.keys())
-    parser.add_argument('-n', '--no-warnings', help="Disable Cmake warnings", action="store_true")
-    parser.add_argument('-v', '--verbose', help="Verbose build output", action="store_true")
-    parser.add_argument('-D', '--define-cache-entry', help="Create a cmake cache entry", nargs='+')
-    parser.add_argument('--no-ccache', help="Disable ccache. Otherwise, if ccache is available on the PATH then it will be used for faster builds.",
-                        action="store_true")
-    parser.add_argument('actions', help="Actions (build targets or other operations)", nargs='+',
-                        choices=ACTIONS.keys())
-
-    # Add arguments extensions
     try:
-        add_argument_extensions(parser)
+        all_actions.append(action_extensions(base_actions, project_dir))
     except NameError:
         pass
 
-    args = parser.parse_args()
+    return CLI(help="ESP-IDF build management", action_lists=all_actions)
+
 
+def main():
     check_environment()
+    cli = init_cli()
+    cli(prog_name=PROG)
 
-    # Advanced parameter checks
-    if args.build_dir is not None and _realpath(args.project_dir) == _realpath(args.build_dir):
-        raise FatalError("Setting the build directory to the project directory is not supported. Suggest dropping "
-                         "--build-dir option, the default is a 'build' subdirectory inside the project directory.")
-    if args.build_dir is None:
-        args.build_dir = os.path.join(args.project_dir, "build")
-    args.build_dir = _realpath(args.build_dir)
-
-    completed_actions = set()
-
-    def execute_action(action, remaining_actions):
-        (function, dependencies, order_dependencies) = ACTIONS[action]
-        # very simple dependency management, build a set of completed actions and make sure
-        # all dependencies are in it
-        for dep in dependencies:
-            if dep not in completed_actions:
-                execute_action(dep, remaining_actions)
-        for dep in order_dependencies:
-            if dep in remaining_actions and dep not in completed_actions:
-                execute_action(dep, remaining_actions)
-
-        if action in completed_actions:
-            pass  # we've already done this, don't do it twice...
-        elif function in ACTIONS:  # alias of another action
-            execute_action(function, remaining_actions)
-        else:
-            function(action, args)
 
-        completed_actions.add(action)
+def _valid_unicode_config():
+    # Python 2 is always good
+    if sys.version_info[0] == 2:
+        return True
+
+    # With python 3 unicode environment is required
+    try:
+        return codecs.lookup(locale.getpreferredencoding()).name != "ascii"
+    except Exception:
+        return False
 
-    actions = list(args.actions)
-    while len(actions) > 0:
-        execute_action(actions[0], actions[1:])
-        actions.pop(0)
 
-    print_closing_message(args)
+def _find_usable_locale():
+    try:
+        locales = subprocess.Popen(
+            ["locale", "-a"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
+        ).communicate()[0]
+    except OSError:
+        locales = ""
+    if isinstance(locales, bytes):
+        locales = locales.decode("ascii", "replace")
+
+    usable_locales = []
+    for line in locales.splitlines():
+        locale = line.strip()
+        locale_name = locale.lower().replace("-", "")
+
+        # C.UTF-8 is the best option, if supported
+        if locale_name == "c.utf8":
+            return locale
+
+        if locale_name.endswith(".utf8"):
+            # Make a preference of english locales
+            if locale.startswith("en_"):
+                usable_locales.insert(0, locale)
+            else:
+                usable_locales.append(locale)
+
+    if not usable_locales:
+        FatalError(
+            "Support for Unicode filenames is required, but no suitable UTF-8 locale was found on your system."
+            " Please refer to the manual for your operating system for details on locale reconfiguration."
+        )
+
+    return usable_locales[0]
 
 
 if __name__ == "__main__":
@@ -617,16 +1083,37 @@ if __name__ == "__main__":
         # keyboard interrupt (CTRL+C).
         # Using an own global variable for indicating that we are running with "winpty" seems to be the most suitable
         # option as os.environment['_'] contains "winpty" only when it is run manually from console.
-        WINPTY_VAR = 'WINPTY'
-        WINPTY_EXE = 'winpty'
-        if ('MSYSTEM' in os.environ) and (not os.environ['_'].endswith(WINPTY_EXE) and WINPTY_VAR not in os.environ):
-            os.environ[WINPTY_VAR] = '1'    # the value is of no interest to us
+        WINPTY_VAR = "WINPTY"
+        WINPTY_EXE = "winpty"
+        if ("MSYSTEM" in os.environ) and (
+            not os.environ["_"].endswith(WINPTY_EXE) and WINPTY_VAR not in os.environ
+        ):
+            os.environ[WINPTY_VAR] = "1"  # the value is of no interest to us
             # idf.py calls itself with "winpty" and WINPTY global variable set
-            ret = subprocess.call([WINPTY_EXE, sys.executable] + sys.argv, env=os.environ)
+            ret = subprocess.call(
+                [WINPTY_EXE, sys.executable] + sys.argv, env=os.environ
+            )
             if ret:
                 raise SystemExit(ret)
+
+        elif os.name == "posix" and not _valid_unicode_config():
+            # Trying to find best utf-8 locale available on the system and restart python with it
+            best_locale = _find_usable_locale()
+
+            print(
+                "Your environment is not configured to handle unicode filenames outside of ASCII range."
+                " Environment variable LC_ALL is temporary set to %s for unicode support."
+                % best_locale
+            )
+
+            os.environ["LC_ALL"] = best_locale
+            ret = subprocess.call([sys.executable] + sys.argv, env=os.environ)
+            if ret:
+                raise SystemExit(ret)
+
         else:
             main()
+
     except FatalError as e:
         print(e)
         sys.exit(2)
index b5539b3b9887f473124fa5363ace70774cbb49be..23ce65289b67d16818b7674d85394acbe42511eb 100644 (file)
@@ -20,9 +20,9 @@ ESP-IDF unit tests are run using Unit Test App. The app can be built with the un
 * Set IDF_PATH environment variable to point to the path to the esp-idf top-level directory.
 * Change into `tools/unit-test-app` directory
 * `idf.py menuconfig` to configure the Unit Test App.
-* `idf.py build -T <component> <component> ...` with `component` set to names of the components to be included in the test app. Or `idf.py build -T all` to build the test app with all the tests for components having `test` subdirectory.
-* Follow the printed instructions to flash, or run `idf.py flash -p PORT`.
-* Unit test have a few preset sdkconfigs. It provides command `idf.py ut-clean-config_name` and `idf.py ut-build-config_name` (where `config_name` is the file name under `unit-test-app/configs` folder) to build with preset configs. For example, you can use `idf.py ut-build-default -T all` to build with config file `unit-test-app/configs/default`. Built binary for this config will be copied to `unit-test-app/output/config_name` folder.
+* `idf.py -T <component> -T <component> ... build` with `component` set to names of the components to be included in the test app. Or `idf.py -T all build` to build the test app with all the tests for components having `test` subdirectory.
+* Follow the printed instructions to flash, or run `idf.py -p PORT flash`.
+* Unit tests have a few preset sdkconfigs. It provides commands `idf.py ut-clean-config_name` and `idf.py ut-build-config_name` (where `config_name` is the file name under `unit-test-app/configs` folder) to build with preset configs. For example, you can use `idf.py -T all ut-build-default` to build with config file `unit-test-app/configs/default`. Built binary for this config will be copied to `unit-test-app/output/config_name` folder.
 
 # Flash Size
 
index 06b843f0108fe7f28fcf238dcaf7f7bd562fba4e..d2e11d88a0695411166f7b34479e0e3a9987e05e 100644 (file)
@@ -1,96 +1,40 @@
+import copy
 import glob
-import tempfile
 import os
 import os.path
 import re
 import shutil
-import argparse
-import copy
-
-PROJECT_NAME = "unit-test-app"
-PROJECT_PATH = os.getcwd()
-
-# List of unit-test-app configurations.
-# Each file in configs/ directory defines a configuration. The format is the
-# same as sdkconfig file. Configuration is applied on top of sdkconfig.defaults
-# file from the project directory
-CONFIG_NAMES = os.listdir(os.path.join(PROJECT_PATH, "configs"))
-
-# Build (intermediate) and output (artifact) directories
-BUILDS_DIR = os.path.join(PROJECT_PATH, "builds")
-BINARIES_DIR = os.path.join(PROJECT_PATH, "output")
-
-
-# Convert the values passed to the -T parameter to corresponding cache entry definitions
-# TESTS_ALL and TEST_COMPONENTS
-class TestComponentAction(argparse.Action):
-    def __call__(self, parser, namespace, values, option_string=None):
-        # Create a new of cache definition entry, adding previous elements
-        cache_entries = list()
-
-        existing_entries = getattr(namespace, "define_cache_entry", [])
-
-        if existing_entries:
-            cache_entries.extend(existing_entries)
-
-        # Form -D arguments
-        if "all" in values:
-            cache_entries.append("TESTS_ALL=1")
-            cache_entries.append("TEST_COMPONENTS=''")
-        else:
-            cache_entries.append("TESTS_ALL=0")
-            cache_entries.append("TEST_COMPONENTS='%s'" % " ".join(values))
-
-        setattr(namespace, "define_cache_entry", cache_entries)
-
-        # Brute force add reconfigure at the very beginning
-        existing_actions = getattr(namespace, "actions", [])
-        if "reconfigure" not in existing_actions:
-            existing_actions = ["reconfigure"] + existing_actions
-        setattr(namespace, "actions", existing_actions)
-
-
-class TestExcludeComponentAction(argparse.Action):
-    def __call__(self, parser, namespace, values, option_string=None):
-        # Create a new of cache definition entry, adding previous elements
-        cache_entries = list()
-
-        existing_entries = getattr(namespace, "define_cache_entry", [])
-
-        if existing_entries:
-            cache_entries.extend(existing_entries)
-
-        cache_entries.append("TEST_EXCLUDE_COMPONENTS='%s'" % " ".join(values))
-
-        setattr(namespace, "define_cache_entry", cache_entries)
+import tempfile
 
-        # Brute force add reconfigure at the very beginning
-        existing_actions = getattr(namespace, "actions", [])
-        if "reconfigure" not in existing_actions:
-            existing_actions = ["reconfigure"] + existing_actions
-        setattr(namespace, "actions", existing_actions)
 
+def action_extensions(base_actions, project_path=os.getcwd()):
+    """ Describes extensions for unit tests. This function expects that base_actions contains the actions "all" and "reconfigure". """
 
-def add_argument_extensions(parser):
-    # For convenience, define a -T argument that gets converted to -D arguments
-    parser.add_argument('-T', '--test-component', help="Specify the components to test", nargs='+', action=TestComponentAction)
-    # For convenience, define a -T argument that gets converted to -D arguments
-    parser.add_argument('-E', '--test-exclude-components', help="Specify the components to exclude from testing", nargs='+', action=TestExcludeComponentAction)
+    PROJECT_NAME = "unit-test-app"
 
+    # List of unit-test-app configurations.
+    # Each file in configs/ directory defines a configuration. The format is the
+    # same as sdkconfig file. Configuration is applied on top of sdkconfig.defaults
+    # file from the project directory
+    CONFIG_NAMES = os.listdir(os.path.join(project_path, "configs"))
 
-def add_action_extensions(base_functions, base_actions):
+    # Build (intermediate) and output (artifact) directories
+    BUILDS_DIR = os.path.join(project_path, "builds")
+    BINARIES_DIR = os.path.join(project_path, "output")
 
-    def ut_apply_config(ut_apply_config_name, args):
+    def ut_apply_config(ut_apply_config_name, ctx, args):
         config_name = re.match(r"ut-apply-config-(.*)", ut_apply_config_name).group(1)
 
         def set_config_build_variables(prop, defval=None):
-            property_value = re.findall(r"^%s=(.+)" % prop, config_file_content, re.MULTILINE)
-            if (property_value):
+            property_value = re.findall(
+                r"^%s=(.+)" % prop, config_file_content, re.MULTILINE
+            )
+            if property_value:
                 property_value = property_value[0]
             else:
                 property_value = defval
 
-            if (property_value):
+            if property_value:
                 try:
                     args.define_cache_entry.append("%s=" % prop + property_value)
                 except AttributeError:
@@ -116,7 +60,7 @@ def add_action_extensions(base_functions, base_actions):
 
         if config_name in CONFIG_NAMES:
             # Parse the sdkconfig for components to be included/excluded and tests to be run
-            config = os.path.join(PROJECT_PATH, "configs", config_name)
+            config = os.path.join(project_path, "configs", config_name)
 
             with open(config, "r") as config_file:
                 config_file_content = config_file.read()
@@ -136,17 +80,17 @@ def add_action_extensions(base_functions, base_actions):
                 except AttributeError:
                     args.define_cache_entry = [tests_all]
 
-                set_config_build_variables("TEST_EXCLUDE_COMPONENTS","''")
+                set_config_build_variables("TEST_EXCLUDE_COMPONENTS", "''")
 
             with tempfile.NamedTemporaryFile() as sdkconfig_temp:
                 # Use values from the combined defaults and the values from
                 # config folder to build config
-                sdkconfig_default = os.path.join(PROJECT_PATH, "sdkconfig.defaults")
+                sdkconfig_default = os.path.join(project_path, "sdkconfig.defaults")
 
                 with open(sdkconfig_default, "rb") as sdkconfig_default_file:
                     sdkconfig_temp.write(sdkconfig_default_file.read())
 
-                sdkconfig_config = os.path.join(PROJECT_PATH, "configs", config_name)
+                sdkconfig_config = os.path.join(project_path, "configs", config_name)
                 with open(sdkconfig_config, "rb") as sdkconfig_config_file:
                     sdkconfig_temp.write(b"\n")
                     sdkconfig_temp.write(sdkconfig_config_file.read())
@@ -154,21 +98,28 @@ def add_action_extensions(base_functions, base_actions):
                 sdkconfig_temp.flush()
 
                 try:
-                    args.define_cache_entry.append("SDKCONFIG_DEFAULTS=" + sdkconfig_temp.name)
+                    args.define_cache_entry.append(
+                        "SDKCONFIG_DEFAULTS=" + sdkconfig_temp.name
+                    )
                 except AttributeError:
-                    args.define_cache_entry = ["SDKCONFIG_DEFAULTS=" + sdkconfig_temp.name]
+                    args.define_cache_entry = [
+                        "SDKCONFIG_DEFAULTS=" + sdkconfig_temp.name
+                    ]
 
-                reconfigure = base_functions["reconfigure"]
-                reconfigure(None, args)
+                reconfigure = base_actions["actions"]["reconfigure"]["callback"]
+                reconfigure(None, ctx, args)
         else:
             if not config_name == "all-configs":
-                print("unknown unit test app config for action '%s'" % ut_apply_config_name)
+                print(
+                    "unknown unit test app config for action '%s'"
+                    % ut_apply_config_name
+                )
 
     # This target builds the configuration. It does not currently track dependencies,
     # but is good enough for CI builds if used together with clean-all-configs.
     # For local builds, use 'apply-config-NAME' target and then use normal 'all'
     # and 'flash' targets.
-    def ut_build(ut_build_name, args):
+    def ut_build(ut_build_name, ctx, args):
         # Create a copy of the passed arguments to prevent arg modifications to accrue if
         # all configs are being built
         build_args = copy.copy(args)
@@ -187,14 +138,17 @@ def add_action_extensions(base_functions, base_actions):
                 pass
 
             # Build, tweaking paths to sdkconfig and sdkconfig.defaults
-            ut_apply_config("ut-apply-config-" + config_name, build_args)
+            ut_apply_config("ut-apply-config-" + config_name, ctx, build_args)
 
-            build_target = base_functions["build_target"]
+            build_target = base_actions["actions"]["all"]["callback"]
 
-            build_target("all", build_args)
+            build_target("all", ctx, build_args)
 
             # Copy artifacts to the output directory
-            shutil.copyfile(os.path.join(build_args.project_dir, "sdkconfig"), os.path.join(dest, "sdkconfig"))
+            shutil.copyfile(
+                os.path.join(build_args.project_dir, "sdkconfig"),
+                os.path.join(dest, "sdkconfig"),
+            )
 
             binaries = [PROJECT_NAME + x for x in [".elf", ".bin", ".map"]]
 
@@ -206,16 +160,29 @@ def add_action_extensions(base_functions, base_actions):
             except OSError:
                 pass
 
-            shutil.copyfile(os.path.join(src, "bootloader", "bootloader.bin"), os.path.join(dest, "bootloader", "bootloader.bin"))
+            shutil.copyfile(
+                os.path.join(src, "bootloader", "bootloader.bin"),
+                os.path.join(dest, "bootloader", "bootloader.bin"),
+            )
 
-            for partition_table in glob.glob(os.path.join(src, "partition_table", "partition-table*.bin")):
+            for partition_table in glob.glob(
+                os.path.join(src, "partition_table", "partition-table*.bin")
+            ):
                 try:
                     os.mkdir(os.path.join(dest, "partition_table"))
                 except OSError:
                     pass
-                shutil.copyfile(partition_table, os.path.join(dest, "partition_table", os.path.basename(partition_table)))
-
-            shutil.copyfile(os.path.join(src, "flasher_args.json"), os.path.join(dest, "flasher_args.json"))
+                shutil.copyfile(
+                    partition_table,
+                    os.path.join(
+                        dest, "partition_table", os.path.basename(partition_table)
+                    ),
+                )
+
+            shutil.copyfile(
+                os.path.join(src, "flasher_args.json"),
+                os.path.join(dest, "flasher_args.json"),
+            )
 
             binaries = glob.glob(os.path.join(src, "*.bin"))
             binaries = [os.path.basename(s) for s in binaries]
@@ -227,7 +194,7 @@ def add_action_extensions(base_functions, base_actions):
             if not config_name == "all-configs":
                 print("unknown unit test app config for action '%s'" % ut_build_name)
 
-    def ut_clean(ut_clean_name, args):
+    def ut_clean(ut_clean_name, ctx, args):
         config_name = re.match(r"ut-clean-(.*)", ut_clean_name).group(1)
         if config_name in CONFIG_NAMES:
             shutil.rmtree(os.path.join(BUILDS_DIR, config_name), ignore_errors=True)
@@ -236,26 +203,52 @@ def add_action_extensions(base_functions, base_actions):
             if not config_name == "all-configs":
                 print("unknown unit test app config for action '%s'" % ut_clean_name)
 
-    def ut_help(action, args):
-        HELP_STRING = """
-Additional unit-test-app specific targets
-
-idf.py ut-build-NAME - Build unit-test-app with configuration provided in configs/NAME.
-                    Build directory will be builds/NAME/, output binaries will be
-                    under output/NAME/
+    def test_component_callback(ctx, global_args, tasks):
+        """ Convert the values passed to the -T and -E parameter to corresponding cache entry definitions TESTS_ALL and TEST_COMPONENTS """
+        test_components = global_args.test_components
+        test_exclude_components = global_args.test_exclude_components
 
-idf.py ut-clean-NAME - Remove build and output directories for configuration NAME.
+        cache_entries = []
 
-idf.py ut-build-all-configs - Build all configurations defined in configs/ directory.
-
-idf.py ut-apply-config-NAME - Generates configuration based on configs/NAME in sdkconfig
-                    file. After this, normal all/flash targets can be used.
-                    Useful for development/debugging.
-"""
-        print(HELP_STRING)
-
-    # Build dictionary of action extensions
-    extensions = dict()
+        if test_components:
+            if "all" in test_components:
+                cache_entries.append("TESTS_ALL=1")
+                cache_entries.append("TEST_COMPONENTS=''")
+            else:
+                cache_entries.append("TESTS_ALL=0")
+                cache_entries.append("TEST_COMPONENTS='%s'" % " ".join(test_components))
+
+        if test_exclude_components:
+            cache_entries.append(
+                "TEST_EXCLUDE_COMPONENTS='%s'" % " ".join(test_exclude_components)
+            )
+
+        if cache_entries:
+            global_args.define_cache_entry = list(global_args.define_cache_entry)
+            global_args.define_cache_entry.extend(cache_entries)
+
+            # Brute force add reconfigure at the very beginning
+            reconfigure_task = ctx.invoke(ctx.command.get_command(ctx, "reconfigure"))
+            tasks.insert(0, reconfigure_task)
+
+    # Add global options
+    extensions = {
+        "global_options": [
+            # For convenience, define a -T and -E argument that gets converted to -D arguments
+            {
+                "names": ["-T", "--test-components"],
+                "help": "Specify the components to test",
+                "multiple": True,
+            },
+            {
+                "names": ["-E", "--test-exclude-components"],
+                "help": "Specify the components to exclude from testing",
+                "multiple": True,
+            },
+        ],
+        "global_action_callbacks": [test_component_callback],
+        "actions": {},
+    }
 
     # This generates per-config targets (clean, build, apply-config).
     build_all_config_deps = []
@@ -266,16 +259,39 @@ idf.py ut-apply-config-NAME - Generates configuration based on configs/NAME in s
         config_clean_action_name = "ut-clean-" + config
         config_apply_config_action_name = "ut-apply-config-" + config
 
-        extensions[config_build_action_name] = (ut_build, [], [])
-        extensions[config_clean_action_name] = (ut_clean, [], [])
-        extensions[config_apply_config_action_name] = (ut_apply_config, [], [])
+        extensions["actions"][config_build_action_name] = {
+            "callback": ut_build,
+            "help": "Build unit-test-app with configuration provided in configs/NAME. "
+            + "Build directory will be builds/%s/, " % config_build_action_name
+            + "output binaries will be under output/%s/" % config_build_action_name,
+        }
+
+        extensions["actions"][config_clean_action_name] = {
+            "callback": ut_clean,
+            "help": "Remove build and output directories for configuration %s."
+            % config_clean_action_name,
+        }
+
+        extensions["actions"][config_apply_config_action_name] = {
+            "callback": ut_apply_config,
+            "help": "Generates configuration based on configs/%s in sdkconfig file."
+            % config_apply_config_action_name
+            + "After this, normal all/flash targets can be used. Useful for development/debugging.",
+        }
 
         build_all_config_deps.append(config_build_action_name)
         clean_all_config_deps.append(config_clean_action_name)
 
-    extensions["ut-build-all-configs"] = (ut_build, build_all_config_deps, [])
-    extensions["ut-clean-all-configs"] = (ut_clean, clean_all_config_deps, [])
+    extensions["actions"]["ut-build-all-configs"] = {
+        "callback": ut_build,
+        "help": "Build all configurations defined in configs/ directory.",
+        "dependencies": build_all_config_deps,
+    }
 
-    extensions["ut-help"] = (ut_help, [], [])
+    extensions["actions"]["ut-clean-all-configs"] = {
+        "callback": ut_clean,
+        "help": "Remove build and output directories for all configurations defined in configs/ directory.",
+        "dependencies": clean_all_config_deps,
+    }
 
-    base_actions.update(extensions)
+    return extensions