scripts: remove west from scripts/
west will now be installed via pip in order for the bootstrapper to be
decoupled from the west runners.

Signed-off-by: Carles Cufi <carles.cufi@nordicsemi.no>
parent 33dae59a57
commit 9b4eb37f38
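With the bootstrapper removed from scripts/, west is expected to be installed from PyPI before a Zephyr installation is set up. A minimal sketch of that workflow follows; the exact package name and options are assumptions for illustration, not part of this commit, and the "west init"/"west clone" steps mirror the removed bootstrap script:

    $ pip3 install --user west    # assumed PyPI package name for the bootstrapper
    $ west init zephyrproject     # initialize an installation (directory argument optional)
    $ cd zephyrproject
    $ west clone                  # fetch the project repositories listed in the manifest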
@@ -1,5 +0,0 @@
# Copyright 2018 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

# Empty file.
@@ -1,439 +0,0 @@
# Copyright 2018 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''West's bootstrap/wrapper script.
'''

import argparse
import configparser
import os
import platform
import pykwalify.core
import subprocess
import sys
import yaml

import west._bootstrap.version as version

if sys.version_info < (3,):
    sys.exit('fatal error: you are running Python 2')


#
# Special files and directories in the west installation.
#
# These are given variable names for clarity, but they can't be
# changed without propagating the changes into west itself.
#

# Top-level west directory, containing west itself and the manifest.
WEST_DIR = 'west'
# Subdirectory to check out the west source repository into.
WEST = 'west'
# Default west repository URL.
WEST_URL_DEFAULT = 'https://github.com/zephyrproject-rtos/west'
# Default revision to check out of the west repository.
WEST_REV_DEFAULT = 'master'
# File inside of WEST_DIR which marks it as the top level of the
# Zephyr project installation.
#
# (The WEST_DIR name is not distinct enough to use when searching for
# the top level; other directories named "west" may exist elsewhere,
# e.g. zephyr/doc/west.)
WEST_MARKER = '.west_topdir'

# Manifest repository directory under WEST_DIR.
MANIFEST = 'manifest'
# Default manifest repository URL.
MANIFEST_URL_DEFAULT = 'https://github.com/zephyrproject-rtos/manifest'
# Default revision to check out of the manifest repository.
MANIFEST_REV_DEFAULT = 'master'

_SCHEMA_PATH = os.path.join(os.path.dirname(__file__), "west-schema.yml")

#
# Helpers shared between init and wrapper mode
#


class WestError(RuntimeError):
    pass


class WestNotFound(WestError):
    '''Neither the current directory nor any parent has a West installation.'''


def west_dir(start=None):
    '''
    Returns the path to the west/ directory, searching ``start`` and its
    parents.

    Raises WestNotFound if no west directory is found.
    '''
    return os.path.join(west_topdir(start), WEST_DIR)


def manifest_dir(start=None):
    '''
    Returns the path to the manifest/ directory, searching ``start`` and its
    parents.

    Raises WestNotFound if no west directory is found.
    '''
    return os.path.join(west_topdir(start), MANIFEST)


def west_topdir(start=None):
    '''
    Like west_dir(), but returns the path to the parent directory of the west/
    directory instead, where project repositories are stored
    '''
    # If you change this function, make sure to update west.util.west_topdir().

    cur_dir = start or os.getcwd()

    while True:
        if os.path.isfile(os.path.join(cur_dir, WEST_DIR, WEST_MARKER)):
            return cur_dir

        parent_dir = os.path.dirname(cur_dir)
        if cur_dir == parent_dir:
            # At the root
            raise WestNotFound('Could not find a West installation '
                               'in this or any parent directory')
        cur_dir = parent_dir


def clone(desc, url, rev, dest):
    if os.path.exists(dest):
        raise WestError('refusing to clone into existing location ' + dest)

    print('=== Cloning {} from {}, rev. {} ==='.format(desc, url, rev))
    subprocess.check_call(('git', 'clone', '-b', rev, '--', url, dest))


#
# west init
#


def init(argv):
    '''Command line handler for ``west init`` invocations.

    This exits the program with a nonzero exit code if fatal errors occur.'''

    # Remember to update scripts/west-completion.bash if you add or remove
    # flags

    init_parser = argparse.ArgumentParser(
        prog='west init',
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description='''
Initializes a Zephyr installation. Use "west clone" afterwards to fetch the
sources.

In more detail, does the following:

  1. Clones the manifest repository to west/manifest, and the west repository
     to west/west

  2. Creates a marker file west/{}

  3. Creates an initial configuration file west/config

As an alternative to manually editing west/config, 'west init' can be rerun on
an already initialized West instance to update configuration settings. Only
explicitly passed configuration values (e.g. --mr MANIFEST_REVISION) are
updated.

Updating the manifest URL or revision via 'west init' automatically runs 'west
update --reset-manifest --reset-projects' afterwards to reset the manifest to
the new revision, and all projects to their new manifest revisions.

Updating the west URL or revision also runs 'west update --reset-west'.

To suppress the reset of the manifest, west, and projects, pass --no-reset.
With --no-reset, only the configuration file will be updated, and you will have
to handle any resetting yourself.
'''.format(WEST_MARKER))

    init_parser.add_argument(
        '-m', '--manifest-url',
        help='Manifest repository URL (default: {})'
             .format(MANIFEST_URL_DEFAULT))

    init_parser.add_argument(
        '--mr', '--manifest-rev', dest='manifest_rev',
        help='Manifest revision to fetch (default: {})'
             .format(MANIFEST_REV_DEFAULT))

    init_parser.add_argument(
        '--nr', '--no-reset', dest='reset', action='store_false',
        help='''Suppress the automatic reset of the manifest, west, and project
             repositories when re-running 'west init' in an existing
             installation to update the manifest or west URL/revision''')

    init_parser.add_argument(
        'directory', nargs='?', default=None,
        help='''Directory to initialize West in. Missing directories will be
             created automatically. (default: current directory)''')

    args = init_parser.parse_args(args=argv)

    try:
        reinit(os.path.join(west_dir(args.directory), 'config'), args)
    except WestNotFound:
        bootstrap(args)


def bootstrap(args):
    '''Bootstrap a new manifest + West installation.'''

    west_url = WEST_URL_DEFAULT
    manifest_url = args.manifest_url or MANIFEST_URL_DEFAULT

    west_rev = WEST_REV_DEFAULT
    manifest_rev = args.manifest_rev or MANIFEST_REV_DEFAULT

    directory = args.directory or os.getcwd()

    if not os.path.isdir(directory):
        try:
            print('Initializing in new directory', directory)
            os.makedirs(directory, exist_ok=False)
        except PermissionError:
            sys.exit('Cannot initialize in {}: permission denied'.format(
                directory))
        except FileExistsError:
            sys.exit('Something else created {} concurrently; quitting'.format(
                directory))
        except Exception as e:
            sys.exit("Can't create directory {}: {}".format(
                directory, e.args))
    else:
        print('Initializing in', directory)

    # Clone the west source code and the manifest into west/. Git will create
    # the west/ directory if it does not exist.

    clone('manifest repository', manifest_url, manifest_rev,
          os.path.join(directory, WEST_DIR, MANIFEST))

    # Parse the manifest and look for a section named "west"
    manifest_file = os.path.join(directory, WEST_DIR, MANIFEST, 'default.yml')
    with open(manifest_file, 'r') as f:
        data = yaml.safe_load(f.read())

    if 'west' in data:
        wdata = data['west']
        try:
            pykwalify.core.Core(
                source_data=wdata,
                schema_files=[_SCHEMA_PATH]
            ).validate()
        except pykwalify.errors.SchemaError as e:
            sys.exit("Error: Failed to parse manifest file '{}': {}"
                     .format(manifest_file, e))

        if 'url' in wdata:
            west_url = wdata['url']
        if 'revision' in wdata:
            west_rev = wdata['revision']

    print("cloning {} at revision {}".format(west_url, west_rev))
    clone('west repository', west_url, west_rev,
          os.path.join(directory, WEST_DIR, WEST))

    # Create an initial configuration file

    config_path = os.path.join(directory, WEST_DIR, 'config')
    update_conf(config_path, manifest_url, manifest_rev)
    print('=== Initial configuration written to {} ==='.format(config_path))

    # Create a dotfile to mark the installation. Hide it on Windows.

    with open(os.path.join(directory, WEST_DIR, WEST_MARKER), 'w') as f:
        hide_file(f.name)

    print('=== West initialized. Now run "west clone" in {}. ==='.
          format(directory))


def reinit(config_path, args):
    '''
    Reinitialize an existing installation.

    This updates the west/config configuration file, and optionally resets the
    manifest, west, and project repositories to the new revision.
    '''
    manifest_url = args.manifest_url

    if not (manifest_url or args.manifest_rev):
        sys.exit('West already initialized. Please pass any settings you '
                 'want to change.')

    update_conf(config_path, manifest_url, args.manifest_rev)

    print('=== Updated configuration written to {} ==='.format(config_path))

    if args.reset:
        cmd = ['update', '--reset-manifest', '--reset-projects',
               '--reset-west']
        print("=== Running 'west {}' to update repositories ==="
              .format(' '.join(cmd)))
        wrap(cmd)


def update_conf(config_path, manifest_url, manifest_rev):
    '''
    Creates or updates the configuration file at 'config_path' with the
    specified values. Values that are None/empty are ignored.
    '''
    config = configparser.ConfigParser()

    # This is a no-op if the file doesn't exist, so no need to check
    config.read(config_path)

    update_key(config, 'manifest', 'remote', manifest_url)
    update_key(config, 'manifest', 'revision', manifest_rev)

    with open(config_path, 'w') as f:
        config.write(f)


def update_key(config, section, key, value):
    '''
    Updates 'key' in section 'section' in ConfigParser 'config', creating
    'section' if it does not exist.

    If value is None/empty, 'key' is left as-is.
    '''
    if not value:
        return

    if section not in config:
        config[section] = {}

    config[section][key] = value


def hide_file(path):
    '''Ensure path is a hidden file.

    On Windows, this uses attrib to hide the file manually.

    On UNIX systems, this just checks that the path's basename begins
    with a period ('.'), for it to be hidden already. It's a fatal
    error if it does not begin with a period in this case.

    On other systems, this just prints a warning.
    '''
    system = platform.system()

    if system == 'Windows':
        subprocess.check_call(['attrib', '+H', path])
    elif os.name == 'posix':  # Try to check for all Unix, not just macOS/Linux
        if not os.path.basename(path).startswith('.'):
            sys.exit("internal error: {} can't be hidden on UNIX".format(path))
    else:
        print("warning: unknown platform {}; {} may not be hidden"
              .format(system, path), file=sys.stderr)


#
# Wrap a West command
#

def append_to_pythonpath(directory):
    pp = os.environ.get('PYTHONPATH')
    os.environ['PYTHONPATH'] = ':'.join(([pp] if pp else []) + [directory])


def wrap(argv):
    printing_version = False
    printing_help_only = False

    if argv:
        if argv[0] in ('-V', '--version'):
            print('West bootstrapper version: v{} ({})'.
                  format(version.__version__, os.path.dirname(__file__)))
            printing_version = True
        elif len(argv) == 1 and argv[0] in ('-h', '--help'):
            # This only matters if we're called outside of an
            # installation directory. We delegate to the main help if
            # called from within one, because it includes a list of
            # available commands, etc.
            printing_help_only = True

    start = os.getcwd()
    try:
        topdir = west_topdir(start)
    except WestNotFound:
        if printing_version:
            sys.exit(0)  # run outside of an installation directory
        elif printing_help_only:
            # We call print multiple times here and below instead of using
            # \n to be newline agnostic.
            print('To set up a Zephyr installation here, run "west init".')
            print('Run "west init -h" for additional information.')
            sys.exit(0)
        else:
            print('Error: "{}" is not a Zephyr installation directory.'.
                  format(start), file=sys.stderr)
            print('Things to try:', file=sys.stderr)
            print('  - Run "west init" to set up an installation here.',
                  file=sys.stderr)
            print('  - Run "west init -h" for additional information.',
                  file=sys.stderr)
            sys.exit(1)

    west_git_repo = os.path.join(topdir, WEST_DIR, WEST)
    if printing_version:
        try:
            git_describe = subprocess.check_output(
                ['git', 'describe', '--tags'],
                stderr=subprocess.DEVNULL,
                cwd=west_git_repo).decode(sys.getdefaultencoding()).strip()
            print('West repository version: {} ({})'.format(git_describe,
                                                            west_git_repo))
        except subprocess.CalledProcessError:
            print('West repository version: unknown; no tags were found')
        sys.exit(0)

    # Import the west package from the installation and run its main
    # function with the given command-line arguments.
    #
    # This can't be done as a subprocess: that would break the
    # runners' debug handling for GDB, which needs to block the usual
    # control-C signal handling. GDB uses Ctrl-C to halt the debug
    # target. So we really do need to import west and delegate within
    # this bootstrap process.
    #
    # Put this at position 1 to make sure it comes before random stuff
    # that might be on a developer's PYTHONPATH in the import order.
    sys.path.insert(1, os.path.join(west_git_repo, 'src'))
    import west.main
    west.main.main(argv)


#
# Main entry point
#


def main(wrap_argv=None):
    '''Entry point to the wrapper script.'''
    if wrap_argv is None:
        wrap_argv = sys.argv[1:]

    if not wrap_argv or wrap_argv[0] != 'init':
        wrap(wrap_argv)
    else:
        init(wrap_argv[1:])
    sys.exit(0)


if __name__ == '__main__':
    main()
@@ -1,5 +0,0 @@
# Don't put anything else in here!
#
# This is the Python 3 version of option 3 in:
# https://packaging.python.org/guides/single-sourcing-package-version/#single-sourcing-the-version
__version__ = '0.4.1'
@@ -1,17 +0,0 @@
## A pykwalify schema for basic validation of the structure of a
## west YAML file. (Full validation would require additional work,
## e.g. to validate that remote URLs obey the URL format specified in
## rfc1738.)
##

# The top-level west yaml is a map. The only top-level element is
# 'west'. All other elements are contained within it. This allows
# us a bit of future-proofing.
type: map
mapping:
  url:
    required: false
    type: str
  revision:
    required: false
    type: str
@@ -1,42 +0,0 @@
# Copyright 2018 (c) Foundries.io.
#
# SPDX-License-Identifier: Apache-2.0

'''Common definitions for building Zephyr applications.

This provides some default settings and convenience wrappers for
building Zephyr applications needed by multiple commands.

See west.cmd.build for the build command itself.
'''

from west import cmake
from west import log

DEFAULT_BUILD_DIR = 'build'
'''Name of the default Zephyr build directory.'''

DEFAULT_CMAKE_GENERATOR = 'Ninja'
'''Name of the default CMake generator.'''


def is_zephyr_build(path):
    '''Return true if and only if `path` appears to be a valid Zephyr
    build directory.

    "Valid" means the given path is a directory which contains a CMake
    cache with a 'ZEPHYR_TOOLCHAIN_VARIANT' key.
    '''
    try:
        cache = cmake.CMakeCache.from_build_dir(path)
    except FileNotFoundError:
        cache = {}

    if 'ZEPHYR_TOOLCHAIN_VARIANT' in cache:
        log.dbg('{} is a zephyr build directory'.format(path),
                level=log.VERBOSE_EXTREME)
        return True
    else:
        log.dbg('{} is NOT a valid zephyr build directory'.format(path),
                level=log.VERBOSE_EXTREME)
        return False
@@ -1,220 +0,0 @@
# Copyright (c) 2018 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Helpers for dealing with CMake'''

from collections import OrderedDict
import os.path
import re
import subprocess
import shutil

from west import log
from west.util import quote_sh_list

__all__ = ['run_cmake', 'run_build',
           'make_c_identifier',
           'CMakeCacheEntry', 'CMakeCache']

DEFAULT_CACHE = 'CMakeCache.txt'


def run_cmake(args, quiet=False):
    '''Run cmake to (re)generate a build system'''
    cmake = shutil.which('cmake')
    if cmake is None:
        log.die('CMake is not installed or cannot be found; cannot build.')
    cmd = [cmake] + args
    kwargs = dict()
    if quiet:
        kwargs['stdout'] = subprocess.DEVNULL
        kwargs['stderr'] = subprocess.STDOUT
    log.dbg('Running CMake:', cmd, level=log.VERBOSE_VERY)
    log.dbg('As command:', quote_sh_list(cmd), level=log.VERBOSE_VERY)
    subprocess.check_call(cmd, **kwargs)


def run_build(build_directory, extra_args=(), quiet=False):
    '''Run cmake in build tool mode in `build_directory`'''
    run_cmake(['--build', build_directory] + list(extra_args), quiet=quiet)


def make_c_identifier(string):
    '''Make a C identifier from a string in the same way CMake does.
    '''
    # The behavior of CMake's string(MAKE_C_IDENTIFIER ...) is not
    # precisely documented. This behavior matches the test case
    # that introduced the function:
    #
    # https://gitlab.kitware.com/cmake/cmake/commit/0ab50aea4c4d7099b339fb38b4459d0debbdbd85
    ret = []

    alpha_under = re.compile('[A-Za-z_]')
    alpha_num_under = re.compile('[A-Za-z0-9_]')

    if not alpha_under.match(string):
        ret.append('_')
    for c in string:
        if alpha_num_under.match(c):
            ret.append(c)
        else:
            ret.append('_')

    return ''.join(ret)


class CMakeCacheEntry:
    '''Represents a CMake cache entry.

    This class understands the type system in a CMakeCache.txt, and
    converts the following cache types to Python types:

        Cache Type    Python type
        ----------    -------------------------------------------
        FILEPATH      str
        PATH          str
        STRING        str OR list of str (if ';' is in the value)
        BOOL          bool
        INTERNAL      str OR list of str (if ';' is in the value)
        ----------    -------------------------------------------
    '''

    # Regular expression for a cache entry.
    #
    # CMake variable names can include escape characters, allowing a
    # wider set of names than is easy to match with a regular
    # expresion. To be permissive here, use a non-greedy match up to
    # the first colon (':'). This breaks if the variable name has a
    # colon inside, but it's good enough.
    CACHE_ENTRY = re.compile(
        r'''(?P<name>.*?)                               # name
        :(?P<type>FILEPATH|PATH|STRING|BOOL|INTERNAL)   # type
        =(?P<value>.*)                                  # value
        ''', re.X)

    @classmethod
    def _to_bool(cls, val):
        # Convert a CMake BOOL string into a Python bool.
        #
        # "True if the constant is 1, ON, YES, TRUE, Y, or a
        # non-zero number. False if the constant is 0, OFF, NO,
        # FALSE, N, IGNORE, NOTFOUND, the empty string, or ends in
        # the suffix -NOTFOUND. Named boolean constants are
        # case-insensitive. If the argument is not one of these
        # constants, it is treated as a variable."
        #
        # https://cmake.org/cmake/help/v3.0/command/if.html
        val = val.upper()
        if val in ('ON', 'YES', 'TRUE', 'Y'):
            return True
        elif val in ('OFF', 'NO', 'FALSE', 'N', 'IGNORE', 'NOTFOUND', ''):
            return False
        elif val.endswith('-NOTFOUND'):
            return False
        else:
            try:
                v = int(val)
                return v != 0
            except ValueError as exc:
                raise ValueError('invalid bool {}'.format(val)) from exc

    @classmethod
    def from_line(cls, line, line_no):
        # Comments can only occur at the beginning of a line.
        # (The value of an entry could contain a comment character).
        if line.startswith('//') or line.startswith('#'):
            return None

        # Whitespace-only lines do not contain cache entries.
        if not line.strip():
            return None

        m = cls.CACHE_ENTRY.match(line)
        if not m:
            return None

        name, type_, value = (m.group(g) for g in ('name', 'type', 'value'))
        if type_ == 'BOOL':
            try:
                value = cls._to_bool(value)
            except ValueError as exc:
                args = exc.args + ('on line {}: {}'.format(line_no, line),)
                raise ValueError(args) from exc
        elif type_ == 'STRING' or type_ == 'INTERNAL':
            # If the value is a CMake list (i.e. is a string which
            # contains a ';'), convert to a Python list.
            if ';' in value:
                value = value.split(';')

        return CMakeCacheEntry(name, value)

    def __init__(self, name, value):
        self.name = name
        self.value = value

    def __str__(self):
        fmt = 'CMakeCacheEntry(name={}, value={})'
        return fmt.format(self.name, self.value)


class CMakeCache:
    '''Parses and represents a CMake cache file.'''

    @staticmethod
    def from_build_dir(build_dir):
        return CMakeCache(os.path.join(build_dir, DEFAULT_CACHE))

    def __init__(self, cache_file):
        self.cache_file = cache_file
        self.load(cache_file)

    def load(self, cache_file):
        entries = []
        with open(cache_file, 'r') as cache:
            for line_no, line in enumerate(cache):
                entry = CMakeCacheEntry.from_line(line, line_no)
                if entry:
                    entries.append(entry)
        self._entries = OrderedDict((e.name, e) for e in entries)

    def get(self, name, default=None):
        entry = self._entries.get(name)
        if entry is not None:
            return entry.value
        else:
            return default

    def get_list(self, name, default=None):
        if default is None:
            default = []
        entry = self._entries.get(name)
        if entry is not None:
            value = entry.value
            if isinstance(value, list):
                return value
            elif isinstance(value, str):
                return [value] if value else []
            else:
                msg = 'invalid value {} type {}'
                raise RuntimeError(msg.format(value, type(value)))
        else:
            return default

    def __contains__(self, name):
        return name in self._entries

    def __getitem__(self, name):
        return self._entries[name].value

    def __setitem__(self, name, entry):
        if not isinstance(entry, CMakeCacheEntry):
            msg = 'improper type {} for value {}, expecting CMakeCacheEntry'
            raise TypeError(msg.format(type(entry), entry))
        self._entries[name] = entry

    def __delitem__(self, name):
        del self._entries[name]

    def __iter__(self):
        return iter(self._entries.values())
@@ -1,74 +0,0 @@
# Copyright 2018 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''West's commands subpackage.

All commands should be implemented within modules in this package.
'''

from abc import ABC, abstractmethod

__all__ = ['CommandContextError', 'WestCommand']


class CommandContextError(RuntimeError):
    '''Indicates that a context-dependent command could not be run.'''


class WestCommand(ABC):
    '''Abstract superclass for a west command.

    All top-level commands supported by west implement this interface.'''

    def __init__(self, name, description, accepts_unknown_args=False):
        '''Create a command instance.

        `name`: the command's name, as entered by the user.
        `description`: one-line command description to show to the user.

        `accepts_unknown_args`: if true, the command can handle
        arbitrary unknown command line arguments in its run()
        method. Otherwise, passing unknown arguments will cause
        UnknownArgumentsError to be raised.
        '''
        self.name = name
        self.description = description
        self._accept_unknown = accepts_unknown_args

    def run(self, args, unknown):
        '''Run the command.

        `args`: known arguments parsed via `register_arguments()`
        `unknown`: unknown arguments present on the command line
        '''
        if unknown and not self._accept_unknown:
            self.parser.error('unexpected arguments: {}'.format(unknown))
        self.do_run(args, unknown)

    def add_parser(self, parser_adder):
        '''Registers a parser for this command, and returns it.
        '''
        self.parser = self.do_add_parser(parser_adder)
        return self.parser

    #
    # Mandatory subclass hooks
    #

    @abstractmethod
    def do_add_parser(self, parser_adder):
        '''Subclass method for registering command line arguments.

        `parser_adder` is an argparse argument subparsers adder.'''

    @abstractmethod
    def do_run(self, args, unknown):
        '''Subclasses must implement; called when the command is run.

        `args` is the namespace of parsed known arguments.

        If `accepts_unknown_args` was False when constructing this
        object, `unknown` will be empty. Otherwise, it is an iterable
        containing all unknown arguments present on the command line.
        '''
@@ -1,288 +0,0 @@
# Copyright (c) 2018 Foundries.io
#
# SPDX-License-Identifier: Apache-2.0

import argparse
import os

from west import log
from west import cmake
from west.build import DEFAULT_BUILD_DIR, DEFAULT_CMAKE_GENERATOR, \
    is_zephyr_build
from west.commands import WestCommand

BUILD_HELP = '''\
Convenience wrapper for building Zephyr applications.

This command attempts to do what you mean when run from a Zephyr
application source or a pre-existing build directory:

- When "west build" is run from a Zephyr build directory, the source
  directory is obtained from the CMake cache, and that build directory
  is re-compiled.

- Otherwise, the source directory defaults to the current working
  directory, so running "west build" from a Zephyr application's
  source directory compiles it.

The source and build directories can be explicitly set with the
--source-dir and --build-dir options. The build directory defaults to
'build' if it is not auto-detected. The build directory is always
created if it does not exist.

This command runs CMake to generate a build system if one is not
present in the build directory, then builds the application.
Subsequent builds try to avoid re-running CMake; you can force it
to run by setting --cmake.

To pass additional options to CMake, give them as extra arguments
after a '--' For example, "west build -- -DOVERLAY_CONFIG=some.conf" sets
an overlay config file. (Doing this forces a CMake run.)'''


class Build(WestCommand):

    def __init__(self):
        super(Build, self).__init__(
            'build',
            BUILD_HELP,
            accepts_unknown_args=False)

        self.source_dir = None
        '''Source directory for the build, or None on error.'''

        self.build_dir = None
        '''Final build directory used to run the build, or None on error.'''

        self.created_build_dir = False
        '''True if the build directory was created; False otherwise.'''

        self.run_cmake = False
        '''True if CMake was run; False otherwise.

        Note: this only describes CMake runs done by this command. The
        build system generated by CMake may also update itself due to
        internal logic.'''

        self.cmake_cache = None
        '''Final parsed CMake cache for the build, or None on error.'''

    def do_add_parser(self, parser_adder):
        parser = parser_adder.add_parser(
            self.name,
            formatter_class=argparse.RawDescriptionHelpFormatter,
            description=self.description)

        # Remember to update scripts/west-completion.bash if you add or remove
        # flags

        parser.add_argument('-b', '--board',
                            help='''Board to build for (must be given for the
                            first build, can be omitted later)''')
        parser.add_argument('-s', '--source-dir',
                            help='''Explicitly set the source directory.
                            If not given and rebuilding an existing Zephyr
                            build directory, this is taken from the CMake
                            cache. Otherwise, the current directory is
                            assumed.''')
        parser.add_argument('-d', '--build-dir',
                            help='''Explicitly sets the build directory.
                            If not given and the current directory is a Zephyr
                            build directory, it will be used; otherwise, "{}"
                            is assumed. The directory will be created if
                            it doesn't exist.'''.format(DEFAULT_BUILD_DIR))
        parser.add_argument('-t', '--target',
                            help='''Override the build system target (e.g.
                            'clean', 'pristine', etc.)''')
        parser.add_argument('-c', '--cmake', action='store_true',
                            help='Force CMake to run')
        parser.add_argument('-f', '--force', action='store_true',
                            help='Ignore any errors and try to build anyway')
        parser.add_argument('cmake_opts', nargs='*', metavar='cmake_opt',
                            help='Extra option to pass to CMake; implies -c')

        return parser

    def do_run(self, args, ignored):
        self.args = args  # Avoid having to pass them around
        log.dbg('args:', args, level=log.VERBOSE_EXTREME)
        self._sanity_precheck()
        self._setup_build_dir()
        if is_zephyr_build(self.build_dir):
            self._update_cache()
            if self.args.cmake or self.args.cmake_opts:
                self.run_cmake = True
        else:
            self.run_cmake = True
        self._setup_source_dir()
        self._sanity_check()

        log.inf('source directory: {}'.format(self.source_dir), colorize=True)
        log.inf('build directory: {}{}'.
                format(self.build_dir,
                       (' (created)' if self.created_build_dir
                        else '')),
                colorize=True)
        if self.cmake_cache:
            board = self.cmake_cache.get('CACHED_BOARD')
        elif self.args.board:
            board = self.args.board
        else:
            board = 'UNKNOWN'  # shouldn't happen
        log.inf('BOARD:', board, colorize=True)

        self._run_cmake(self.args.cmake_opts)
        self._sanity_check()
        self._update_cache()

        extra_args = ['--target', args.target] if args.target else []
        cmake.run_build(self.build_dir, extra_args=extra_args)

    def _sanity_precheck(self):
        app = self.args.source_dir
        if app:
            if not os.path.isdir(app):
                self._check_force('source directory {} does not exist'.
                                  format(app))
            elif 'CMakeLists.txt' not in os.listdir(app):
                self._check_force("{} doesn't contain a CMakeLists.txt".
                                  format(app))

    def _update_cache(self):
        try:
            self.cmake_cache = cmake.CMakeCache.from_build_dir(self.build_dir)
        except FileNotFoundError:
            pass

    def _setup_build_dir(self):
        # Initialize build_dir and created_build_dir attributes.
        log.dbg('setting up build directory', level=log.VERBOSE_EXTREME)
        if self.args.build_dir:
            build_dir = self.args.build_dir
        else:
            cwd = os.getcwd()
            if is_zephyr_build(cwd):
                build_dir = cwd
            else:
                build_dir = DEFAULT_BUILD_DIR
        build_dir = os.path.abspath(build_dir)

        if os.path.exists(build_dir):
            if not os.path.isdir(build_dir):
                log.die('build directory {} exists and is not a directory'.
                        format(build_dir))
        else:
            os.makedirs(build_dir, exist_ok=False)
            self.created_build_dir = True
            self.run_cmake = True

        self.build_dir = build_dir

    def _setup_source_dir(self):
        # Initialize source_dir attribute, either from command line argument,
        # implicitly from the build directory's CMake cache, or using the
        # default (current working directory).
        log.dbg('setting up source directory', level=log.VERBOSE_EXTREME)
        if self.args.source_dir:
            source_dir = self.args.source_dir
        elif self.cmake_cache:
            source_dir = self.cmake_cache.get('APPLICATION_SOURCE_DIR')
            if not source_dir:
                # Maybe Zephyr changed the key? Give the user a way
                # to retry, at least.
                log.die("can't determine application from build directory "
                        "{}, please specify an application to build".
                        format(self.build_dir))
        else:
            source_dir = os.getcwd()
        self.source_dir = os.path.abspath(source_dir)

    def _sanity_check(self):
        # Sanity check the build configuration.
        # Side effect: may update cmake_cache attribute.
        log.dbg('sanity checking the build', level=log.VERBOSE_EXTREME)
        if self.source_dir == self.build_dir:
            # There's no forcing this.
            log.die('source and build directory {} cannot be the same; '
                    'use --build-dir {} to specify a build directory'.
                    format(self.source_dir, self.build_dir))

        srcrel = os.path.relpath(self.source_dir)
        if is_zephyr_build(self.source_dir):
            self._check_force('it looks like {srcrel} is a build directory: '
                              'did you mean -build-dir {srcrel} instead?'.
                              format(srcrel=srcrel))
        elif 'CMakeLists.txt' not in os.listdir(self.source_dir):
            self._check_force('source directory "{srcrel}" does not contain '
                              'a CMakeLists.txt; is that really what you '
                              'want to build? (Use -s SOURCE_DIR to specify '
                              'the application source directory)'.
                              format(srcrel=srcrel))

        if not is_zephyr_build(self.build_dir) and not self.args.board:
            self._check_force('this looks like a new or clean build, '
                              'please provide --board')

        if not self.cmake_cache:
            return  # That's all we can check without a cache.

        cached_app = self.cmake_cache.get('APPLICATION_SOURCE_DIR')
        log.dbg('APPLICATION_SOURCE_DIR:', cached_app,
                level=log.VERBOSE_EXTREME)
        source_abs = (os.path.abspath(self.args.source_dir)
                      if self.args.source_dir else None)
        cached_abs = os.path.abspath(cached_app) if cached_app else None
        if cached_abs and source_abs and source_abs != cached_abs:
            self._check_force('build directory "{}" is for application "{}", '
                              'but source directory "{}" was specified; '
                              'please clean it or use --build-dir to set '
                              'another build directory'.
                              format(self.build_dir, cached_abs,
                                     source_abs))
            self.run_cmake = True  # If they insist, we need to re-run cmake.

        cached_board = self.cmake_cache.get('CACHED_BOARD')
        log.dbg('CACHED_BOARD:', cached_board, level=log.VERBOSE_EXTREME)
        if not cached_board and not self.args.board:
            if self.created_build_dir:
                self._check_force(
                    'Building for the first time: you must provide --board')
            else:
                self._check_force(
                    'Board is missing or unknown, please provide --board')
        if self.args.board and cached_board and \
           self.args.board != cached_board:
            self._check_force('Build directory {} targets board {}, '
                              'but board {} was specified. (Clean that '
                              'directory or use --build-dir to specify '
                              'a different one.)'.
                              format(self.build_dir, cached_board,
                                     self.args.board))

    def _check_force(self, msg):
        if not self.args.force:
            log.err(msg)
            log.die('refusing to proceed without --force due to above error')

    def _run_cmake(self, cmake_opts):
        if not self.run_cmake:
            log.dbg('not running cmake; build system is present')
            return

        # It's unfortunate to have to use the undocumented -B and -H
        # options to set the source and binary directories.
        #
        # However, it's the only known way to set that directory and
        # run CMake from the current working directory. This is
        # important because users expect invocations like this to Just
        # Work:
        #
        # west build -- -DOVERLAY_CONFIG=relative-path.conf
        final_cmake_args = ['-B{}'.format(self.build_dir),
                            '-H{}'.format(self.source_dir),
                            '-G{}'.format(DEFAULT_CMAKE_GENERATOR)]
        if self.args.board:
            final_cmake_args.append('-DBOARD={}'.format(self.args.board))
        if cmake_opts:
            final_cmake_args.extend(cmake_opts)
        cmake.run_cmake(final_cmake_args)
@@ -1,70 +0,0 @@
# Copyright (c) 2018 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''west "debug" and "debugserver" commands.'''

from textwrap import dedent

from west.commands.run_common import desc_common, add_parser_common, \
    do_run_common
from west.commands import WestCommand


class Debug(WestCommand):

    def __init__(self):
        super(Debug, self).__init__(
            'debug',
            dedent('''
            Connect to the board, program the flash, and start a
            debugging session.\n\n''') +
            desc_common('debug'),
            accepts_unknown_args=True)

    def do_add_parser(self, parser_adder):
        return add_parser_common(parser_adder, self)

    def do_run(self, my_args, runner_args):
        do_run_common(self, my_args, runner_args,
                      'ZEPHYR_BOARD_DEBUG_RUNNER')


class DebugServer(WestCommand):

    def __init__(self):
        super(DebugServer, self).__init__(
            'debugserver',
            dedent('''
            Connect to the board and accept debug networking connections.

            The debug server binds to a known port, and allows client software
            started elsewhere to connect to it and debug the running
            Zephyr image.\n\n''') +
            desc_common('debugserver'),
            accepts_unknown_args=True)

    def do_add_parser(self, parser_adder):
        return add_parser_common(parser_adder, self)

    def do_run(self, my_args, runner_args):
        do_run_common(self, my_args, runner_args,
                      'ZEPHYR_BOARD_DEBUG_RUNNER')

class Attach(WestCommand):

    def __init__(self):
        super(Attach, self).__init__(
            'attach',
            dedent('''
            Connect to the board without programming the flash, and
            start a debugging session.\n\n''') +
            desc_common('attach'),
            accepts_unknown_args=True)

    def do_add_parser(self, parser_adder):
        return add_parser_common(parser_adder, self)

    def do_run(self, my_args, runner_args):
        do_run_common(self, my_args, runner_args,
                      'ZEPHYR_BOARD_DEBUG_RUNNER')
@@ -1,26 +0,0 @@
# Copyright (c) 2018 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''west "flash" command'''

from west.commands.run_common import desc_common, add_parser_common, \
    do_run_common
from west.commands import WestCommand


class Flash(WestCommand):

    def __init__(self):
        super(Flash, self).__init__(
            'flash',
            'Flash and run a binary on a board.\n\n' +
            desc_common('flash'),
            accepts_unknown_args=True)

    def do_add_parser(self, parser_adder):
        return add_parser_common(parser_adder, self)

    def do_run(self, my_args, runner_args):
        do_run_common(self, my_args, runner_args,
                      'ZEPHYR_BOARD_FLASH_RUNNER')

File diff suppressed because it is too large
@ -1,452 +0,0 @@
|
||||||
# Copyright (c) 2018 Open Source Foundries Limited.
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
|
||||||
|
|
||||||
'''Common code used by commands which execute runners.
|
|
||||||
'''
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
from os import getcwd, path
|
|
||||||
from subprocess import CalledProcessError
|
|
||||||
import textwrap
|
|
||||||
|
|
||||||
from west import cmake
|
|
||||||
from west import log
|
|
||||||
from west import util
|
|
||||||
from west.build import DEFAULT_BUILD_DIR, is_zephyr_build
|
|
||||||
from west.runners import get_runner_cls, ZephyrBinaryRunner
|
|
||||||
from west.runners.core import RunnerConfig
|
|
||||||
from west.commands import CommandContextError
|
|
||||||
|
|
||||||
# Context-sensitive help indentation.
|
|
||||||
# Don't change this, or output from argparse won't match up.
|
|
||||||
INDENT = ' ' * 2
|
|
||||||
|
|
||||||
|
|
||||||
def add_parser_common(parser_adder, command):
|
|
||||||
parser = parser_adder.add_parser(
|
|
||||||
command.name,
|
|
||||||
formatter_class=argparse.RawDescriptionHelpFormatter,
|
|
||||||
description=command.description)
|
|
||||||
|
|
||||||
# Remember to update scripts/west-completion.bash if you add or remove
|
|
||||||
# flags
|
|
||||||
|
|
||||||
parser.add_argument('-H', '--context', action='store_true',
|
|
||||||
help='''Rebuild application and print context-sensitive
|
|
||||||
help; this may be combined with --runner to restrict
|
|
||||||
output to a given runner.''')
|
|
||||||
|
|
||||||
group = parser.add_argument_group(title='General Options')
|
|
||||||
|
|
||||||
group.add_argument('-d', '--build-dir',
|
|
||||||
help='''Build directory to obtain runner information
|
|
||||||
from. If not given, this command tries to use build/
|
|
||||||
and then the current working directory, in that
|
|
||||||
order.''')
|
|
||||||
group.add_argument('-c', '--cmake-cache',
|
|
||||||
help='''Path to CMake cache file containing runner
|
|
||||||
configuration (this is generated by the Zephyr
|
|
||||||
build system when compiling binaries);
|
|
||||||
default: {}.
|
|
||||||
|
|
||||||
If this is a relative path, it is assumed relative to
|
|
||||||
the build directory. An absolute path can also be
|
|
||||||
given instead.'''.format(cmake.DEFAULT_CACHE))
|
|
||||||
group.add_argument('-r', '--runner',
|
|
||||||
help='''If given, overrides any cached {}
|
|
||||||
runner.'''.format(command.name))
|
|
||||||
group.add_argument('--skip-rebuild', action='store_true',
|
|
||||||
help='''If given, do not rebuild the application
|
|
||||||
before running {} commands.'''.format(command.name))
|
|
||||||
|
|
||||||
group = parser.add_argument_group(
|
|
||||||
title='Configuration overrides',
|
|
||||||
description=textwrap.dedent('''\
|
|
||||||
These values usually come from the Zephyr build system itself
|
|
||||||
as stored in the CMake cache; providing these options
|
|
||||||
overrides those settings.'''))
|
|
||||||
|
|
||||||
# Important:
|
|
||||||
#
|
|
||||||
# 1. The destination variables of these options must match
|
|
||||||
# the RunnerConfig slots.
|
|
||||||
# 2. The default values for all of these must be None.
|
|
||||||
#
|
|
||||||
# This is how we detect if the user provided them or not when
|
|
||||||
# overriding values from the cached configuration.
|
|
||||||
|
|
||||||
command_verb = "flash" if command == "flash" else "debug"
|
|
||||||
|
|
||||||
group.add_argument('--board-dir',
|
|
||||||
help='Zephyr board directory')
|
|
||||||
group.add_argument('--elf-file',
|
|
||||||
help='Path to elf file to {0}'.format(command_verb))
|
|
||||||
group.add_argument('--hex-file',
|
|
||||||
help='Path to hex file to {0}'.format(command_verb))
|
|
||||||
group.add_argument('--bin-file',
|
|
||||||
help='Path to binary file to {0}'.format(command_verb))
|
|
||||||
group.add_argument('--gdb',
|
|
||||||
help='Path to GDB, if applicable')
|
|
||||||
group.add_argument('--openocd',
|
|
||||||
help='Path to OpenOCD, if applicable')
|
|
||||||
group.add_argument(
|
|
||||||
'--openocd-search',
|
|
||||||
help='Path to add to OpenOCD search path, if applicable')
|
|
||||||
|
|
||||||
return parser
|
|
||||||
|
|
||||||
|
|
||||||
def desc_common(command_name):
|
|
||||||
return textwrap.dedent('''\
|
|
||||||
Any options not recognized by this command are passed to the
|
|
||||||
back-end {command} runner (run "west {command} --context"
|
|
||||||
for help on available runner-specific options).
|
|
||||||
|
|
||||||
If you need to pass an option to a runner which has the
|
|
||||||
same name as one recognized by this command, you can
|
|
||||||
end argument parsing with a '--', like so:
|
|
||||||
|
|
||||||
west {command} --{command}-arg=value -- --runner-arg=value2
|
|
||||||
'''.format(**{'command': command_name}))
|
|
||||||
|
|
||||||
|
|
||||||
def cached_runner_config(build_dir, cache):
|
|
||||||
'''Parse the RunnerConfig from a build directory and CMake Cache.'''
|
|
||||||
board_dir = cache['ZEPHYR_RUNNER_CONFIG_BOARD_DIR']
|
|
||||||
elf_file = cache.get('ZEPHYR_RUNNER_CONFIG_ELF_FILE',
|
|
||||||
cache['ZEPHYR_RUNNER_CONFIG_KERNEL_ELF'])
|
|
||||||
hex_file = cache.get('ZEPHYR_RUNNER_CONFIG_HEX_FILE',
|
|
||||||
cache['ZEPHYR_RUNNER_CONFIG_KERNEL_HEX'])
|
|
||||||
bin_file = cache.get('ZEPHYR_RUNNER_CONFIG_BIN_FILE',
|
|
||||||
cache['ZEPHYR_RUNNER_CONFIG_KERNEL_BIN'])
|
|
||||||
gdb = cache.get('ZEPHYR_RUNNER_CONFIG_GDB')
|
|
||||||
openocd = cache.get('ZEPHYR_RUNNER_CONFIG_OPENOCD')
|
|
||||||
openocd_search = cache.get('ZEPHYR_RUNNER_CONFIG_OPENOCD_SEARCH')
|
|
||||||
|
|
||||||
return RunnerConfig(build_dir, board_dir,
|
|
||||||
elf_file, hex_file, bin_file,
|
|
||||||
gdb=gdb, openocd=openocd,
|
|
||||||
openocd_search=openocd_search)
|
|
||||||
|
|
||||||
|
|
||||||
def _override_config_from_namespace(cfg, namespace):
|
|
||||||
'''Override a RunnerConfig's contents with command-line values.'''
|
|
||||||
for var in cfg.__slots__:
|
|
||||||
if var in namespace:
|
|
||||||
val = getattr(namespace, var)
|
|
||||||
if val is not None:
|
|
||||||
setattr(cfg, var, val)
|
|
||||||
|
|
||||||
|
|
||||||
def _build_dir(args, die_if_none=True):
|
|
||||||
# Get the build directory for the given argument list and environment.
|
|
||||||
if args.build_dir:
|
|
||||||
return args.build_dir
|
|
||||||
|
|
||||||
cwd = getcwd()
|
|
||||||
default = path.join(cwd, DEFAULT_BUILD_DIR)
|
|
||||||
if is_zephyr_build(default):
|
|
||||||
return default
|
|
||||||
elif is_zephyr_build(cwd):
|
|
||||||
return cwd
|
|
||||||
elif die_if_none:
|
|
||||||
log.die('--build-dir was not given, and neither {} '
|
|
||||||
'nor {} are zephyr build directories.'.
|
|
||||||
format(default, cwd))
|
|
||||||
else:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def do_run_common(command, args, runner_args, cached_runner_var):
|
|
||||||
if args.context:
|
|
||||||
_dump_context(command, args, runner_args, cached_runner_var)
|
|
||||||
return
|
|
||||||
|
|
||||||
command_name = command.name
|
|
||||||
build_dir = _build_dir(args)
|
|
||||||
|
|
||||||
if not args.skip_rebuild:
|
|
||||||
try:
|
|
||||||
cmake.run_build(build_dir)
|
|
||||||
except CalledProcessError:
|
|
||||||
if args.build_dir:
|
|
||||||
log.die('cannot run {}, build in {} failed'.format(
|
|
||||||
command_name, args.build_dir))
|
|
||||||
else:
|
|
||||||
log.die('cannot run {}; no --build-dir given and build in '
|
|
||||||
'current directory {} failed'.format(command_name,
|
|
||||||
build_dir))
|
|
||||||
|
|
||||||
# Runner creation, phase 1.
|
|
||||||
#
|
|
||||||
# Get the default runner name from the cache, allowing a command
|
|
||||||
# line override. Get the ZephyrBinaryRunner class by name, and
|
|
||||||
# make sure it supports the command.
|
|
||||||
|
|
||||||
cache_file = path.join(build_dir, args.cmake_cache or cmake.DEFAULT_CACHE)
|
|
||||||
cache = cmake.CMakeCache(cache_file)
|
|
||||||
board = cache['CACHED_BOARD']
|
|
||||||
available = cache.get_list('ZEPHYR_RUNNERS')
|
|
||||||
if not available:
|
|
||||||
log.wrn('No cached runners are available in', cache_file)
|
|
||||||
runner = args.runner or cache.get(cached_runner_var)
|
|
||||||
|
|
||||||
if runner is None:
|
|
||||||
raise CommandContextError(textwrap.dedent("""
|
|
||||||
No {} runner available for {}. Please either specify one
|
|
||||||
manually, or check your board's documentation for
|
|
||||||
alternative instructions.""".format(command_name, board)))
|
|
||||||
|
|
||||||
log.inf('Using runner:', runner)
|
|
||||||
if runner not in available:
|
|
||||||
log.wrn('Runner {} is not configured for use with {}, '
|
|
||||||
'this may not work'.format(runner, board))
|
|
||||||
runner_cls = get_runner_cls(runner)
|
|
||||||
if command_name not in runner_cls.capabilities().commands:
|
|
||||||
log.die('Runner {} does not support command {}'.format(
|
|
||||||
runner, command_name))
|
|
||||||
|
|
||||||
# Runner creation, phase 2.
|
|
||||||
#
|
|
||||||
# At this point, the common options above are already parsed in
|
|
||||||
# 'args', and unrecognized arguments are in 'runner_args'.
|
|
||||||
#
|
|
||||||
# - Pull the RunnerConfig out of the cache
|
|
||||||
# - Override cached values with applicable command-line options
|
|
||||||
|
|
||||||
cfg = cached_runner_config(build_dir, cache)
|
|
||||||
_override_config_from_namespace(cfg, args)
|
|
||||||
|
|
||||||
# Runner creation, phase 3.
|
|
||||||
#
|
|
||||||
# - Pull out cached runner arguments, and append command-line
|
|
||||||
# values (which should override the cache)
|
|
||||||
# - Construct a runner-specific argument parser to handle cached
|
|
||||||
# values plus overrides given in runner_args
|
|
||||||
# - Parse arguments and create runner instance from final
|
|
||||||
# RunnerConfig and parsed arguments.
|
|
||||||
|
|
||||||
cached_runner_args = cache.get_list(
|
|
||||||
'ZEPHYR_RUNNER_ARGS_{}'.format(cmake.make_c_identifier(runner)))
|
|
||||||
assert isinstance(runner_args, list), runner_args
|
|
||||||
# If the user passed -- to force the parent argument parser to stop
|
|
||||||
# parsing, it will show up here, and needs to be filtered out.
|
|
||||||
runner_args = [arg for arg in runner_args if arg != '--']
|
|
||||||
final_runner_args = cached_runner_args + runner_args
|
|
||||||
parser = argparse.ArgumentParser(prog=runner)
|
|
||||||
runner_cls.add_parser(parser)
|
|
||||||
parsed_args, unknown = parser.parse_known_args(args=final_runner_args)
|
|
||||||
if unknown:
|
|
||||||
raise CommandContextError('Runner', runner,
|
|
||||||
'received unknown arguments', unknown)
|
|
||||||
runner = runner_cls.create(cfg, parsed_args)
|
|
||||||
runner.run(command_name)
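The phase 3 comments above rely on standard argparse behavior: because the cached runner arguments are placed before the command-line arguments, a later occurrence of the same option wins. A minimal sketch of that override pattern, using a hypothetical --gdb-port option (not west code):

import argparse

parser = argparse.ArgumentParser(prog='example-runner')
parser.add_argument('--gdb-port', type=int, default=3333)

cached_args = ['--gdb-port', '4444']   # pretend these came from the CMake cache
cli_args = ['--gdb-port', '5555']      # pretend these were typed by the user

# Later values for the same option win, so the command line overrides the cache.
parsed, unknown = parser.parse_known_args(cached_args + cli_args)
assert parsed.gdb_port == 5555 and unknown == []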
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# Context-specific help
|
|
||||||
#
|
|
||||||
|
|
||||||
def _dump_context(command, args, runner_args, cached_runner_var):
|
|
||||||
build_dir = _build_dir(args, die_if_none=False)
|
|
||||||
|
|
||||||
# Try to figure out the CMake cache file based on the build
|
|
||||||
# directory or an explicit argument.
|
|
||||||
if build_dir is not None:
|
|
||||||
cache_file = path.abspath(
|
|
||||||
path.join(build_dir, args.cmake_cache or cmake.DEFAULT_CACHE))
|
|
||||||
elif args.cmake_cache:
|
|
||||||
cache_file = path.abspath(args.cmake_cache)
|
|
||||||
else:
|
|
||||||
cache_file = None
|
|
||||||
|
|
||||||
# Load the cache itself, if possible.
|
|
||||||
if cache_file is None:
log.wrn('No build directory (--build-dir) or CMake cache '
'(--cmake-cache) given or found; output will be limited')
cache = None
else:
try:
cache = cmake.CMakeCache(cache_file)
except Exception:
log.die('Cannot load cache {}.'.format(cache_file))
|
|
||||||
|
|
||||||
# If we have a build directory, try to ensure build artifacts are
|
|
||||||
# up to date. If that doesn't work, still try to print information
|
|
||||||
# on a best-effort basis.
|
|
||||||
if build_dir and not args.skip_rebuild:
|
|
||||||
try:
|
|
||||||
cmake.run_build(build_dir)
|
|
||||||
except CalledProcessError:
|
|
||||||
msg = 'Failed re-building application; cannot load context. '
|
|
||||||
if args.build_dir:
|
|
||||||
msg += 'Is {} the right --build-dir?'.format(args.build_dir)
|
|
||||||
else:
|
|
||||||
msg += textwrap.dedent('''\
|
|
||||||
Use --build-dir (-d) to specify a build directory; the one
|
|
||||||
used was {}.'''.format(build_dir))
|
|
||||||
log.die('\n'.join(textwrap.wrap(msg, initial_indent='',
|
|
||||||
subsequent_indent=INDENT,
|
|
||||||
break_on_hyphens=False)))
|
|
||||||
|
|
||||||
if cache is None:
|
|
||||||
_dump_no_context_info(command, args)
|
|
||||||
if not args.runner:
|
|
||||||
return
|
|
||||||
|
|
||||||
if args.runner:
|
|
||||||
# Just information on one runner was requested.
|
|
||||||
_dump_one_runner_info(cache, args, build_dir, INDENT)
|
|
||||||
return
|
|
||||||
|
|
||||||
board = cache['CACHED_BOARD']
|
|
||||||
|
|
||||||
all_cls = {cls.name(): cls for cls in ZephyrBinaryRunner.get_runners() if
|
|
||||||
command.name in cls.capabilities().commands}
|
|
||||||
available = [r for r in cache.get_list('ZEPHYR_RUNNERS') if r in all_cls]
|
|
||||||
available_cls = {r: all_cls[r] for r in available if r in all_cls}
|
|
||||||
|
|
||||||
default_runner = cache.get(cached_runner_var)
|
|
||||||
cfg = cached_runner_config(build_dir, cache)
|
|
||||||
|
|
||||||
log.inf('All Zephyr runners which support {}:'.format(command.name),
|
|
||||||
colorize=True)
|
|
||||||
for line in util.wrap(', '.join(all_cls.keys()), INDENT):
|
|
||||||
log.inf(line)
|
|
||||||
log.inf('(Not all may work with this build, see available runners below.)',
|
|
||||||
colorize=True)
|
|
||||||
|
|
||||||
if cache is None:
log.wrn('Missing or invalid CMake cache; there is no context.',
'Use --build-dir to specify the build directory.')
return
|
|
||||||
|
|
||||||
log.inf('Build directory:', colorize=True)
|
|
||||||
log.inf(INDENT + build_dir)
|
|
||||||
log.inf('Board:', colorize=True)
|
|
||||||
log.inf(INDENT + board)
|
|
||||||
log.inf('CMake cache:', colorize=True)
|
|
||||||
log.inf(INDENT + cache_file)
|
|
||||||
|
|
||||||
if not available:
|
|
||||||
# Bail with a message if no runners are available.
|
|
||||||
msg = ('No runners available for {}. '
|
|
||||||
'Consult the documentation for instructions on how to run '
|
|
||||||
'binaries on this target.').format(board)
|
|
||||||
for line in util.wrap(msg, ''):
|
|
||||||
log.inf(line, colorize=True)
|
|
||||||
return
|
|
||||||
|
|
||||||
log.inf('Available {} runners:'.format(command.name), colorize=True)
|
|
||||||
log.inf(INDENT + ', '.join(available))
|
|
||||||
log.inf('Additional options for available', command.name, 'runners:',
|
|
||||||
colorize=True)
|
|
||||||
for runner in available:
|
|
||||||
_dump_runner_opt_help(runner, all_cls[runner])
|
|
||||||
log.inf('Default {} runner:'.format(command.name), colorize=True)
|
|
||||||
log.inf(INDENT + default_runner)
|
|
||||||
_dump_runner_config(cfg, '', INDENT)
|
|
||||||
log.inf('Runner-specific information:', colorize=True)
|
|
||||||
for runner in available:
|
|
||||||
log.inf('{}{}:'.format(INDENT, runner), colorize=True)
|
|
||||||
_dump_runner_cached_opts(cache, runner, INDENT * 2, INDENT * 3)
|
|
||||||
_dump_runner_caps(available_cls[runner], INDENT * 2)
|
|
||||||
|
|
||||||
if len(available) > 1:
|
|
||||||
log.inf('(Add -r RUNNER to just print information about one runner.)',
|
|
||||||
colorize=True)
|
|
||||||
|
|
||||||
|
|
||||||
def _dump_no_context_info(command, args):
|
|
||||||
all_cls = {cls.name(): cls for cls in ZephyrBinaryRunner.get_runners() if
|
|
||||||
command.name in cls.capabilities().commands}
|
|
||||||
log.inf('All Zephyr runners which support {}:'.format(command.name),
|
|
||||||
colorize=True)
|
|
||||||
for line in util.wrap(', '.join(all_cls.keys()), INDENT):
|
|
||||||
log.inf(line)
|
|
||||||
if not args.runner:
|
|
||||||
log.inf('Add -r RUNNER to print more information about any runner.',
|
|
||||||
colorize=True)
|
|
||||||
|
|
||||||
|
|
||||||
def _dump_one_runner_info(cache, args, build_dir, indent):
|
|
||||||
runner = args.runner
|
|
||||||
cls = get_runner_cls(runner)
|
|
||||||
|
|
||||||
if cache is None:
|
|
||||||
_dump_runner_opt_help(runner, cls)
|
|
||||||
_dump_runner_caps(cls, '')
|
|
||||||
return
|
|
||||||
|
|
||||||
available = runner in cache.get_list('ZEPHYR_RUNNERS')
|
|
||||||
cfg = cached_runner_config(build_dir, cache)
|
|
||||||
|
|
||||||
log.inf('Build directory:', colorize=True)
|
|
||||||
log.inf(INDENT + build_dir)
|
|
||||||
log.inf('Board:', colorize=True)
|
|
||||||
log.inf(INDENT + cache['CACHED_BOARD'])
|
|
||||||
log.inf('CMake cache:', colorize=True)
|
|
||||||
log.inf(INDENT + cache.cache_file)
|
|
||||||
log.inf(runner, 'is available:', 'yes' if available else 'no',
|
|
||||||
colorize=True)
|
|
||||||
_dump_runner_opt_help(runner, cls)
|
|
||||||
_dump_runner_config(cfg, '', indent)
|
|
||||||
if available:
|
|
||||||
_dump_runner_cached_opts(cache, runner, '', indent)
|
|
||||||
_dump_runner_caps(cls, '')
|
|
||||||
if not available:
|
|
||||||
log.wrn('Runner', runner, 'is not configured in this build.')
|
|
||||||
|
|
||||||
|
|
||||||
def _dump_runner_caps(cls, base_indent):
|
|
||||||
log.inf('{}Capabilities:'.format(base_indent), colorize=True)
|
|
||||||
log.inf('{}{}'.format(base_indent + INDENT, cls.capabilities()))
|
|
||||||
|
|
||||||
|
|
||||||
def _dump_runner_opt_help(runner, cls):
|
|
||||||
# Construct and print the usage text
|
|
||||||
dummy_parser = argparse.ArgumentParser(prog='', add_help=False)
|
|
||||||
cls.add_parser(dummy_parser)
|
|
||||||
formatter = dummy_parser._get_formatter()
|
|
||||||
for group in dummy_parser._action_groups:
|
|
||||||
# Break the abstraction to filter out the 'flash', 'debug', etc.
|
|
||||||
# TODO: come up with something cleaner (may require changes
|
|
||||||
# in the runner core).
|
|
||||||
actions = group._group_actions
|
|
||||||
if len(actions) == 1 and actions[0].dest == 'command':
|
|
||||||
# This is the lone positional argument. Skip it.
|
|
||||||
continue
|
|
||||||
formatter.start_section('REMOVE ME')
|
|
||||||
formatter.add_text(group.description)
|
|
||||||
formatter.add_arguments(actions)
|
|
||||||
formatter.end_section()
|
|
||||||
# Get the runner help, with the "REMOVE ME" string gone
|
|
||||||
runner_help = '\n'.join(formatter.format_help().splitlines()[1:])
|
|
||||||
|
|
||||||
log.inf('{} options:'.format(runner), colorize=True)
|
|
||||||
log.inf(runner_help)
|
|
||||||
|
|
||||||
|
|
||||||
def _dump_runner_config(cfg, initial_indent, subsequent_indent):
|
|
||||||
log.inf('{}Cached common runner configuration:'.format(initial_indent),
|
|
||||||
colorize=True)
|
|
||||||
for var in cfg.__slots__:
|
|
||||||
log.inf('{}--{}={}'.format(subsequent_indent, var, getattr(cfg, var)))
|
|
||||||
|
|
||||||
|
|
||||||
def _dump_runner_cached_opts(cache, runner, initial_indent, subsequent_indent):
|
|
||||||
runner_args = _get_runner_args(cache, runner)
|
|
||||||
if not runner_args:
|
|
||||||
return
|
|
||||||
|
|
||||||
log.inf('{}Cached runner-specific options:'.format(initial_indent),
|
|
||||||
colorize=True)
|
|
||||||
for arg in runner_args:
|
|
||||||
log.inf('{}{}'.format(subsequent_indent, arg))
|
|
||||||
|
|
||||||
|
|
||||||
def _get_runner_args(cache, runner):
|
|
||||||
runner_ident = cmake.make_c_identifier(runner)
|
|
||||||
args_var = 'ZEPHYR_RUNNER_ARGS_{}'.format(runner_ident)
|
|
||||||
return cache.get_list(args_var)
@ -1,95 +0,0 @@
# Copyright (c) 2018, Nordic Semiconductor ASA
#
# SPDX-License-Identifier: Apache-2.0

'''
Configuration file handling, using the standard configparser module.
'''

import configparser
import os
import platform

from west.util import west_dir


# Configuration values.
#
# Initially empty, populated in read_config(). Always having this available is
# nice in case something checks configuration values before the configuration
# file has been read (e.g. the log.py functions, to check color settings, and
# tests).
config = configparser.ConfigParser()


def read_config():
    '''
    Reads all configuration files, making the configuration values available as
    a configparser.ConfigParser object in config.config. This object works
    similarly to a dictionary: config.config['foo']['bar'] gets the value for
    key 'bar' in section 'foo'.

    Git conventions for configuration file locations are used. See the FILES
    section in the git-config(1) man page.

    The following configuration files are read.

    System-wide:

        Linux:   /etc/westconfig
        Mac OS:  /usr/local/etc/westconfig
        Windows: %PROGRAMDATA%\\west\\config

    User-specific:

        $XDG_CONFIG_HOME/west/config (on Linux)
          and
        ~/.westconfig

        ($XDG_CONFIG_HOME defaults to ~/.config/ if unset.)

    Instance-specific:

        <West base directory>/west/config

    Configuration values from later configuration files override configuration
    from earlier ones. Instance-specific configuration values have the highest
    precedence, and system-wide the lowest.
    '''

    # Gather (potential) configuration file paths

    # System-wide and user-specific

    if platform.system() == 'Linux':
        # Probably wouldn't hurt to check $XDG_CONFIG_HOME (defaults to
        # ~/.config) on all systems. It's listed in git-config(1). People were
        # iffy about it as of writing though.
        files = ['/etc/westconfig',
                 os.path.join(os.environ.get('XDG_CONFIG_HOME',
                                             os.path.expanduser('~/.config')),
                              'west', 'config')]

    elif platform.system() == 'Darwin':  # Mac OS
        # This was seen on a local machine ($(prefix) = /usr/local)
        files = ['/usr/local/etc/westconfig']
    elif platform.system() == 'Windows':
        # Seen on a local machine
        files = [os.path.expandvars('%PROGRAMDATA%\\west\\config')]

    files.append(os.path.expanduser('~/.westconfig'))

    # Repository-specific

    files.append(os.path.join(west_dir(), 'config'))

    #
    # Parse all existing configuration files
    #

    config.read(files, encoding='utf-8')


def use_colors():
    # Convenience function for reading the color.ui setting
    return config.getboolean('color', 'ui', fallback=True)
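The precedence described in the read_config() docstring falls out of configparser itself: values read later override values read earlier. A small self-contained sketch (not west code; read_string() stands in for the real files):

import configparser

cfg = configparser.ConfigParser()
cfg.read_string('[color]\nui = false\n')   # stand-in for the system-wide file
cfg.read_string('[color]\nui = true\n')    # stand-in for the instance-specific file
# The later (more specific) value wins, matching the precedence described above.
assert cfg.getboolean('color', 'ui') is True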
@ -1,105 +0,0 @@
# Copyright 2018 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Logging module for west

Provides common methods for logging messages to display to the user.'''

from west import config

import colorama
import sys

VERBOSE_NONE = 0
'''Base verbosity level (zero), no verbose messages printed.'''

VERBOSE_NORMAL = 1
'''Base verbosity level, some verbose messages printed.'''

VERBOSE_VERY = 2
'''Very verbose output messages will be printed.'''

VERBOSE_EXTREME = 3
'''Extremely verbose output messages will be printed.'''

VERBOSE = VERBOSE_NONE
'''Global verbosity level. VERBOSE_NONE is the default.'''


def set_verbosity(value):
    '''Set the logging verbosity level.'''
    global VERBOSE
    VERBOSE = int(value)


def dbg(*args, level=VERBOSE_NORMAL):
    '''Print a verbose debug logging message.

    The message is only printed if the current verbosity level is at
    least the given level.'''
    if level > VERBOSE:
        return
    print(*args)


def inf(*args, colorize=False):
    '''Print an informational message.

    colorize (default: False):
      If True, the message is printed in bright green if stdout is a terminal.
    '''

    if not config.use_colors():
        colorize = False

    # This approach colorizes any sep= and end= text too, as expected.
    #
    # colorama automatically strips the ANSI escapes when stdout isn't a
    # terminal (by wrapping sys.stdout).
    if colorize:
        print(colorama.Fore.LIGHTGREEN_EX, end='')

    print(*args)

    if colorize:
        _reset_colors(sys.stdout)


def wrn(*args):
    '''Print a warning.'''

    if config.use_colors():
        print(colorama.Fore.LIGHTRED_EX, end='', file=sys.stderr)

    print('WARNING: ', end='', file=sys.stderr)
    print(*args, file=sys.stderr)

    if config.use_colors():
        _reset_colors(sys.stderr)


def err(*args, fatal=False):
    '''Print an error.'''

    if config.use_colors():
        print(colorama.Fore.LIGHTRED_EX, end='', file=sys.stderr)

    print('FATAL ERROR: ' if fatal else 'ERROR: ', end='', file=sys.stderr)
    print(*args, file=sys.stderr)

    if config.use_colors():
        _reset_colors(sys.stderr)


def die(*args, exit_code=1):
    '''Print a fatal error, and abort with the given exit code.'''
    err(*args, fatal=True)
    sys.exit(exit_code)


def _reset_colors(file):
    # The flush=True avoids issues with unrelated output from commands (usually
    # Git) becoming colorized, due to the final attribute reset ANSI escape
    # getting line-buffered
    print(colorama.Style.RESET_ALL, end='', file=file, flush=True)
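A brief usage sketch of the verbosity gating above (hypothetical calls, assuming the module is importable as west.log): dbg() prints only when the global VERBOSE value, set via set_verbosity(), is at least the message's level.

from west import log

log.set_verbosity(log.VERBOSE_NORMAL)
log.dbg('printed: VERBOSE_NORMAL does not exceed the current level')
log.dbg('suppressed: VERBOSE_VERY exceeds the current level',
        level=log.VERBOSE_VERY)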
@ -1,241 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
# Copyright 2018 Open Source Foundries Limited.
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
|
||||||
|
|
||||||
'''Zephyr RTOS meta-tool (west) main module
|
|
||||||
'''
|
|
||||||
|
|
||||||
|
|
||||||
import argparse
|
|
||||||
import colorama
|
|
||||||
from functools import partial
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from subprocess import CalledProcessError, check_output, DEVNULL
|
|
||||||
|
|
||||||
from west import log
|
|
||||||
from west import config
|
|
||||||
from west.commands import CommandContextError
|
|
||||||
from west.commands.build import Build
|
|
||||||
from west.commands.flash import Flash
|
|
||||||
from west.commands.debug import Debug, DebugServer, Attach
|
|
||||||
from west.commands.project import List, Clone, Fetch, Pull, Rebase, Branch, \
|
|
||||||
Checkout, Diff, Status, Update, ForAll, \
|
|
||||||
WestUpdated
|
|
||||||
from west.manifest import Manifest
|
|
||||||
from west.util import quote_sh_list, in_multirepo_install, west_dir
|
|
||||||
|
|
||||||
IN_MULTIREPO_INSTALL = in_multirepo_install(os.path.dirname(__file__))
|
|
||||||
|
|
||||||
BUILD_FLASH_COMMANDS = [
|
|
||||||
Build(),
|
|
||||||
Flash(),
|
|
||||||
Debug(),
|
|
||||||
DebugServer(),
|
|
||||||
Attach(),
|
|
||||||
]
|
|
||||||
|
|
||||||
PROJECT_COMMANDS = [
|
|
||||||
List(),
|
|
||||||
Clone(),
|
|
||||||
Fetch(),
|
|
||||||
Pull(),
|
|
||||||
Rebase(),
|
|
||||||
Branch(),
|
|
||||||
Checkout(),
|
|
||||||
Diff(),
|
|
||||||
Status(),
|
|
||||||
Update(),
|
|
||||||
ForAll(),
|
|
||||||
]
|
|
||||||
|
|
||||||
# Built-in commands in this West. For compatibility with monorepo
|
|
||||||
# installations of West within the Zephyr tree, we only expose the
|
|
||||||
# project commands if this is a multirepo installation.
|
|
||||||
COMMANDS = BUILD_FLASH_COMMANDS
|
|
||||||
|
|
||||||
if IN_MULTIREPO_INSTALL:
|
|
||||||
COMMANDS += PROJECT_COMMANDS
|
|
||||||
|
|
||||||
|
|
||||||
class InvalidWestContext(RuntimeError):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def command_handler(command, known_args, unknown_args):
|
|
||||||
command.run(known_args, unknown_args)
|
|
||||||
|
|
||||||
|
|
||||||
def set_zephyr_base(args):
|
|
||||||
'''Ensure ZEPHYR_BASE is set, emitting warnings if that's not
|
|
||||||
possible, or if the user is pointing it somewhere different than
|
|
||||||
what the manifest expects.'''
|
|
||||||
zb_env = os.environ.get('ZEPHYR_BASE')
|
|
||||||
|
|
||||||
if args.zephyr_base:
|
|
||||||
# The command line --zephyr-base takes precedence over
|
|
||||||
# everything else.
|
|
||||||
zb = os.path.abspath(args.zephyr_base)
|
|
||||||
zb_origin = 'command line'
|
|
||||||
else:
|
|
||||||
# If the user doesn't specify it concretely, use the project
|
|
||||||
# with path 'zephyr' if that exists, or the ZEPHYR_BASE value
|
|
||||||
# in the calling environment.
|
|
||||||
#
|
|
||||||
# At some point, we need a more flexible way to set environment
|
|
||||||
# variables based on manifest contents, but this is good enough
|
|
||||||
# to get started with and to ask for wider testing.
|
|
||||||
manifest = Manifest.from_file()
|
|
||||||
for project in manifest.projects:
|
|
||||||
if project.path == 'zephyr':
|
|
||||||
zb = project.abspath
|
|
||||||
zb_origin = 'manifest file {}'.format(manifest.path)
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
if zb_env is None:
|
|
||||||
log.wrn('no --zephyr-base given, ZEPHYR_BASE is unset,',
|
|
||||||
'and no manifest project has path "zephyr"')
|
|
||||||
zb = None
|
|
||||||
zb_origin = None
|
|
||||||
else:
|
|
||||||
zb = zb_env
|
|
||||||
zb_origin = 'environment'
|
|
||||||
|
|
||||||
if zb_env and os.path.abspath(zb) != os.path.abspath(zb_env):
|
|
||||||
# The environment ZEPHYR_BASE takes precedence over either the
|
|
||||||
# command line or the manifest, but in normal multi-repo
|
|
||||||
# operation we shouldn't expect to need to set ZEPHYR_BASE to
|
|
||||||
# point to some random place. In practice, this is probably
|
|
||||||
# happening because zephyr-env.sh/cmd was run in some other
|
|
||||||
# zephyr installation, and the user forgot about that.
|
|
||||||
log.wrn('ZEPHYR_BASE={}'.format(zb_env),
|
|
||||||
'in the calling environment, but has been set to',
|
|
||||||
zb, 'instead by the', zb_origin)
|
|
||||||
|
|
||||||
os.environ['ZEPHYR_BASE'] = zb
|
|
||||||
|
|
||||||
log.dbg('ZEPHYR_BASE={} (origin: {})'.format(zb, zb_origin))
|
|
||||||
|
|
||||||
|
|
||||||
def print_version_info():
|
|
||||||
# The bootstrapper will print its own version, as well as that of
|
|
||||||
# the west repository itself, then exit. So if this file is being
|
|
||||||
# asked to print the version, it's because it's being run
|
|
||||||
# directly, and not via the bootstrapper.
|
|
||||||
#
|
|
||||||
# Rather than play tricks like invoking "pip show west" (which
|
|
||||||
# assumes the bootstrapper was installed via pip, the common but
|
|
||||||
# not universal case), refuse the temptation to make guesses and
|
|
||||||
# print an honest answer.
|
|
||||||
log.inf('West bootstrapper version: N/A, not run via bootstrapper')
|
|
||||||
|
|
||||||
# The running west installation.
|
|
||||||
if IN_MULTIREPO_INSTALL:
|
|
||||||
try:
|
|
||||||
desc = check_output(['git', 'describe', '--tags'],
|
|
||||||
stderr=DEVNULL,
|
|
||||||
cwd=os.path.dirname(__file__))
|
|
||||||
west_version = desc.decode(sys.getdefaultencoding()).strip()
|
|
||||||
except CalledProcessError:
|
|
||||||
west_version = 'unknown'
|
|
||||||
else:
|
|
||||||
west_version = 'N/A, monorepo installation'
|
|
||||||
west_src_west = os.path.dirname(__file__)
|
|
||||||
print('West repository version: {} ({})'.
|
|
||||||
format(west_version,
|
|
||||||
os.path.dirname(os.path.dirname(west_src_west))))
|
|
||||||
|
|
||||||
|
|
||||||
def parse_args(argv):
|
|
||||||
# The prog='west' override avoids the absolute path of the main.py script
|
|
||||||
# showing up when West is run via the wrapper
|
|
||||||
west_parser = argparse.ArgumentParser(
|
|
||||||
prog='west', description='The Zephyr RTOS meta-tool.',
|
|
||||||
epilog='Run "west <command> -h" for help on each command.')
|
|
||||||
|
|
||||||
# Remember to update scripts/west-completion.bash if you add or remove
|
|
||||||
# flags
|
|
||||||
|
|
||||||
west_parser.add_argument('-z', '--zephyr-base', default=None,
|
|
||||||
help='''Override the Zephyr base directory. The
|
|
||||||
default is the manifest project with path
|
|
||||||
"zephyr".''')
|
|
||||||
|
|
||||||
west_parser.add_argument('-v', '--verbose', default=0, action='count',
|
|
||||||
help='''Display verbose output. May be given
|
|
||||||
multiple times to increase verbosity.''')
|
|
||||||
|
|
||||||
west_parser.add_argument('-V', '--version', action='store_true')
|
|
||||||
|
|
||||||
subparser_gen = west_parser.add_subparsers(title='commands',
|
|
||||||
dest='command')
|
|
||||||
|
|
||||||
for command in COMMANDS:
|
|
||||||
parser = command.add_parser(subparser_gen)
|
|
||||||
parser.set_defaults(handler=partial(command_handler, command))
|
|
||||||
|
|
||||||
args, unknown = west_parser.parse_known_args(args=argv)
|
|
||||||
|
|
||||||
if args.version:
|
|
||||||
print_version_info()
|
|
||||||
sys.exit(0)
|
|
||||||
|
|
||||||
# Set up logging verbosity before doing anything else, so
|
|
||||||
# e.g. verbose messages related to argument handling errors
|
|
||||||
# work properly.
|
|
||||||
log.set_verbosity(args.verbose)
|
|
||||||
|
|
||||||
if IN_MULTIREPO_INSTALL:
|
|
||||||
set_zephyr_base(args)
|
|
||||||
|
|
||||||
if 'handler' not in args:
|
|
||||||
if IN_MULTIREPO_INSTALL:
|
|
||||||
log.err('west installation found (in {}), but no command given'.
|
|
||||||
format(west_dir()))
|
|
||||||
else:
|
|
||||||
log.err('no west command given')
|
|
||||||
west_parser.print_help(file=sys.stderr)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
return args, unknown
|
|
||||||
|
|
||||||
|
|
||||||
def main(argv=None):
|
|
||||||
# Makes ANSI color escapes work on Windows, and strips them when
|
|
||||||
# stdout/stderr isn't a terminal
|
|
||||||
colorama.init()
|
|
||||||
|
|
||||||
if argv is None:
|
|
||||||
argv = sys.argv[1:]
|
|
||||||
args, unknown = parse_args(argv)
|
|
||||||
|
|
||||||
if IN_MULTIREPO_INSTALL:
|
|
||||||
# Read the configuration files
|
|
||||||
config.read_config()
|
|
||||||
|
|
||||||
for_stack_trace = 'run as "west -v ... {} ..." for a stack trace'.format(
|
|
||||||
args.command)
|
|
||||||
try:
|
|
||||||
args.handler(args, unknown)
|
|
||||||
except WestUpdated:
|
|
||||||
# West has been automatically updated. Restart ourselves to run the
|
|
||||||
# latest version, with the same arguments that we were given.
|
|
||||||
os.execv(sys.executable, [sys.executable] + argv)
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
sys.exit(0)
|
|
||||||
except CalledProcessError as cpe:
|
|
||||||
log.err('command exited with status {}: {}'.format(
|
|
||||||
cpe.args[0], quote_sh_list(cpe.args[1])))
|
|
||||||
if args.verbose:
|
|
||||||
raise
|
|
||||||
else:
|
|
||||||
log.inf(for_stack_trace)
|
|
||||||
except CommandContextError as cce:
|
|
||||||
log.die('command', args.command, 'cannot be run in this context:',
|
|
||||||
*cce.args)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
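The subcommand dispatch used in parse_args() and main() is a common argparse pattern: each subparser stores its handler via set_defaults(), and the caller simply invokes args.handler. A minimal standalone sketch (not west code):

import argparse

parser = argparse.ArgumentParser(prog='example')
subparsers = parser.add_subparsers(dest='command')
hello = subparsers.add_parser('hello')
hello.set_defaults(handler=lambda args: print('ran', args.command))

args = parser.parse_args(['hello'])
args.handler(args)   # prints "ran hello"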
|
|
|
@ -1,132 +0,0 @@
|
||||||
## A pykwalify schema for basic validation of the structure of a
|
|
||||||
## manifest YAML file. (Full validation would require additional work,
|
|
||||||
## e.g. to validate that remote URLs obey the URL format specified in
|
|
||||||
## rfc1738.)
|
|
||||||
##
|
|
||||||
## This schema has similar semantics to the repo XML format:
|
|
||||||
##
|
|
||||||
## https://gerrit.googlesource.com/git-repo/+/master/docs/manifest-format.txt
|
|
||||||
##
|
|
||||||
## However, the features don't map 1:1.
|
|
||||||
|
|
||||||
# The top-level manifest is a map. There may be multiple sections in the
|
|
||||||
# manifest file. Each section can be validated by their own schema.
|
|
||||||
# This schema validates the 'manifest' section.
|
|
||||||
type: map
|
|
||||||
mapping:
|
|
||||||
# The "defaults" key specifies some default values used in the
|
|
||||||
# rest of the manifest.
|
|
||||||
#
|
|
||||||
# The value is a map with the following keys:
|
|
||||||
#
|
|
||||||
# - remote: if given, this is the default remote in each project
|
|
||||||
# - revision: if given, this is the default revision to check
|
|
||||||
# out of each project
|
|
||||||
#
|
|
||||||
# See below for more information about remotes and projects.
|
|
||||||
#
|
|
||||||
# Example:
#
# defaults:
#   remote: zephyrproject-rtos
#   revision: master
|
|
||||||
defaults:
|
|
||||||
required: false
|
|
||||||
type: map
|
|
||||||
mapping:
|
|
||||||
remote:
|
|
||||||
required: false
|
|
||||||
type: str
|
|
||||||
revision:
|
|
||||||
required: false
|
|
||||||
type: str
|
|
||||||
|
|
||||||
# The "remotes" key specifies a sequence of remotes, each of
|
|
||||||
# which has a name and a fetch URL.
|
|
||||||
#
|
|
||||||
# These work like repo remotes, in that they specify a URL
|
|
||||||
# prefix which remote-specific Git repositories hang off of.
|
|
||||||
# (This saves typing and makes it easier to move things around
|
|
||||||
# when most repositories are on the same server or GitHub
|
|
||||||
# organization.)
|
|
||||||
#
|
|
||||||
# Example:
|
|
||||||
#
|
|
||||||
# remotes:
|
|
||||||
# - name: zephyrproject-rtos
|
|
||||||
# url-base: https://github.com/zephyrproject-rtos
|
|
||||||
# - name: developer-fork
|
|
||||||
# url-base: https://github.com/a-developer
|
|
||||||
remotes:
|
|
||||||
required: true
|
|
||||||
type: seq
|
|
||||||
sequence:
|
|
||||||
- type: map
|
|
||||||
mapping:
|
|
||||||
name:
|
|
||||||
required: true
|
|
||||||
type: str
|
|
||||||
url-base:
|
|
||||||
required: true
|
|
||||||
type: str
|
|
||||||
|
|
||||||
# The "projects" key specifies a sequence of "projects",
|
|
||||||
# i.e. Git repositories. These work like repo projects, in that
|
|
||||||
# each project has a name, a remote, and optional additional
|
|
||||||
# metadata.
|
|
||||||
#
|
|
||||||
# Each project is a map with the following keys:
|
|
||||||
#
|
|
||||||
# - name: Mandatory, the name of the git repository. The clone
|
|
||||||
# URL is formed by remote url-base + '/' + name. The name cannot
|
|
||||||
# be one of the reserved values "west" and "manifest".
|
|
||||||
# - remote: Optional, the name of the remote to pull it from.
# If the remote is missing, the 'remote' key in the top-level
# defaults key is used instead. If both are missing, it's an error.
|
|
||||||
# - revision: Optional, the name of the revision to check out.
|
|
||||||
# If not given, the value from the default element will be used.
|
|
||||||
# If both are missing, then the default is 'master'.
|
|
||||||
# - path: Where to clone the repository locally. If missing,
|
|
||||||
# it's cloned at top level in a directory given by its name.
|
|
||||||
# - clone-depth: if given, it is a number which creates a shallow
|
|
||||||
# history in the cloned repository limited to the given number
|
|
||||||
# of commits.
|
|
||||||
#
|
|
||||||
# Example, using default and non-default remotes:
|
|
||||||
#
|
|
||||||
# projects:
|
|
||||||
# # Uses default remote (zephyrproject-rtos), so clone URL is:
|
|
||||||
# # https://github.com/zephyrproject-rtos/zephyr
|
|
||||||
# - name: zephyr
|
|
||||||
# # Manually specified remote; clone URL is:
|
|
||||||
# # https://github.com/a-developer/west
|
|
||||||
# - name: west
|
|
||||||
# remote: developer-fork
|
|
||||||
# # Manually specified remote, clone URL is:
|
|
||||||
# # https://github.com/zephyrproject-rtos/some-vendor-hal
|
|
||||||
# # Local clone path (relative to installation root) is:
|
|
||||||
# # ext/hal/some-vendor
|
|
||||||
# - name: some-vendor-hal
|
|
||||||
# remote: zephyrproject-rtos
|
|
||||||
# path: ext/hal/some-vendor
|
|
||||||
projects:
|
|
||||||
required: true
|
|
||||||
type: seq
|
|
||||||
sequence:
|
|
||||||
- type: map
|
|
||||||
mapping:
|
|
||||||
name:
|
|
||||||
required: true
|
|
||||||
type: str
|
|
||||||
remote:
|
|
||||||
required: false
|
|
||||||
type: str
|
|
||||||
revision:
|
|
||||||
required: false
|
|
||||||
type: text # SHAs could be only numbers
|
|
||||||
path:
|
|
||||||
required: false
|
|
||||||
type: str
|
|
||||||
clone-depth:
|
|
||||||
required: false
|
|
||||||
type: int
|
|
|
@ -1,400 +0,0 @@
|
||||||
# Copyright (c) 2018, Nordic Semiconductor ASA
|
|
||||||
# Copyright 2018, Foundries.io Ltd
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
|
||||||
|
|
||||||
'''Parser and abstract data types for west manifests.
|
|
||||||
|
|
||||||
The main class is Manifest. The recommended method for creating a
|
|
||||||
Manifest instance is via its from_file() or from_data() helper
|
|
||||||
methods.
|
|
||||||
|
|
||||||
There are additionally Defaults, Remote, and Project types defined,
|
|
||||||
which represent the values by the same names in a west
|
|
||||||
manifest. (I.e. "Remote" represents one of the elements in the
|
|
||||||
"remote" sequence in the manifest, and so on.) Some Default values,
|
|
||||||
such as the default project revision, may be supplied by this module
|
|
||||||
if they are not present in the manifest data.'''
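A short sketch of the from_data() helper described above, using a hypothetical one-remote, one-project manifest. Note that constructing Project objects resolves paths against util.west_topdir(), so this only works from inside a west installation.

import yaml
from west.manifest import Manifest

example = '''
manifest:
  remotes:
    - name: example-remote
      url-base: https://example.com/repos
  projects:
    - name: example-project
      remote: example-remote
      revision: master
'''

# Must be run inside a west installation; project paths are resolved
# against the installation's top directory.
manifest = Manifest.from_data(yaml.safe_load(example))
# The clone URL is remote url-base + '/' + project name.
assert manifest.projects[0].url == 'https://example.com/repos/example-project'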
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
import pykwalify.core
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
from west import util, log
|
|
||||||
|
|
||||||
# Todo: take from _bootstrap?
|
|
||||||
# Default west repository URL.
|
|
||||||
WEST_URL_DEFAULT = 'https://github.com/zephyrproject-rtos/west'
|
|
||||||
# Default revision to check out of the west repository.
|
|
||||||
WEST_REV_DEFAULT = 'master'
|
|
||||||
|
|
||||||
META_NAMES = ['west', 'manifest']
|
|
||||||
'''Names of the special "meta-projects", which are reserved and cannot
|
|
||||||
be used to name a project in the manifest file.'''
|
|
||||||
|
|
||||||
MANIFEST_SECTIONS = ['manifest', 'west']
|
|
||||||
'''Sections in the manifest file'''
|
|
||||||
|
|
||||||
|
|
||||||
def default_path():
|
|
||||||
'''Return the path to the default manifest in the west directory.
|
|
||||||
|
|
||||||
Raises WestNotFound if called from outside of a west working directory.'''
|
|
||||||
return os.path.join(util.west_dir(), 'manifest', 'default.yml')
|
|
||||||
|
|
||||||
|
|
||||||
class Manifest:
|
|
||||||
'''Represents the contents of a West manifest file.
|
|
||||||
|
|
||||||
The most convenient way to construct an instance is using the
|
|
||||||
from_file and from_data helper methods.'''
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def from_file(source_file=None, sections=MANIFEST_SECTIONS):
|
|
||||||
'''Create and return a new Manifest object given a source YAML file.
|
|
||||||
|
|
||||||
:param source_file: Path to a YAML file containing the manifest.
|
|
||||||
:param sections: Only parse specified sections from YAML file,
|
|
||||||
default: all sections are parsed.
|
|
||||||
|
|
||||||
If source_file is None, the value returned by default_path()
|
|
||||||
is used.
|
|
||||||
|
|
||||||
Raises MalformedManifest in case of validation errors.'''
|
|
||||||
if source_file is None:
|
|
||||||
source_file = default_path()
|
|
||||||
return Manifest(source_file=source_file, sections=sections)
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def from_data(source_data, sections=MANIFEST_SECTIONS):
|
|
||||||
'''Create and return a new Manifest object given parsed YAML data.
|
|
||||||
|
|
||||||
:param source_data: Parsed YAML data as a Python object.
|
|
||||||
:param sections: Only parse specified sections from YAML data,
|
|
||||||
default: all sections are parsed.
|
|
||||||
|
|
||||||
Raises MalformedManifest in case of validation errors.'''
|
|
||||||
return Manifest(source_data=source_data, sections=sections)
|
|
||||||
|
|
||||||
def __init__(self, source_file=None, source_data=None,
|
|
||||||
sections=MANIFEST_SECTIONS):
|
|
||||||
'''Create a new Manifest object.
|
|
||||||
|
|
||||||
:param source_file: Path to a YAML file containing the manifest.
|
|
||||||
:param source_data: Parsed YAML data as a Python object.
|
|
||||||
:param sections: Only parse specified sections from YAML file,
|
|
||||||
default: all sections are parsed.
|
|
||||||
|
|
||||||
Normally, it is more convenient to use the `from_file` and
|
|
||||||
`from_data` convenience factories than calling the constructor
|
|
||||||
directly.
|
|
||||||
|
|
||||||
Exactly one of the source_file and source_data parameters must
|
|
||||||
be given.
|
|
||||||
|
|
||||||
Raises MalformedManifest in case of validation errors.'''
|
|
||||||
if source_file and source_data:
|
|
||||||
raise ValueError('both source_file and source_data were given')
|
|
||||||
|
|
||||||
if source_file:
|
|
||||||
with open(source_file, 'r') as f:
|
|
||||||
self._data = yaml.safe_load(f.read())
|
|
||||||
path = source_file
|
|
||||||
else:
|
|
||||||
self._data = source_data
|
|
||||||
path = None
|
|
||||||
|
|
||||||
self.path = path
|
|
||||||
'''Path to the file containing the manifest, or None if created
|
|
||||||
from data rather than the file system.'''
|
|
||||||
|
|
||||||
if not self._data:
|
|
||||||
self._malformed('manifest contains no data')
|
|
||||||
|
|
||||||
if 'manifest' not in self._data:
|
|
||||||
self._malformed('manifest contains no manifest element')
|
|
||||||
|
|
||||||
for key in self._data:
|
|
||||||
if key in sections:
|
|
||||||
try:
|
|
||||||
pykwalify.core.Core(
|
|
||||||
source_data=self._data[key],
|
|
||||||
schema_files=[_SCHEMA_PATH[key]]
|
|
||||||
).validate()
|
|
||||||
except pykwalify.errors.SchemaError as e:
|
|
||||||
self._malformed(e, key)
|
|
||||||
|
|
||||||
self.defaults = None
|
|
||||||
'''west.manifest.Defaults object representing default values
|
|
||||||
in the manifest, either as specified by the user or west itself.'''
|
|
||||||
|
|
||||||
self.remotes = None
|
|
||||||
'''Sequence of west.manifest.Remote objects representing manifest
|
|
||||||
remotes.'''
|
|
||||||
|
|
||||||
self.projects = None
|
|
||||||
'''Sequence of west.manifest.Project objects representing manifest
|
|
||||||
projects.
|
|
||||||
|
|
||||||
Each element's values are fully initialized; there is no need
|
|
||||||
to consult the defaults field to supply missing values.'''
|
|
||||||
|
|
||||||
self.west_project = None
|
|
||||||
'''west.manifest.SpecialProject object representing the west meta
|
|
||||||
project.'''
|
|
||||||
|
|
||||||
# Set up the public attributes documented above, as well as
|
|
||||||
# any internal attributes needed to implement the public API.
|
|
||||||
self._load(self._data, sections)
|
|
||||||
|
|
||||||
def get_remote(self, name):
|
|
||||||
'''Get a manifest Remote, given its name.'''
|
|
||||||
return self._remotes_dict[name]
|
|
||||||
|
|
||||||
def _malformed(self, complaint, section='manifest'):
|
|
||||||
context = (' file {} '.format(self.path) if self.path
|
|
||||||
else ' data:\n{}\n'.format(self._data))
|
|
||||||
raise MalformedManifest('Malformed manifest{}(schema: {}):\n{}'
|
|
||||||
.format(context, _SCHEMA_PATH[section],
|
|
||||||
complaint))
|
|
||||||
|
|
||||||
def _load(self, data, sections):
|
|
||||||
# Initialize this instance's fields from values given in the
|
|
||||||
# manifest data, which must be validated according to the schema.
|
|
||||||
if 'west' in sections:
|
|
||||||
west = data.get('west', {})
|
|
||||||
|
|
||||||
url = west.get('url') or WEST_URL_DEFAULT
|
|
||||||
revision = west.get('revision') or WEST_REV_DEFAULT
|
|
||||||
|
|
||||||
self.west_project = SpecialProject('west',
|
|
||||||
url=url,
|
|
||||||
revision=revision,
|
|
||||||
path=os.path.join('west',
|
|
||||||
'west'))
|
|
||||||
|
|
||||||
# Next is the manifest section
|
|
||||||
if 'manifest' not in sections:
|
|
||||||
return
|
|
||||||
|
|
||||||
projects = []
|
|
||||||
project_abspaths = set()
|
|
||||||
|
|
||||||
manifest = data.get('manifest')
|
|
||||||
|
|
||||||
# Map from each remote's name onto that remote's data in the manifest.
|
|
||||||
remotes = tuple(Remote(r['name'], r['url-base']) for r in
|
|
||||||
manifest['remotes'])
|
|
||||||
remotes_dict = {r.name: r for r in remotes}
|
|
||||||
|
|
||||||
# Get any defaults out of the manifest.
|
|
||||||
#
|
|
||||||
# md = manifest defaults (dictionary with values parsed from
|
|
||||||
# the manifest)
|
|
||||||
md = manifest.get('defaults', dict())
|
|
||||||
mdrem = md.get('remote')
|
|
||||||
if mdrem:
|
|
||||||
# The default remote name, if provided, must refer to a
|
|
||||||
# well-defined remote.
|
|
||||||
if mdrem not in remotes_dict:
|
|
||||||
self._malformed('default remote {} is not defined'.
|
|
||||||
format(mdrem))
|
|
||||||
default_remote = remotes_dict[mdrem]
|
|
||||||
default_remote_name = mdrem
|
|
||||||
else:
|
|
||||||
default_remote = None
|
|
||||||
default_remote_name = None
|
|
||||||
defaults = Defaults(remote=default_remote, revision=md.get('revision'))
|
|
||||||
|
|
||||||
# mp = manifest project (dictionary with values parsed from
|
|
||||||
# the manifest)
|
|
||||||
for mp in manifest['projects']:
|
|
||||||
# Validate the project name.
|
|
||||||
name = mp['name']
|
|
||||||
if name in META_NAMES:
|
|
||||||
self._malformed('the name "{}" is reserved and cannot '.
|
|
||||||
format(name) +
|
|
||||||
'be used to name a manifest project')
|
|
||||||
|
|
||||||
# Validate the project remote.
|
|
||||||
remote_name = mp.get('remote', default_remote_name)
|
|
||||||
if remote_name is None:
|
|
||||||
self._malformed('project {} does not specify a remote'.
|
|
||||||
format(name))
|
|
||||||
if remote_name not in remotes_dict:
|
|
||||||
self._malformed('project {} remote {} is not defined'.
|
|
||||||
format(name, remote_name))
|
|
||||||
project = Project(name,
|
|
||||||
remotes_dict[remote_name],
|
|
||||||
defaults,
|
|
||||||
path=mp.get('path'),
|
|
||||||
clone_depth=mp.get('clone-depth'),
|
|
||||||
revision=mp.get('revision'))
|
|
||||||
|
|
||||||
# Two projects cannot have the same path. We use absolute
|
|
||||||
# paths to check for collisions to ensure paths are
|
|
||||||
# normalized (e.g. for case-insensitive file systems or
|
|
||||||
# in cases like on Windows where / or \ may serve as a
|
|
||||||
# path component separator).
|
|
||||||
if project.abspath in project_abspaths:
|
|
||||||
self._malformed('project {} path {} is already in use'.
|
|
||||||
format(project.name, project.path))
|
|
||||||
|
|
||||||
project_abspaths.add(project.abspath)
|
|
||||||
projects.append(project)
|
|
||||||
|
|
||||||
self.defaults = defaults
|
|
||||||
self.remotes = remotes
|
|
||||||
self._remotes_dict = remotes_dict
|
|
||||||
self.projects = tuple(projects)
|
|
||||||
|
|
||||||
|
|
||||||
class MalformedManifest(Exception):
|
|
||||||
'''Exception indicating that west manifest parsing failed due to a
|
|
||||||
malformed value.'''
|
|
||||||
|
|
||||||
|
|
||||||
# Definitions for Manifest attribute types.
|
|
||||||
|
|
||||||
class Defaults:
|
|
||||||
'''Represents default values in a manifest, either specified by the
|
|
||||||
user or by west itself.
|
|
||||||
|
|
||||||
Defaults are neither comparable nor hashable.'''
|
|
||||||
|
|
||||||
__slots__ = 'remote revision'.split()
|
|
||||||
|
|
||||||
def __init__(self, remote=None, revision=None):
|
|
||||||
'''Initialize a defaults value from manifest data.
|
|
||||||
|
|
||||||
:param remote: Remote instance corresponding to the default remote,
|
|
||||||
or None (an actual Remote object, not the name of
|
|
||||||
a remote as a string).
|
|
||||||
:param revision: Default Git revision; 'master' if not given.'''
|
|
||||||
if remote is not None:
|
|
||||||
_wrn_if_not_remote(remote)
|
|
||||||
if revision is None:
|
|
||||||
revision = 'master'
|
|
||||||
|
|
||||||
self.remote = remote
|
|
||||||
self.revision = revision
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
return NotImplemented
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'Defaults(remote={}, revision={})'.format(repr(self.remote),
|
|
||||||
repr(self.revision))
|
|
||||||
|
|
||||||
|
|
||||||
class Remote:
|
|
||||||
'''Represents a remote defined in a west manifest.
|
|
||||||
|
|
||||||
Remotes may be compared for equality, but are not hashable.'''
|
|
||||||
|
|
||||||
__slots__ = 'name url_base'.split()
|
|
||||||
|
|
||||||
def __init__(self, name, url_base):
|
|
||||||
'''Initialize a remote from manifest data.
|
|
||||||
|
|
||||||
:param name: remote's name
|
|
||||||
:param url_base: remote's URL base.'''
|
|
||||||
if url_base.endswith('/'):
|
|
||||||
log.wrn('Remote', name, 'URL base', url_base,
|
|
||||||
'ends with a slash ("/"); these are automatically',
|
|
||||||
'appended by West')
|
|
||||||
|
|
||||||
self.name = name
|
|
||||||
self.url_base = url_base
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
return self.name == other.name and self.url_base == other.url_base
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'Remote(name={}, url_base={})'.format(repr(self.name),
|
|
||||||
repr(self.url_base))
|
|
||||||
|
|
||||||
|
|
||||||
class Project:
|
|
||||||
'''Represents a project defined in a west manifest.
|
|
||||||
|
|
||||||
Projects are neither comparable nor hashable.'''
|
|
||||||
|
|
||||||
__slots__ = 'name remote url path abspath clone_depth revision'.split()
|
|
||||||
|
|
||||||
def __init__(self, name, remote, defaults, path=None, clone_depth=None,
|
|
||||||
revision=None):
|
|
||||||
'''Specify a Project by name, Remote, and optional information.
|
|
||||||
|
|
||||||
:param name: Project's user-defined name in the manifest.
|
|
||||||
:param remote: Remote instance corresponding to this Project as
|
|
||||||
specified in the manifest. This is used to build
|
|
||||||
the project's URL, and is also stored as an attribute.
|
|
||||||
:param defaults: If the revision parameter is not given, the project's
|
|
||||||
revision is set to defaults.revision.
|
|
||||||
:param path: Relative path to the project in the west
|
|
||||||
installation, if present in the manifest. If not given,
|
|
||||||
the project's ``name`` is used.
|
|
||||||
:param clone_depth: Nonnegative integer clone depth if present in
|
|
||||||
the manifest.
|
|
||||||
:param revision: Project revision as given in the manifest, if present.
|
|
||||||
If not given, defaults.revision is used instead.
|
|
||||||
'''
|
|
||||||
_wrn_if_not_remote(remote)
|
|
||||||
|
|
||||||
self.name = name
|
|
||||||
self.remote = remote
|
|
||||||
self.url = remote.url_base + '/' + name
|
|
||||||
self.path = os.path.normpath(path or name)
|
|
||||||
self.abspath = os.path.realpath(os.path.join(util.west_topdir(),
|
|
||||||
self.path))
|
|
||||||
self.clone_depth = clone_depth
|
|
||||||
self.revision = revision or defaults.revision
|
|
||||||
|
|
||||||
def __eq__(self, other):
|
|
||||||
return NotImplemented
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
reprs = [repr(x) for x in
|
|
||||||
(self.name, self.remote, self.url, self.path,
|
|
||||||
self.abspath, self.clone_depth, self.revision)]
|
|
||||||
return ('Project(name={}, remote={}, url={}, path={}, abspath={}, '
|
|
||||||
'clone_depth={}, revision={})').format(*reprs)
|
|
||||||
|
|
||||||
|
|
||||||
class SpecialProject(Project):
|
|
||||||
'''Represents a special project, e.g. the west or manifest project.
|
|
||||||
|
|
||||||
Projects are neither comparable nor hashable.'''
|
|
||||||
|
|
||||||
def __init__(self, name, path=None, revision=None, url=None):
|
|
||||||
'''Specify a SpecialProject by name, URL, and optional information.

:param name: SpecialProject's user-defined name in the manifest.
|
|
||||||
:param path: Relative path to the project in the west
|
|
||||||
installation, if present in the manifest. If None,
|
|
||||||
the project's ``name`` is used.
|
|
||||||
:param revision: Project revision as given in the manifest, if present.
|
|
||||||
:param url: Complete URL for special project.
|
|
||||||
'''
|
|
||||||
self.name = name
|
|
||||||
self.url = url
|
|
||||||
self.path = path or name
|
|
||||||
self.abspath = os.path.realpath(os.path.join(util.west_topdir(),
|
|
||||||
self.path))
|
|
||||||
self.revision = revision
|
|
||||||
self.remote = None
|
|
||||||
self.clone_depth = None
|
|
||||||
|
|
||||||
|
|
||||||
def _wrn_if_not_remote(remote):
|
|
||||||
if not isinstance(remote, Remote):
|
|
||||||
log.wrn('Remote', remote, 'is not a Remote instance')
|
|
||||||
|
|
||||||
|
|
||||||
_SCHEMA_PATH = {'manifest': os.path.join(os.path.dirname(__file__),
|
|
||||||
"manifest-schema.yml"),
|
|
||||||
'west': os.path.join(os.path.dirname(__file__),
|
|
||||||
"_bootstrap",
|
|
||||||
"west-schema.yml")}
|
|
|
@ -1,35 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

from west.runners.core import ZephyrBinaryRunner

# We import these here to ensure the ZephyrBinaryRunner subclasses are
# defined; otherwise, ZephyrBinaryRunner.create_for_shell_script()
# won't work.

# Explicitly silence the unused import warning.
# flake8: noqa: F401
from west.runners import arc
from west.runners import bossac
from west.runners import dfu
from west.runners import esp32
from west.runners import jlink
from west.runners import nios2
from west.runners import nrfjprog
from west.runners import nsim
from west.runners import openocd
from west.runners import pyocd
from west.runners import qemu
from west.runners import xtensa
from west.runners import intel_s1000
from west.runners import blackmagicprobe


def get_runner_cls(runner):
    '''Get a runner's class object, given its name.'''
    for cls in ZephyrBinaryRunner.get_runners():
        if cls.name() == runner:
            return cls
    raise ValueError('unknown runner "{}"'.format(runner))


__all__ = ['ZephyrBinaryRunner', 'get_runner_cls']
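A short usage sketch of get_runner_cls(), mirroring the lookup done in do_run_common(): resolve a runner class by name and check its capabilities before creating it. The 'blackmagicprobe' name comes from the runner module later in this change.

from west.runners import get_runner_cls

cls = get_runner_cls('blackmagicprobe')
# RunnerCaps lists the commands this runner supports.
assert 'flash' in cls.capabilities().commands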
@ -1,107 +0,0 @@
|
||||||
# Copyright (c) 2017 Linaro Limited.
|
|
||||||
# Copyright (c) 2017 Open Source Foundries Limited.
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
|
||||||
|
|
||||||
'''ARC architecture-specific runners.'''
|
|
||||||
|
|
||||||
from os import path
|
|
||||||
|
|
||||||
from west.runners.core import ZephyrBinaryRunner
|
|
||||||
|
|
||||||
DEFAULT_ARC_TCL_PORT = 6333
|
|
||||||
DEFAULT_ARC_TELNET_PORT = 4444
|
|
||||||
DEFAULT_ARC_GDB_PORT = 3333
|
|
||||||
|
|
||||||
|
|
||||||
class EmStarterKitBinaryRunner(ZephyrBinaryRunner):
|
|
||||||
'''Runner front-end for the EM Starterkit board, using openocd.'''
|
|
||||||
|
|
||||||
# This unusual 'flash' implementation matches the original shell script.
|
|
||||||
#
|
|
||||||
# It works by starting a GDB server in a separate session, connecting a
|
|
||||||
# client to it to load the program, and running 'continue' within the
|
|
||||||
# client to execute the application.
|
|
||||||
#
|
|
||||||
|
|
||||||
def __init__(self, cfg,
|
|
||||||
tui=False, tcl_port=DEFAULT_ARC_TCL_PORT,
|
|
||||||
telnet_port=DEFAULT_ARC_TELNET_PORT,
|
|
||||||
gdb_port=DEFAULT_ARC_GDB_PORT):
|
|
||||||
super(EmStarterKitBinaryRunner, self).__init__(cfg)
|
|
||||||
self.gdb_cmd = [cfg.gdb] + (['-tui'] if tui else [])
|
|
||||||
search_args = []
|
|
||||||
if cfg.openocd_search is not None:
|
|
||||||
search_args = ['-s', cfg.openocd_search]
|
|
||||||
self.openocd_cmd = [cfg.openocd or 'openocd'] + search_args
|
|
||||||
self.tcl_port = tcl_port
|
|
||||||
self.telnet_port = telnet_port
|
|
||||||
self.gdb_port = gdb_port
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def name(cls):
|
|
||||||
return 'em-starterkit'
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def do_add_parser(cls, parser):
|
|
||||||
parser.add_argument('--tui', default=False, action='store_true',
|
|
||||||
help='if given, GDB uses -tui')
|
|
||||||
parser.add_argument('--tcl-port', default=DEFAULT_ARC_TCL_PORT,
|
|
||||||
help='openocd TCL port, defaults to 6333')
|
|
||||||
parser.add_argument('--telnet-port', default=DEFAULT_ARC_TELNET_PORT,
|
|
||||||
help='openocd telnet port, defaults to 4444')
|
|
||||||
parser.add_argument('--gdb-port', default=DEFAULT_ARC_GDB_PORT,
|
|
||||||
help='openocd gdb port, defaults to 3333')
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def create(cls, cfg, args):
|
|
||||||
if cfg.gdb is None:
|
|
||||||
raise ValueError('--gdb not provided at command line')
|
|
||||||
|
|
||||||
return EmStarterKitBinaryRunner(
|
|
||||||
cfg,
|
|
||||||
tui=args.tui, tcl_port=args.tcl_port, telnet_port=args.telnet_port,
|
|
||||||
gdb_port=args.gdb_port)
|
|
||||||
|
|
||||||
def do_run(self, command, **kwargs):
|
|
||||||
kwargs['openocd-cfg'] = path.join(self.cfg.board_dir, 'support',
|
|
||||||
'openocd.cfg')
|
|
||||||
|
|
||||||
if command in {'flash', 'debug'}:
|
|
||||||
self.flash_debug(command, **kwargs)
|
|
||||||
else:
|
|
||||||
self.debugserver(**kwargs)
|
|
||||||
|
|
||||||
def flash_debug(self, command, **kwargs):
|
|
||||||
config = kwargs['openocd-cfg']
|
|
||||||
|
|
||||||
server_cmd = (self.openocd_cmd +
|
|
||||||
['-f', config] +
|
|
||||||
['-c', 'tcl_port {}'.format(self.tcl_port),
|
|
||||||
'-c', 'telnet_port {}'.format(self.telnet_port),
|
|
||||||
'-c', 'gdb_port {}'.format(self.gdb_port),
|
|
||||||
'-c', 'init',
|
|
||||||
'-c', 'targets',
|
|
||||||
'-c', 'halt'])
|
|
||||||
|
|
||||||
continue_arg = []
|
|
||||||
if command == 'flash':
|
|
||||||
continue_arg = ['-ex', 'set confirm off', '-ex', 'monitor resume',
|
|
||||||
'-ex', 'quit']
|
|
||||||
|
|
||||||
gdb_cmd = (self.gdb_cmd +
|
|
||||||
['-ex', 'target remote :{}'.format(self.gdb_port),
|
|
||||||
'-ex', 'load'] +
|
|
||||||
continue_arg +
|
|
||||||
[self.cfg.elf_file])
|
|
||||||
|
|
||||||
self.run_server_and_client(server_cmd, gdb_cmd)
|
|
||||||
|
|
||||||
def debugserver(self, **kwargs):
|
|
||||||
config = kwargs['openocd-cfg']
|
|
||||||
cmd = (self.openocd_cmd +
|
|
||||||
['-f', config,
|
|
||||||
'-c', 'init',
|
|
||||||
'-c', 'targets',
|
|
||||||
'-c', 'reset halt'])
|
|
||||||
self.check_call(cmd)
|
|
|
@ -1,96 +0,0 @@
|
||||||
# Copyright (c) 2018 Roman Tataurov <diytronic@yandex.ru>
|
|
||||||
# Modified 2018 Tavish Naruka <tavishnaruka@gmail.com>
|
|
||||||
#
|
|
||||||
# SPDX-License-Identifier: Apache-2.0
|
|
||||||
'''Runner for flashing with Black Magic Probe.'''
|
|
||||||
# https://github.com/blacksphere/blackmagic/wiki
|
|
||||||
|
|
||||||
from west.runners.core import ZephyrBinaryRunner, RunnerCaps
|
|
||||||
|
|
||||||
|
|
||||||
class BlackMagicProbeRunner(ZephyrBinaryRunner):
|
|
||||||
'''Runner front-end for Black Magic probe.'''
|
|
||||||
|
|
||||||
def __init__(self, cfg, gdb_serial):
|
|
||||||
super(BlackMagicProbeRunner, self).__init__(cfg)
|
|
||||||
self.gdb = [cfg.gdb] if cfg.gdb else None
|
|
||||||
self.elf_file = cfg.elf_file
|
|
||||||
self.gdb_serial = gdb_serial
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def name(cls):
|
|
||||||
return 'blackmagicprobe'
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def capabilities(cls):
|
|
||||||
return RunnerCaps(commands={'flash', 'debug', 'attach'})
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def create(cls, cfg, args):
|
|
||||||
return BlackMagicProbeRunner(cfg, args.gdb_serial)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def do_add_parser(cls, parser):
|
|
||||||
parser.add_argument('--gdb-serial', default='/dev/ttyACM0',
|
|
||||||
help='GDB serial port')
|
|
||||||
|
|
||||||
def bmp_flash(self, command, **kwargs):
|
|
||||||
if self.gdb is None:
|
|
||||||
raise ValueError('Cannot flash; gdb is missing')
|
|
||||||
if self.elf_file is None:
raise ValueError('Cannot flash; elf file is missing')
|
|
||||||
command = (self.gdb +
|
|
||||||
['-ex', "set confirm off",
|
|
||||||
'-ex', "target extended-remote {}".format(self.gdb_serial),
|
|
||||||
'-ex', "monitor swdp_scan",
|
|
||||||
'-ex', "attach 1",
|
|
||||||
'-ex', "load {}".format(self.elf_file),
|
|
||||||
'-ex', "kill",
|
|
||||||
'-ex', "quit",
|
|
||||||
'-silent'])
|
|
||||||
self.check_call(command)
|
|
||||||
|
|
||||||
def bmp_attach(self, command, **kwargs):
|
|
||||||
if self.gdb is None:
|
|
||||||
raise ValueError('Cannot attach; gdb is missing')
|
|
||||||
if self.elf_file is None:
|
|
||||||
command = (self.gdb +
|
|
||||||
['-ex', "set confirm off",
|
|
||||||
'-ex', "target extended-remote {}".format(
|
|
||||||
self.gdb_serial),
|
|
||||||
'-ex', "monitor swdp_scan",
|
|
||||||
'-ex', "attach 1"])
|
|
||||||
else:
|
|
||||||
command = (self.gdb +
|
|
||||||
['-ex', "set confirm off",
|
|
||||||
'-ex', "target extended-remote {}".format(
|
|
||||||
self.gdb_serial),
|
|
||||||
'-ex', "monitor swdp_scan",
|
|
||||||
'-ex', "attach 1",
|
|
||||||
'-ex', "file {}".format(self.elf_file)])
|
|
||||||
self.check_call(command)
|
|
||||||
|
|
||||||
def bmp_debug(self, command, **kwargs):
|
|
||||||
if self.gdb is None:
|
|
||||||
raise ValueError('Cannot debug; gdb is missing')
|
|
||||||
if self.elf_file is None:
|
|
||||||
raise ValueError('Cannot debug; elf file is missing')
|
|
||||||
command = (self.gdb +
|
|
||||||
['-ex', "set confirm off",
|
|
||||||
'-ex', "target extended-remote {}".format(self.gdb_serial),
|
|
||||||
'-ex', "monitor swdp_scan",
|
|
||||||
'-ex', "attach 1",
|
|
||||||
'-ex', "file {}".format(self.elf_file),
|
|
||||||
'-ex', "load {}".format(self.elf_file)])
|
|
||||||
self.check_call(command)
|
|
||||||
|
|
||||||
def do_run(self, command, **kwargs):
|
|
||||||
|
|
||||||
if command == 'flash':
|
|
||||||
self.bmp_flash(command, **kwargs)
|
|
||||||
elif command == 'debug':
|
|
||||||
self.bmp_debug(command, **kwargs)
|
|
||||||
elif command == 'attach':
|
|
||||||
self.bmp_attach(command, **kwargs)
|
|
||||||
else:
|
|
||||||
self.bmp_flash(command, **kwargs)
|
|
|
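For illustration only, a minimal sketch of driving this runner from Python rather than through the build system; the module import path, board directory, and tool paths below are assumptions, not taken from the removed sources:

    from west.runners.core import RunnerConfig
    from west.runners.blackmagicprobe import BlackMagicProbeRunner

    # Hypothetical build artifacts and toolchain GDB.
    cfg = RunnerConfig('build', 'boards/arm/my_board',
                       'build/zephyr/zephyr.elf', None, None,
                       gdb='arm-none-eabi-gdb')
    runner = BlackMagicProbeRunner(cfg, gdb_serial='/dev/ttyACM0')
    runner.run('flash')  # dispatches to bmp_flash() above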
@@ -1,54 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''bossac-specific runner (flash only) for Atmel SAM microcontrollers.'''

import platform

from west.runners.core import ZephyrBinaryRunner, RunnerCaps

DEFAULT_BOSSAC_PORT = '/dev/ttyACM0'


class BossacBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for bossac.'''

    def __init__(self, cfg, bossac='bossac', port=DEFAULT_BOSSAC_PORT):
        super(BossacBinaryRunner, self).__init__(cfg)
        self.bossac = bossac
        self.port = port

    @classmethod
    def name(cls):
        return 'bossac'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'})

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--bossac', default='bossac',
                            help='path to bossac, default is bossac')
        parser.add_argument('--bossac-port', default='/dev/ttyACM0',
                            help='serial port to use, default is /dev/ttyACM0')

    @classmethod
    def create(cls, cfg, args):
        return BossacBinaryRunner(cfg, bossac=args.bossac,
                                  port=args.bossac_port)

    def do_run(self, command, **kwargs):
        if platform.system() != 'Linux':
            msg = 'CAUTION: No flash tool for your host system found!'
            raise NotImplementedError(msg)

        cmd_stty = ['stty', '-F', self.port, 'raw', 'ispeed', '1200',
                    'ospeed', '1200', 'cs8', '-cstopb', 'ignpar', 'eol', '255',
                    'eof', '255']
        cmd_flash = [self.bossac, '-p', self.port, '-R', '-e', '-w', '-v',
                     '-b', self.cfg.bin_file]

        self.check_call(cmd_stty)
        self.check_call(cmd_flash)
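As a worked expansion with hypothetical values (port /dev/ttyACM0, bin file build/zephyr/zephyr.bin), do_run() above issues these two commands in order:

    cmd_stty = ['stty', '-F', '/dev/ttyACM0', 'raw', 'ispeed', '1200',
                'ospeed', '1200', 'cs8', '-cstopb', 'ignpar', 'eol', '255',
                'eof', '255']
    cmd_flash = ['bossac', '-p', '/dev/ttyACM0', '-R', '-e', '-w', '-v',
                 '-b', 'build/zephyr/zephyr.bin']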
@@ -1,508 +0,0 @@
#! /usr/bin/env python3

# Copyright (c) 2017 Linaro Limited.
# Copyright (c) 2017 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

"""Zephyr binary runner core interfaces

This provides the core ZephyrBinaryRunner class meant for public use,
as well as some other helpers for concrete runner classes.
"""

import abc
import argparse
import os
import platform
import signal
import subprocess

from west import log
from west.util import quote_sh_list

# Turn on to enable just printing the commands that would be run,
# without actually running them. This can break runners that are expecting
# output or if one command depends on another, so it's just for debugging.
JUST_PRINT = False


class _DebugDummyPopen:

    def terminate(self):
        pass

    def wait(self):
        pass


MAX_PORT = 49151


class NetworkPortHelper:
    '''Helper class for dealing with local IP network ports.'''

    def get_unused_ports(self, starting_from):
        '''Find unused network ports, starting at given values.

        starting_from is an iterable of ports the caller would like to use.

        The return value is an iterable of ports, in the same order, using
        the given values if they were unused, or the next sequentially
        available unused port otherwise.

        Ports may be bound between this call's check and actual usage, so
        callers still need to handle errors involving returned ports.'''
        start = list(starting_from)
        used = self._used_now()
        ret = []

        for desired in start:
            port = desired
            while port in used:
                port += 1
                if port > MAX_PORT:
                    msg = "ports above {} are in use"
                    raise ValueError(msg.format(desired))
            used.add(port)
            ret.append(port)

        return ret

    def _used_now(self):
        handlers = {
            'Windows': self._used_now_windows,
            'Linux': self._used_now_linux,
            'Darwin': self._used_now_darwin,
        }
        handler = handlers[platform.system()]
        return handler()

    def _used_now_windows(self):
        cmd = ['netstat', '-a', '-n', '-p', 'tcp']
        return self._parser_windows(cmd)

    def _used_now_linux(self):
        cmd = ['ss', '-a', '-n', '-t']
        return self._parser_linux(cmd)

    def _used_now_darwin(self):
        cmd = ['netstat', '-a', '-n', '-p', 'tcp']
        return self._parser_darwin(cmd)

    def _parser_windows(self, cmd):
        out = subprocess.check_output(cmd).split(b'\r\n')
        used_bytes = [x.split()[1].rsplit(b':', 1)[1] for x in out
                      if x.startswith(b' TCP')]
        return {int(b) for b in used_bytes}

    def _parser_linux(self, cmd):
        out = subprocess.check_output(cmd).splitlines()[1:]
        used_bytes = [s.split()[3].rsplit(b':', 1)[1] for s in out]
        return {int(b) for b in used_bytes}

    def _parser_darwin(self, cmd):
        out = subprocess.check_output(cmd).split(b'\n')
        used_bytes = [x.split()[3].rsplit(b':', 1)[1] for x in out
                      if x.startswith(b'tcp')]
        return {int(b) for b in used_bytes}
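A small usage sketch (values illustrative): request the conventional GDB and telnet ports, falling back to the next free ports above them if they are already taken:

    helper = NetworkPortHelper()
    gdb_port, telnet_port = helper.get_unused_ports([3333, 4444])
    # e.g. (3333, 4444) on an idle host, or (3334, 4444) if 3333 is bound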
class BuildConfiguration:
    '''This helper class provides access to build-time configuration.

    Configuration options can be read as if the object were a dict,
    either object['CONFIG_FOO'] or object.get('CONFIG_FOO').

    Configuration values in .config and generated_dts_board.conf are
    available.'''

    def __init__(self, build_dir):
        self.build_dir = build_dir
        self.options = {}
        self._init()

    def __getitem__(self, item):
        return self.options[item]

    def get(self, option, *args):
        return self.options.get(option, *args)

    def _init(self):
        build_z = os.path.join(self.build_dir, 'zephyr')
        generated = os.path.join(build_z, 'include', 'generated')
        files = [os.path.join(build_z, '.config'),
                 os.path.join(generated, 'generated_dts_board.conf')]
        for f in files:
            self._parse(f)

    def _parse(self, filename):
        with open(filename, 'r') as f:
            for line in f:
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                option, value = line.split('=', 1)
                self.options[option] = self._parse_value(value)

    def _parse_value(self, value):
        if value.startswith('"') or value.startswith("'"):
            return value.split()
        try:
            return int(value, 0)
        except ValueError:
            return value
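A short usage sketch, assuming 'build' is a Zephyr build directory containing the two files parsed above:

    build_conf = BuildConfiguration('build')
    base = build_conf['CONFIG_FLASH_BASE_ADDRESS']           # KeyError if unset
    has_offset = build_conf.get('CONFIG_HAS_FLASH_LOAD_OFFSET', False)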
class RunnerCaps:
    '''This class represents a runner class's capabilities.

    Each capability is represented as an attribute with the same
    name. Flag attributes are True or False.

    Available capabilities:

    - commands: set of supported commands; default is {'flash',
      'debug', 'debugserver', 'attach'}.

    - flash_addr: whether the runner supports flashing to an
      arbitrary address. Default is False. If true, the runner
      must honor the --dt-flash option.
    '''

    def __init__(self,
                 commands={'flash', 'debug', 'debugserver', 'attach'},
                 flash_addr=False):
        self.commands = commands
        self.flash_addr = bool(flash_addr)

    def __str__(self):
        return 'RunnerCaps(commands={}, flash_addr={})'.format(
            self.commands, self.flash_addr)


class RunnerConfig:
    '''Runner execution-time configuration.

    This is a common object shared by all runners. Individual runners
    can register specific configuration options using their
    do_add_parser() hooks.

    This class's __slots__ contains exactly the configuration variables.
    '''

    __slots__ = ['build_dir', 'board_dir', 'elf_file', 'hex_file',
                 'bin_file', 'gdb', 'openocd', 'openocd_search']

    # TODO: revisit whether we can get rid of some of these. Having
    # tool-specific configuration options here is a layering
    # violation, but it's very convenient to have a single place to
    # store the locations of tools (like gdb and openocd) that are
    # needed by multiple ZephyrBinaryRunner subclasses.
    def __init__(self, build_dir, board_dir,
                 elf_file, hex_file, bin_file,
                 gdb=None, openocd=None, openocd_search=None):
        self.build_dir = build_dir
        '''Zephyr application build directory'''

        self.board_dir = board_dir
        '''Zephyr board directory'''

        self.elf_file = elf_file
        '''Path to the elf file that the runner should operate on'''

        self.hex_file = hex_file
        '''Path to the hex file that the runner should operate on'''

        self.bin_file = bin_file
        '''Path to the bin file that the runner should operate on'''

        self.gdb = gdb
        '''Path to GDB compatible with the target, may be None.'''

        self.openocd = openocd
        '''Path to OpenOCD to use for this target, may be None.'''

        self.openocd_search = openocd_search
        '''Directory to add to OpenOCD search path, may be None.'''


_YN_CHOICES = ['Y', 'y', 'N', 'n', 'yes', 'no', 'YES', 'NO']


class _DTFlashAction(argparse.Action):

    def __call__(self, parser, namespace, values, option_string=None):
        if values.lower().startswith('y'):
            namespace.dt_flash = True
        else:
            namespace.dt_flash = False
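For illustration, the objects a flash-only runner that honors --dt-flash would advertise and receive; all paths are hypothetical:

    caps = RunnerCaps(commands={'flash'}, flash_addr=True)
    cfg = RunnerConfig(build_dir='build',
                       board_dir='boards/arm/my_board',
                       elf_file='build/zephyr/zephyr.elf',
                       hex_file='build/zephyr/zephyr.hex',
                       bin_file='build/zephyr/zephyr.bin',
                       gdb='arm-none-eabi-gdb')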
class ZephyrBinaryRunner(abc.ABC):
    '''Abstract superclass for binary runners (flashers, debuggers).

    **Note**: these APIs are still evolving, and will change!

    With some exceptions, boards supported by Zephyr must provide
    generic means to be flashed (have a Zephyr firmware binary
    permanently installed on the device for running) and debugged
    (have a breakpoint debugger and program loader on a host
    workstation attached to a running target).

    This is supported by four top-level commands managed by the
    Zephyr build system:

    - 'flash': flash a previously configured binary to the board,
      start execution on the target, then return.

    - 'debug': connect to the board via a debugging protocol, program
      the flash, then drop the user into a debugger interface with
      symbol tables loaded from the current binary, and block until it
      exits.

    - 'debugserver': connect via a board-specific debugging protocol,
      then reset and halt the target. Ensure the user is now able to
      connect to a debug server with symbol tables loaded from the
      binary.

    - 'attach': connect to the board via a debugging protocol, then drop
      the user into a debugger interface with symbol tables loaded from
      the current binary, and block until it exits. Unlike 'debug', this
      command does not program the flash.

    This class provides an API for these commands. Every runner has a
    name (like 'pyocd'), and declares commands it can handle (like
    'flash'). Zephyr boards (like 'nrf52_pca10040') declare compatible
    runner(s) by name to the build system, which creates concrete runner
    instances to execute commands via this class.

    If your board can use an existing runner, all you have to do is
    give its name to the build system. How to do that is out of the
    scope of this documentation, but use the existing boards as a
    starting point.

    If you want to define and use your own runner:

    1. Define a ZephyrBinaryRunner subclass, and implement its
       abstract methods. You may need to override capabilities().

    2. Make sure the Python module defining your runner class is
       imported, e.g. by editing this package's __init__.py (otherwise,
       get_runners() won't work).

    3. Give your runner's name to the Zephyr build system in your
       board's build files.

    For command-line invocation from the Zephyr build system, runners
    define their own argparse-based interface through the common
    add_parser() (and runner-specific do_add_parser() it delegates
    to), and provide a way to create instances of themselves from
    a RunnerConfig and parsed runner-specific arguments via create().

    Runners use a variety of target-specific tools and configuration
    values, the user interface to which is abstracted by this
    class. Each runner subclass should take any values it needs to
    execute one of these commands in its constructor. The actual
    command execution is handled in the run() method.'''

    def __init__(self, cfg):
        '''Initialize core runner state.

        `cfg` is a RunnerConfig instance.'''
        self.cfg = cfg

    @staticmethod
    def get_runners():
        '''Get a list of all currently defined runner classes.'''
        return ZephyrBinaryRunner.__subclasses__()

    @classmethod
    @abc.abstractmethod
    def name(cls):
        '''Return this runner's user-visible name.

        When choosing a name, pick something short and lowercase,
        based on the name of the tool (like openocd, jlink, etc.) or
        the target architecture/board (like xtensa, em-starterkit,
        etc.).'''

    @classmethod
    def capabilities(cls):
        '''Returns a RunnerCaps representing this runner's capabilities.

        This implementation returns the default capabilities.

        Subclasses should override appropriately if needed.'''
        return RunnerCaps()

    @classmethod
    def add_parser(cls, parser):
        '''Adds a sub-command parser for this runner.

        The given object, parser, is a sub-command parser from the
        argparse module. For more details, refer to the documentation
        for argparse.ArgumentParser.add_subparsers().

        The lone common optional argument is:

        * --dt-flash (if the runner's capabilities include flash_addr)

        Runner-specific options are added through the do_add_parser()
        hook.'''
        # Common options that depend on runner capabilities.
        if cls.capabilities().flash_addr:
            parser.add_argument('--dt-flash', default='n', choices=_YN_CHOICES,
                                action=_DTFlashAction,
                                help='''If 'yes', use configuration generated
                                by device tree (DT) to compute flash
                                addresses.''')

        # Runner-specific options.
        cls.do_add_parser(parser)

    @classmethod
    @abc.abstractmethod
    def do_add_parser(cls, parser):
        '''Hook for adding runner-specific options.'''

    @classmethod
    @abc.abstractmethod
    def create(cls, cfg, args):
        '''Create an instance from command-line arguments.

        - `cfg`: RunnerConfig instance (pass to superclass __init__)
        - `args`: runner-specific argument namespace parsed from
          execution environment, as specified by `add_parser()`.'''

    @classmethod
    def get_flash_address(cls, args, build_conf, default=0x0):
        '''Helper method for extracting a flash address.

        If args.dt_flash is true, get the address from the
        BuildConfiguration, build_conf. (If
        CONFIG_HAS_FLASH_LOAD_OFFSET is n in that configuration, it
        returns CONFIG_FLASH_BASE_ADDRESS. Otherwise, it returns
        CONFIG_FLASH_BASE_ADDRESS + CONFIG_FLASH_LOAD_OFFSET.)

        Otherwise (when args.dt_flash is False), the default value is
        returned.'''
        if args.dt_flash:
            if build_conf['CONFIG_HAS_FLASH_LOAD_OFFSET']:
                return (build_conf['CONFIG_FLASH_BASE_ADDRESS'] +
                        build_conf['CONFIG_FLASH_LOAD_OFFSET'])
            else:
                return build_conf['CONFIG_FLASH_BASE_ADDRESS']
        else:
            return default
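A worked example of the address arithmetic, using a plain dict in place of a BuildConfiguration and illustrative Kconfig values:

    import argparse
    args = argparse.Namespace(dt_flash=True)
    conf = {'CONFIG_HAS_FLASH_LOAD_OFFSET': True,
            'CONFIG_FLASH_BASE_ADDRESS': 0x08000000,
            'CONFIG_FLASH_LOAD_OFFSET': 0x20000}
    ZephyrBinaryRunner.get_flash_address(args, conf)  # 0x08020000
    ZephyrBinaryRunner.get_flash_address(argparse.Namespace(dt_flash=False),
                                         conf)        # 0x0 (the default)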
    def run(self, command, **kwargs):
        '''Runs command ('flash', 'debug', 'debugserver', 'attach').

        This is the main entry point to this runner.'''
        caps = self.capabilities()
        if command not in caps.commands:
            raise ValueError('runner {} does not implement command {}'.format(
                self.name(), command))
        self.do_run(command, **kwargs)

    @abc.abstractmethod
    def do_run(self, command, **kwargs):
        '''Concrete runner; run() delegates to this. Implement in subclasses.

        In case of an unsupported command, raise a ValueError.'''

    def run_server_and_client(self, server, client):
        '''Run a server that ignores SIGINT, and a client that handles it.

        This routine portably:

        - creates a Popen object for the ``server`` command which ignores
          SIGINT
        - runs ``client`` in a subprocess while temporarily ignoring SIGINT
        - cleans up the server after the client exits.

        It's useful to e.g. open a GDB server and client.'''
        server_proc = self.popen_ignore_int(server)
        previous = signal.signal(signal.SIGINT, signal.SIG_IGN)
        try:
            self.check_call(client)
        finally:
            signal.signal(signal.SIGINT, previous)
            server_proc.terminate()
            server_proc.wait()

    def call(self, cmd):
        '''Subclass subprocess.call() wrapper.

        Subclasses should use this method to run command in a
        subprocess and get its return code, rather than
        using subprocess directly, to keep accurate debug logs.
        '''
        quoted = quote_sh_list(cmd)

        if JUST_PRINT:
            log.inf(quoted)
            return 0

        log.dbg(quoted)
        return subprocess.call(cmd)

    def check_call(self, cmd):
        '''Subclass subprocess.check_call() wrapper.

        Subclasses should use this method to run command in a
        subprocess and check that it executed correctly, rather than
        using subprocess directly, to keep accurate debug logs.
        '''
        quoted = quote_sh_list(cmd)

        if JUST_PRINT:
            log.inf(quoted)
            return

        log.dbg(quoted)
        try:
            subprocess.check_call(cmd)
        except subprocess.CalledProcessError:
            raise

    def check_output(self, cmd):
        '''Subclass subprocess.check_output() wrapper.

        Subclasses should use this method to run command in a
        subprocess and check that it executed correctly, rather than
        using subprocess directly, to keep accurate debug logs.
        '''
        quoted = quote_sh_list(cmd)

        if JUST_PRINT:
            log.inf(quoted)
            return b''

        log.dbg(quoted)
        try:
            return subprocess.check_output(cmd)
        except subprocess.CalledProcessError:
            raise

    def popen_ignore_int(self, cmd):
        '''Spawn a child command, ensuring it ignores SIGINT.

        The returned subprocess.Popen object must be manually terminated.'''
        cflags = 0
        preexec = None
        system = platform.system()
        quoted = quote_sh_list(cmd)

        if system == 'Windows':
            cflags |= subprocess.CREATE_NEW_PROCESS_GROUP
        elif system in {'Linux', 'Darwin'}:
            preexec = os.setsid

        if JUST_PRINT:
            log.inf(quoted)
            return _DebugDummyPopen()

        log.dbg(quoted)
        return subprocess.Popen(cmd, creationflags=cflags, preexec_fn=preexec)
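Following steps 1-3 in the class docstring, a minimal (hypothetical) runner subclass could look like this; a real one would also be imported from the package __init__.py and named in a board's build files:

    class EchoBinaryRunner(ZephyrBinaryRunner):
        '''Toy flash-only runner that just echoes the binary it would use.'''

        @classmethod
        def name(cls):
            return 'echo'

        @classmethod
        def capabilities(cls):
            return RunnerCaps(commands={'flash'})

        @classmethod
        def do_add_parser(cls, parser):
            pass

        @classmethod
        def create(cls, cfg, args):
            return EchoBinaryRunner(cfg)

        def do_run(self, command, **kwargs):
            self.check_call(['echo', self.cfg.bin_file])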
@@ -1,121 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Runner for flashing with dfu-util.'''

from collections import namedtuple
import sys
import time

from west import log
from west.runners.core import ZephyrBinaryRunner, RunnerCaps, \
    BuildConfiguration


DfuSeConfig = namedtuple('DfuSeConfig', ['address', 'options'])


class DfuUtilBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for dfu-util.'''

    def __init__(self, cfg, pid, alt, img, exe='dfu-util',
                 dfuse_config=None):
        super(DfuUtilBinaryRunner, self).__init__(cfg)
        self.alt = alt
        self.img = img
        self.cmd = [exe, '-d,{}'.format(pid)]
        try:
            self.list_pattern = ', alt={},'.format(int(self.alt))
        except ValueError:
            self.list_pattern = ', name="{}",'.format(self.alt)

        if dfuse_config is None:
            self.dfuse = False
        else:
            self.dfuse = True
        self.dfuse_config = dfuse_config

    @classmethod
    def name(cls):
        return 'dfu-util'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'}, flash_addr=True)

    @classmethod
    def do_add_parser(cls, parser):
        # Required:
        parser.add_argument("--pid", required=True,
                            help="USB VID:PID of the board")
        parser.add_argument("--alt", required=True,
                            help="interface alternate setting number or name")

        # Optional:
        parser.add_argument("--img",
                            help="binary to flash, default is --bin-file")
        parser.add_argument("--dfuse", default=False, action='store_true',
                            help='''set if target is a DfuSe device;
                            implies --dt-flash.''')
        parser.add_argument("--dfuse-modifiers", default='leave',
                            help='''colon-separated list of DfuSe modifiers
                            (default is "leave", which starts execution
                            immediately); --dfuse must also be given for this
                            option to take effect.''')
        parser.add_argument('--dfu-util', default='dfu-util',
                            help='dfu-util executable; defaults to "dfu-util"')

    @classmethod
    def create(cls, cfg, args):
        if args.img is None:
            args.img = cfg.bin_file

        if args.dfuse:
            args.dt_flash = True  # --dfuse implies --dt-flash.
            build_conf = BuildConfiguration(cfg.build_dir)
            dcfg = DfuSeConfig(address=cls.get_flash_address(args, build_conf),
                               options=args.dfuse_modifiers)
        else:
            dcfg = None

        return DfuUtilBinaryRunner(cfg, args.pid, args.alt, args.img,
                                   exe=args.dfu_util, dfuse_config=dcfg)

    def find_device(self):
        cmd = list(self.cmd) + ['-l']
        output = self.check_output(cmd)
        output = output.decode(sys.getdefaultencoding())
        return self.list_pattern in output

    def do_run(self, command, **kwargs):
        reset = False
        if not self.find_device():
            reset = True
            log.dbg('Device not found, waiting for it',
                    level=log.VERBOSE_EXTREME)
            # Use of print() here is advised. We don't want to lose
            # this information in a separate log -- this is
            # interactive and requires a terminal.
            print('Please reset your board to switch to DFU mode...')
            while not self.find_device():
                time.sleep(0.1)

        cmd = list(self.cmd)
        if self.dfuse:
            # http://dfu-util.sourceforge.net/dfuse.html
            dcfg = self.dfuse_config
            addr_opts = hex(dcfg.address) + ':' + dcfg.options
            cmd.extend(['-s', addr_opts])
        cmd.extend(['-a', self.alt, '-D', self.img])
        self.check_call(cmd)

        if self.dfuse and 'leave' in dcfg.options.split(':'):
            # Normal DFU devices generally need to be reset to switch
            # back to the flashed program.
            #
            # DfuSe targets do as well, except when 'leave' is given
            # as an option.
            reset = False
        if reset:
            print('Now reset your board again to switch back to runtime mode.')
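An illustrative expansion with hypothetical values: for --pid 0483:df11, --alt 0, a DfuSe address of 0x08000000 and the default 'leave' modifier, the final command built in do_run() is:

    cmd = ['dfu-util', '-d,0483:df11', '-s', '0x8000000:leave',
           '-a', '0', '-D', 'build/zephyr/zephyr.bin']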
@@ -1,100 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Runner for flashing ESP32 devices with esptool/espidf.'''

from os import path

from west import log
from west.runners.core import ZephyrBinaryRunner, RunnerCaps


class Esp32BinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for espidf.'''

    def __init__(self, cfg, device, baud=921600, flash_size='detect',
                 flash_freq='40m', flash_mode='dio', espidf='espidf',
                 bootloader_bin=None, partition_table_bin=None):
        super(Esp32BinaryRunner, self).__init__(cfg)
        self.elf = cfg.elf_file
        self.device = device
        self.baud = baud
        self.flash_size = flash_size
        self.flash_freq = flash_freq
        self.flash_mode = flash_mode
        self.espidf = espidf
        self.bootloader_bin = bootloader_bin
        self.partition_table_bin = partition_table_bin

    @classmethod
    def name(cls):
        return 'esp32'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'})

    @classmethod
    def do_add_parser(cls, parser):
        # Required
        parser.add_argument('--esp-idf-path', required=True,
                            help='path to ESP-IDF')

        # Optional
        parser.add_argument('--esp-device', default='/dev/ttyUSB0',
                            help='serial port to flash, default /dev/ttyUSB0')
        parser.add_argument('--esp-baud-rate', default='921600',
                            help='serial baud rate, default 921600')
        parser.add_argument('--esp-flash-size', default='detect',
                            help='flash size, default "detect"')
        parser.add_argument('--esp-flash-freq', default='40m',
                            help='flash frequency, default "40m"')
        parser.add_argument('--esp-flash-mode', default='dio',
                            help='flash mode, default "dio"')
        parser.add_argument(
            '--esp-tool',
            help='''if given, complete path to espidf. default is to search for
            it in [ESP_IDF_PATH]/components/esptool_py/esptool/esptool.py''')
        parser.add_argument('--esp-flash-bootloader',
                            help='Bootloader image to flash')
        parser.add_argument('--esp-flash-partition_table',
                            help='Partition table to flash')

    @classmethod
    def create(cls, cfg, args):
        if args.esp_tool:
            espidf = args.esp_tool
        else:
            espidf = path.join(args.esp_idf_path, 'components', 'esptool_py',
                               'esptool', 'esptool.py')

        return Esp32BinaryRunner(
            cfg, args.esp_device, baud=args.esp_baud_rate,
            flash_size=args.esp_flash_size, flash_freq=args.esp_flash_freq,
            flash_mode=args.esp_flash_mode, espidf=espidf,
            bootloader_bin=args.esp_flash_bootloader,
            partition_table_bin=args.esp_flash_partition_table)

    def do_run(self, command, **kwargs):
        bin_name = path.splitext(self.elf)[0] + path.extsep + 'bin'
        cmd_convert = [self.espidf, '--chip', 'esp32', 'elf2image', self.elf]
        cmd_flash = [self.espidf, '--chip', 'esp32', '--port', self.device,
                     '--baud', self.baud, '--before', 'default_reset',
                     '--after', 'hard_reset', 'write_flash', '-u',
                     '--flash_mode', self.flash_mode,
                     '--flash_freq', self.flash_freq,
                     '--flash_size', self.flash_size]

        if self.bootloader_bin:
            cmd_flash.extend(['0x1000', self.bootloader_bin])
            cmd_flash.extend(['0x8000', self.partition_table_bin])
            cmd_flash.extend(['0x10000', bin_name])
        else:
            cmd_flash.extend(['0x1000', bin_name])

        log.inf("Converting ELF to BIN")
        self.check_call(cmd_convert)

        log.inf("Flashing ESP32 on {} ({}bps)".format(self.device, self.baud))
        self.check_call(cmd_flash)
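An illustrative expansion with hypothetical values and no bootloader image, with esptool.py resolved from --esp-idf-path as shown in create():

    cmd_convert = ['esptool.py', '--chip', 'esp32', 'elf2image', 'zephyr.elf']
    cmd_flash = ['esptool.py', '--chip', 'esp32', '--port', '/dev/ttyUSB0',
                 '--baud', '921600', '--before', 'default_reset',
                 '--after', 'hard_reset', 'write_flash', '-u',
                 '--flash_mode', 'dio', '--flash_freq', '40m',
                 '--flash_size', 'detect', '0x1000', 'zephyr.bin']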
@@ -1,166 +0,0 @@
# Copyright (c) 2018 Intel Corporation.
# Copyright 2018 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Runner for debugging and flashing Intel S1000 CRB'''
from os import path
import time
import signal
from west import log
from west.runners.core import ZephyrBinaryRunner

DEFAULT_XT_GDB_PORT = 20000


class IntelS1000BinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for Intel S1000.'''

    def __init__(self, cfg, xt_ocd_dir,
                 ocd_topology, ocd_jtag_instr, gdb_flash_file,
                 gdb_port=DEFAULT_XT_GDB_PORT):
        super(IntelS1000BinaryRunner, self).__init__(cfg)
        self.board_dir = cfg.board_dir
        self.elf_name = cfg.elf_file
        self.gdb_cmd = cfg.gdb
        self.xt_ocd_dir = xt_ocd_dir
        self.ocd_topology = ocd_topology
        self.ocd_jtag_instr = ocd_jtag_instr
        self.gdb_flash_file = gdb_flash_file
        self.gdb_port = gdb_port

    @classmethod
    def name(cls):
        return 'intel_s1000'

    @classmethod
    def do_add_parser(cls, parser):
        # Optional
        parser.add_argument(
            '--xt-ocd-dir', default='/opt/tensilica/xocd-12.0.4/xt-ocd',
            help='ocd-dir, defaults to /opt/tensilica/xocd-12.0.4/xt-ocd')
        parser.add_argument(
            '--ocd-topology', default='topology_dsp0_flyswatter2.xml',
            help='ocd-topology, defaults to topology_dsp0_flyswatter2.xml')
        parser.add_argument(
            '--ocd-jtag-instr', default='dsp0_gdb.txt',
            help='ocd-jtag-instr, defaults to dsp0_gdb.txt')
        parser.add_argument(
            '--gdb-flash-file', default='load_elf.txt',
            help='gdb-flash-file, defaults to load_elf.txt')
        parser.add_argument(
            '--gdb-port', default=DEFAULT_XT_GDB_PORT,
            help='xt-gdb port, defaults to 20000')

    @classmethod
    def create(cls, cfg, args):
        return IntelS1000BinaryRunner(
            cfg, args.xt_ocd_dir,
            args.ocd_topology, args.ocd_jtag_instr, args.gdb_flash_file,
            gdb_port=args.gdb_port)

    def do_run(self, command, **kwargs):
        kwargs['ocd-topology'] = path.join(self.board_dir, 'support',
                                           self.ocd_topology)
        kwargs['ocd-jtag-instr'] = path.join(self.board_dir, 'support',
                                             self.ocd_jtag_instr)
        kwargs['gdb-flash-file'] = path.join(self.board_dir, 'support',
                                             self.gdb_flash_file)

        if command == 'flash':
            self.flash(**kwargs)
        elif command == 'debugserver':
            self.debugserver(**kwargs)
        else:
            self.do_debug(**kwargs)

    def flash(self, **kwargs):
        topology_file = kwargs['ocd-topology']
        jtag_instr_file = kwargs['ocd-jtag-instr']
        gdb_flash_file = kwargs['gdb-flash-file']

        self.print_gdbserver_message(self.gdb_port)
        server_cmd = [self.xt_ocd_dir,
                      '-c', topology_file,
                      '-I', jtag_instr_file]

        # Start the server
        # Note that XTOCD takes a few seconds to execute and always fails the
        # first time. It has to be relaunched the second time to work.
        server_proc = self.popen_ignore_int(server_cmd)
        time.sleep(6)
        server_proc.terminate()
        server_proc = self.popen_ignore_int(server_cmd)
        time.sleep(6)

        # Start the client
        gdb_cmd = [self.gdb_cmd, '-x', gdb_flash_file]
        client_proc = self.popen_ignore_int(gdb_cmd)

        # Wait for 3 seconds (waiting for XTGDB to finish loading the image)
        time.sleep(3)

        # At this point, the ELF image is loaded and the program is in
        # execution. Now we can quit the client (xt-gdb) and the server
        # (xt-ocd) as they are not needed anymore. The loaded program
        # (ELF) will continue to run though.
        client_proc.terminate()
        server_proc.terminate()

    def do_debug(self, **kwargs):
        if self.elf_name is None:
            raise ValueError('Cannot debug; elf is missing')
        if self.gdb_cmd is None:
            raise ValueError('Cannot debug; no gdb specified')

        topology_file = kwargs['ocd-topology']
        jtag_instr_file = kwargs['ocd-jtag-instr']

        self.print_gdbserver_message(self.gdb_port)
        server_cmd = [self.xt_ocd_dir,
                      '-c', topology_file,
                      '-I', jtag_instr_file]

        # Start the server
        # Note that XTOCD takes a few seconds to execute and always fails the
        # first time. It has to be relaunched the second time to work.
        server_proc = self.popen_ignore_int(server_cmd)
        time.sleep(6)
        server_proc.terminate()
        server_proc = self.popen_ignore_int(server_cmd)
        time.sleep(6)

        gdb_cmd = [self.gdb_cmd,
                   '-ex', 'target remote :{}'.format(self.gdb_port),
                   self.elf_name]

        # Start the client
        # The below statement will consume the "^C" keypress ensuring
        # the python main application doesn't exit. This is important
        # since ^C in gdb means a "halt" operation.
        previous = signal.signal(signal.SIGINT, signal.SIG_IGN)
        try:
            self.check_call(gdb_cmd)
        finally:
            signal.signal(signal.SIGINT, previous)
            server_proc.terminate()
            server_proc.wait()

    def print_gdbserver_message(self, gdb_port):
        log.inf('Intel S1000 GDB server running on port {}'.format(gdb_port))

    def debugserver(self, **kwargs):
        topology_file = kwargs['ocd-topology']
        jtag_instr_file = kwargs['ocd-jtag-instr']

        self.print_gdbserver_message(self.gdb_port)
        server_cmd = [self.xt_ocd_dir,
                      '-c', topology_file,
                      '-I', jtag_instr_file]

        # Note that XTOCD takes a few seconds to execute and always fails the
        # first time. It has to be relaunched the second time to work.
        server_proc = self.popen_ignore_int(server_cmd)
        time.sleep(6)
        server_proc.terminate()
        self.check_call(server_cmd)
@@ -1,150 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Runner for debugging with J-Link.'''

import os
import tempfile
import sys

from west import log
from west.runners.core import ZephyrBinaryRunner, RunnerCaps, \
    BuildConfiguration

DEFAULT_JLINK_EXE = 'JLink.exe' if sys.platform == 'win32' else 'JLinkExe'
DEFAULT_JLINK_GDB_PORT = 2331


class JLinkBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for the J-Link GDB server.'''

    def __init__(self, cfg, device,
                 commander=DEFAULT_JLINK_EXE,
                 flash_addr=0x0, erase=True,
                 iface='swd', speed='auto',
                 gdbserver='JLinkGDBServer', gdb_port=DEFAULT_JLINK_GDB_PORT,
                 tui=False):
        super(JLinkBinaryRunner, self).__init__(cfg)
        self.bin_name = cfg.bin_file
        self.elf_name = cfg.elf_file
        self.gdb_cmd = [cfg.gdb] if cfg.gdb else None
        self.device = device
        self.commander = commander
        self.flash_addr = flash_addr
        self.erase = erase
        self.gdbserver_cmd = [gdbserver]
        self.iface = iface
        self.speed = speed
        self.gdb_port = gdb_port
        self.tui_arg = ['-tui'] if tui else []

    @classmethod
    def name(cls):
        return 'jlink'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash', 'debug', 'debugserver', 'attach'},
                          flash_addr=True)

    @classmethod
    def do_add_parser(cls, parser):
        # Required:
        parser.add_argument('--device', required=True, help='device name')

        # Optional:
        parser.add_argument('--iface', default='swd',
                            help='interface to use, default is swd')
        parser.add_argument('--speed', default='auto',
                            help='interface speed, default is autodetect')
        parser.add_argument('--tui', default=False, action='store_true',
                            help='if given, GDB uses -tui')
        parser.add_argument('--gdbserver', default='JLinkGDBServer',
                            help='GDB server, default is JLinkGDBServer')
        parser.add_argument('--gdb-port', default=DEFAULT_JLINK_GDB_PORT,
                            help='J-Link GDB server port, defaults to {}'.format(
                                DEFAULT_JLINK_GDB_PORT))
        parser.add_argument('--commander', default=DEFAULT_JLINK_EXE,
                            help='J-Link Commander, default is JLinkExe')
        parser.add_argument('--erase', default=False, action='store_true',
                            help='if given, mass erase flash before loading')

    @classmethod
    def create(cls, cfg, args):
        build_conf = BuildConfiguration(cfg.build_dir)
        flash_addr = cls.get_flash_address(args, build_conf)
        return JLinkBinaryRunner(cfg, args.device,
                                 commander=args.commander,
                                 flash_addr=flash_addr, erase=args.erase,
                                 iface=args.iface, speed=args.speed,
                                 gdbserver=args.gdbserver,
                                 gdb_port=args.gdb_port,
                                 tui=args.tui)

    def print_gdbserver_message(self):
        log.inf('J-Link GDB server running on port {}'.format(self.gdb_port))

    def do_run(self, command, **kwargs):
        server_cmd = (self.gdbserver_cmd +
                      ['-select', 'usb', # only USB connections supported
                       '-port', str(self.gdb_port),
                       '-if', self.iface,
                       '-speed', self.speed,
                       '-device', self.device,
                       '-silent',
                       '-singlerun'])

        if command == 'flash':
            self.flash(**kwargs)
        elif command == 'debugserver':
            self.print_gdbserver_message()
            self.check_call(server_cmd)
        else:
            if self.gdb_cmd is None:
                raise ValueError('Cannot debug; gdb is missing')
            if self.elf_name is None:
                raise ValueError('Cannot debug; elf is missing')
            client_cmd = (self.gdb_cmd +
                          self.tui_arg +
                          [self.elf_name] +
                          ['-ex', 'target remote :{}'.format(self.gdb_port)])
            if command == 'debug':
                client_cmd += ['-ex', 'monitor halt',
                               '-ex', 'monitor reset',
                               '-ex', 'load']
            self.print_gdbserver_message()
            self.run_server_and_client(server_cmd, client_cmd)

    def flash(self, **kwargs):
        if self.bin_name is None:
            raise ValueError('Cannot flash; bin_name is missing')

        lines = ['r'] # Reset and halt the target

        if self.erase:
            lines.append('erase') # Erase all flash sectors

        lines.append('loadfile {} 0x{:x}'.format(self.bin_name,
                                                 self.flash_addr))
        lines.append('g') # Start the CPU
        lines.append('q') # Close the connection and quit

        log.dbg('JLink commander script:')
        log.dbg('\n'.join(lines))

        # Don't use NamedTemporaryFile: the resulting file can't be
        # opened again on Windows.
        with tempfile.TemporaryDirectory(suffix='jlink') as d:
            fname = os.path.join(d, 'runner.jlink')
            with open(fname, 'wb') as f:
                f.writelines(bytes(line + '\n', 'utf-8') for line in lines)

            cmd = ([self.commander] +
                   ['-if', self.iface,
                    '-speed', self.speed,
                    '-device', self.device,
                    '-CommanderScript', fname])

            log.inf('Flashing Target Device')
            self.check_call(cmd)
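As a sketch with hypothetical values (zephyr.bin at 0x08000000, --erase given), the commander script written by flash() above contains:

    lines = ['r',
             'erase',
             'loadfile build/zephyr/zephyr.bin 0x8000000',
             'g',
             'q']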
@@ -1,99 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Runner for NIOS II, based on quartus-flash.py and GDB.'''

from west import log
from west.runners.core import ZephyrBinaryRunner, NetworkPortHelper


class Nios2BinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for NIOS II.'''

    # From the original shell script:
    #
    # "XXX [flash] only support[s] cases where the .elf is sent
    # over the JTAG and the CPU directly boots from __start. CONFIG_XIP
    # and CONFIG_INCLUDE_RESET_VECTOR must be disabled."

    def __init__(self, cfg, quartus_py=None, cpu_sof=None, tui=False):
        super(Nios2BinaryRunner, self).__init__(cfg)
        self.hex_name = cfg.hex_file
        self.elf_name = cfg.elf_file
        self.cpu_sof = cpu_sof
        self.quartus_py = quartus_py
        self.gdb_cmd = [cfg.gdb] if cfg.gdb else None
        self.tui_arg = ['-tui'] if tui else []

    @classmethod
    def name(cls):
        return 'nios2'

    @classmethod
    def do_add_parser(cls, parser):
        # TODO merge quartus-flash.py script into this file.
        parser.add_argument('--quartus-flash', required=True)
        parser.add_argument('--cpu-sof', required=True,
                            help='path to the CPU .sof data')
        parser.add_argument('--tui', default=False, action='store_true',
                            help='if given, GDB uses -tui')

    @classmethod
    def create(cls, cfg, args):
        return Nios2BinaryRunner(cfg,
                                 quartus_py=args.quartus_flash,
                                 cpu_sof=args.cpu_sof,
                                 tui=args.tui)

    def do_run(self, command, **kwargs):
        if command == 'flash':
            self.flash(**kwargs)
        else:
            self.debug_debugserver(command, **kwargs)

    def flash(self, **kwargs):
        if self.quartus_py is None:
            raise ValueError('Cannot flash; --quartus-flash not given.')
        if self.cpu_sof is None:
            raise ValueError('Cannot flash; --cpu-sof not given.')

        cmd = [self.quartus_py,
               '--sof', self.cpu_sof,
               '--kernel', self.hex_name]

        self.check_call(cmd)

    def print_gdbserver_message(self, gdb_port):
        log.inf('Nios II GDB server running on port {}'.format(gdb_port))

    def debug_debugserver(self, command, **kwargs):
        # Per comments in the shell script, the NIOSII GDB server
        # doesn't exit gracefully, so it's better to explicitly search
        # for an unused port. The script picks a random value between
        # 1024 and 49151, but we'll start with the
        # "traditional" 3333 choice.
        gdb_start = 3333
        nh = NetworkPortHelper()
        gdb_port = nh.get_unused_ports([gdb_start])[0]

        server_cmd = (['nios2-gdb-server',
                       '--tcpport', str(gdb_port),
                       '--stop', '--reset-target'])

        if command == 'debugserver':
            self.print_gdbserver_message(gdb_port)
            self.check_call(server_cmd)
        else:
            if self.elf_name is None:
                raise ValueError('Cannot debug; elf is missing')
            if self.gdb_cmd is None:
                raise ValueError('Cannot debug; no gdb specified')

            gdb_cmd = (self.gdb_cmd +
                       self.tui_arg +
                       [self.elf_name,
                        '-ex', 'target remote :{}'.format(gdb_port)])

            self.print_gdbserver_message(gdb_port)
            self.run_server_and_client(server_cmd, gdb_cmd)
@@ -1,129 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Runner for flashing with nrfjprog.'''

import sys

from west import log
from west.runners.core import ZephyrBinaryRunner, RunnerCaps


class NrfJprogBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for nrfjprog.'''

    def __init__(self, cfg, family, softreset, snr, erase=False):
        super(NrfJprogBinaryRunner, self).__init__(cfg)
        self.hex_ = cfg.hex_file
        self.family = family
        self.softreset = softreset
        self.snr = snr
        self.erase = erase

    @classmethod
    def name(cls):
        return 'nrfjprog'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash'})

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--nrf-family', required=True,
                            choices=['NRF51', 'NRF52'],
                            help='family of nRF MCU')
        parser.add_argument('--softreset', required=False,
                            action='store_true',
                            help='use reset instead of pinreset')
        parser.add_argument('--erase', action='store_true',
                            help='if given, mass erase flash before loading')
        parser.add_argument('--snr', required=False,
                            help='serial number of board to use')

    @classmethod
    def create(cls, cfg, args):
        return NrfJprogBinaryRunner(cfg, args.nrf_family, args.softreset,
                                    args.snr, erase=args.erase)

    def get_board_snr_from_user(self):
        snrs = self.check_output(['nrfjprog', '--ids'])
        snrs = snrs.decode(sys.getdefaultencoding()).strip().splitlines()

        if len(snrs) == 0:
            raise RuntimeError('"nrfjprog --ids" did not find a board; '
                               'is the board connected?')
        elif len(snrs) == 1:
            board_snr = snrs[0]
            if board_snr == '0':
                raise RuntimeError('"nrfjprog --ids" returned 0; '
                                   'is a debugger already connected?')
            return board_snr

        log.dbg("Refusing the temptation to guess a board",
                level=log.VERBOSE_EXTREME)

        # Use of print() here is advised. We don't want to lose
        # this information in a separate log -- this is
        # interactive and requires a terminal.
        print('There are multiple boards connected.')
        for i, snr in enumerate(snrs, 1):
            print('{}. {}'.format(i, snr))

        p = 'Please select one with desired serial number (1-{}): '.format(
            len(snrs))
        while True:
            value = input(p)
            try:
                value = int(value)
            except ValueError:
                continue
            if 1 <= value <= len(snrs):
                break

        return snrs[value - 1]

    def do_run(self, command, **kwargs):
        commands = []
        if self.snr is None:
            board_snr = self.get_board_snr_from_user()
        else:
            board_snr = self.snr.lstrip("0")
        program_cmd = ['nrfjprog', '--program', self.hex_, '-f', self.family,
                       '--snr', board_snr]

        print('Flashing file: {}'.format(self.hex_))
        if self.erase:
            commands.extend([
                ['nrfjprog',
                 '--eraseall',
                 '-f', self.family,
                 '--snr', board_snr],
                program_cmd
            ])
        else:
            if self.family == 'NRF51':
                commands.append(program_cmd + ['--sectorerase'])
            else:
                commands.append(program_cmd + ['--sectoranduicrerase'])

        if self.family == 'NRF52' and not self.softreset:
            commands.extend([
                # Enable pin reset
                ['nrfjprog', '--pinresetenable', '-f', self.family,
                 '--snr', board_snr],
            ])

        if self.softreset:
            commands.append(['nrfjprog', '--reset', '-f', self.family,
                             '--snr', board_snr])
        else:
            commands.append(['nrfjprog', '--pinreset', '-f', self.family,
                             '--snr', board_snr])

        for cmd in commands:
            self.check_call(cmd)

        log.inf('Board with serial number {} flashed successfully.'.format(
            board_snr))
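An illustrative expansion (serial number hypothetical): for an NRF52 board without --erase or --softreset, do_run() above ends up issuing:

    commands = [
        ['nrfjprog', '--program', 'build/zephyr/zephyr.hex', '-f', 'NRF52',
         '--snr', '683123456', '--sectoranduicrerase'],
        ['nrfjprog', '--pinresetenable', '-f', 'NRF52', '--snr', '683123456'],
        ['nrfjprog', '--pinreset', '-f', 'NRF52', '--snr', '683123456'],
    ]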
@@ -1,94 +0,0 @@
# Copyright (c) 2018 Synopsys Inc.
# Copyright (c) 2017 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''ARC architecture-specific runners.'''

from os import path

from west.runners.core import ZephyrBinaryRunner

DEFAULT_ARC_GDB_PORT = 3333
DEFAULT_PROPS_FILE = 'nsim.props'


class NsimBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for the ARC nSIM simulator.'''

    # This unusual 'flash' implementation matches the original shell script.
    #
    # It works by starting a GDB server in a separate session, connecting a
    # client to it to load the program, and running 'continue' within the
    # client to execute the application.
    #

    def __init__(self, cfg,
                 tui=False,
                 gdb_port=DEFAULT_ARC_GDB_PORT,
                 props=DEFAULT_PROPS_FILE):
        super(NsimBinaryRunner, self).__init__(cfg)
        self.gdb_cmd = [cfg.gdb] + (['-tui'] if tui else [])
        self.nsim_cmd = ['nsimdrv']
        self.gdb_port = gdb_port
        self.props = props

    @classmethod
    def name(cls):
        return 'arc-nsim'

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--gdb-port', default=DEFAULT_ARC_GDB_PORT,
                            help='nsim gdb port, defaults to 3333')
        parser.add_argument('--props', default=DEFAULT_PROPS_FILE,
                            help='nsim props file, defaults to nsim.props')

    @classmethod
    def create(cls, cfg, args):
        if cfg.gdb is None:
            raise ValueError('--gdb not provided at command line')

        return NsimBinaryRunner(
            cfg,
            gdb_port=args.gdb_port,
            props=args.props)

    def do_run(self, command, **kwargs):
        kwargs['nsim-cfg'] = path.join(self.cfg.board_dir, 'support',
                                       self.props)

        if command == 'flash':
            self.do_flash(**kwargs)
        elif command == 'debug':
            self.do_debug(**kwargs)
        else:
            self.debugserver(**kwargs)

    def do_flash(self, **kwargs):
        config = kwargs['nsim-cfg']

        cmd = (self.nsim_cmd + ['-propsfile', config, self.cfg.elf_file])
        self.check_call(cmd)

    def do_debug(self, **kwargs):
        config = kwargs['nsim-cfg']

        server_cmd = (self.nsim_cmd + ['-gdb',
                                       '-port={}'.format(self.gdb_port),
                                       '-propsfile', config])

        gdb_cmd = (self.gdb_cmd +
                   ['-ex', 'target remote :{}'.format(self.gdb_port),
                    '-ex', 'load', self.cfg.elf_file])

        self.run_server_and_client(server_cmd, gdb_cmd)

    def debugserver(self, **kwargs):
        config = kwargs['nsim-cfg']

        cmd = (self.nsim_cmd +
               ['-gdb', '-port={}'.format(self.gdb_port),
                '-propsfile', config])

        self.check_call(cmd)
@ -1,145 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Runner for openocd.'''

from os import path

from west.runners.core import ZephyrBinaryRunner

DEFAULT_OPENOCD_TCL_PORT = 6333
DEFAULT_OPENOCD_TELNET_PORT = 4444
DEFAULT_OPENOCD_GDB_PORT = 3333


class OpenOcdBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for openocd.'''

    def __init__(self, cfg,
                 pre_cmd=None, load_cmd=None, verify_cmd=None, post_cmd=None,
                 tui=None,
                 tcl_port=DEFAULT_OPENOCD_TCL_PORT,
                 telnet_port=DEFAULT_OPENOCD_TELNET_PORT,
                 gdb_port=DEFAULT_OPENOCD_GDB_PORT):
        super(OpenOcdBinaryRunner, self).__init__(cfg)
        self.openocd_config = path.join(cfg.board_dir, 'support',
                                        'openocd.cfg')

        search_args = []
        if cfg.openocd_search is not None:
            search_args = ['-s', cfg.openocd_search]
        self.openocd_cmd = [cfg.openocd] + search_args
        self.elf_name = cfg.elf_file
        self.load_cmd = load_cmd
        self.verify_cmd = verify_cmd
        self.pre_cmd = pre_cmd
        self.post_cmd = post_cmd
        self.tcl_port = tcl_port
        self.telnet_port = telnet_port
        self.gdb_port = gdb_port
        self.gdb_cmd = [cfg.gdb] if cfg.gdb else None
        self.tui_arg = ['-tui'] if tui else []

    @classmethod
    def name(cls):
        return 'openocd'

    @classmethod
    def do_add_parser(cls, parser):
        # Options for flashing:
        parser.add_argument('--cmd-pre-load',
                            help='Command to run before flashing')
        parser.add_argument('--cmd-load',
                            help='''Command to load/flash binary
                            (required when flashing)''')
        parser.add_argument('--cmd-verify',
                            help='''Command to verify flashed binary''')
        parser.add_argument('--cmd-post-verify',
                            help='Command to run after verification')

        # Options for debugging:
        parser.add_argument('--tui', default=False, action='store_true',
                            help='if given, GDB uses -tui')
        parser.add_argument('--tcl-port', default=DEFAULT_OPENOCD_TCL_PORT,
                            help='openocd TCL port, defaults to 6333')
        parser.add_argument('--telnet-port',
                            default=DEFAULT_OPENOCD_TELNET_PORT,
                            help='openocd telnet port, defaults to 4444')
        parser.add_argument('--gdb-port', default=DEFAULT_OPENOCD_GDB_PORT,
                            help='openocd gdb port, defaults to 3333')

    @classmethod
    def create(cls, cfg, args):
        return OpenOcdBinaryRunner(
            cfg,
            pre_cmd=args.cmd_pre_load, load_cmd=args.cmd_load,
            verify_cmd=args.cmd_verify, post_cmd=args.cmd_post_verify,
            tui=args.tui,
            tcl_port=args.tcl_port, telnet_port=args.telnet_port,
            gdb_port=args.gdb_port)

    def do_run(self, command, **kwargs):
        if command == 'flash':
            self.do_flash(**kwargs)
        elif command == 'debug':
            self.do_debug(**kwargs)
        else:
            self.do_debugserver(**kwargs)

    def do_flash(self, **kwargs):
        if self.load_cmd is None:
            raise ValueError('Cannot flash; load command is missing')
        if self.verify_cmd is None:
            raise ValueError('Cannot flash; verify command is missing')

        pre_cmd = []
        if self.pre_cmd is not None:
            pre_cmd = ['-c', self.pre_cmd]

        post_cmd = []
        if self.post_cmd is not None:
            post_cmd = ['-c', self.post_cmd]

        cmd = (self.openocd_cmd +
               ['-f', self.openocd_config,
                '-c', 'init',
                '-c', 'targets'] +
               pre_cmd +
               ['-c', 'reset halt',
                '-c', self.load_cmd,
                '-c', 'reset halt',
                '-c', self.verify_cmd] +
               post_cmd +
               ['-c', 'reset run',
                '-c', 'shutdown'])
        self.check_call(cmd)

    def do_debug(self, **kwargs):
        if self.gdb_cmd is None:
            raise ValueError('Cannot debug; no gdb specified')
        if self.elf_name is None:
            raise ValueError('Cannot debug; no .elf specified')

        server_cmd = (self.openocd_cmd +
                      ['-f', self.openocd_config,
                       '-c', 'tcl_port {}'.format(self.tcl_port),
                       '-c', 'telnet_port {}'.format(self.telnet_port),
                       '-c', 'gdb_port {}'.format(self.gdb_port),
                       '-c', 'init',
                       '-c', 'targets',
                       '-c', 'halt'])

        gdb_cmd = (self.gdb_cmd + self.tui_arg +
                   ['-ex', 'target remote :{}'.format(self.gdb_port),
                    self.elf_name])

        self.run_server_and_client(server_cmd, gdb_cmd)

    def do_debugserver(self, **kwargs):
        cmd = (self.openocd_cmd +
               ['-f', self.openocd_config,
                '-c', 'init',
                '-c', 'targets',
                '-c', 'reset halt'])
        self.check_call(cmd)
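# Illustrative sketch only, not part of the removed file: roughly the command
# line that the do_flash() assembly above produces for a hypothetical board
# with no pre/post commands. The search path, config path, and load/verify
# commands below are made-up placeholders.
import shlex

openocd_cmd = ['openocd', '-s', '/opt/openocd/scripts']
load_cmd = 'flash write_image erase build/zephyr/zephyr.hex'
verify_cmd = 'verify_image build/zephyr/zephyr.hex'

cmd = (openocd_cmd +
       ['-f', 'boards/foo_board/support/openocd.cfg',
        '-c', 'init',
        '-c', 'targets',
        '-c', 'reset halt',
        '-c', load_cmd,
        '-c', 'reset halt',
        '-c', verify_cmd,
        '-c', 'reset run',
        '-c', 'shutdown'])

# Print the equivalent shell command line for inspection.
print(' '.join(shlex.quote(s) for s in cmd))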
@ -1,169 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Runner for pyOCD.'''

import os

from west.runners.core import ZephyrBinaryRunner, RunnerCaps, \
    BuildConfiguration
from west import log

DEFAULT_PYOCD_GDB_PORT = 3333


class PyOcdBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for pyOCD.'''

    def __init__(self, cfg, target,
                 flashtool='pyocd-flashtool', flash_addr=0x0,
                 flashtool_opts=None,
                 gdbserver='pyocd-gdbserver',
                 gdb_port=DEFAULT_PYOCD_GDB_PORT, tui=False,
                 board_id=None, daparg=None, frequency=None):
        super(PyOcdBinaryRunner, self).__init__(cfg)

        self.target_args = ['-t', target]
        self.flashtool = flashtool
        self.flash_addr_args = ['-a', hex(flash_addr)] if flash_addr else []
        self.gdb_cmd = [cfg.gdb] if cfg.gdb is not None else None
        self.gdbserver = gdbserver
        self.gdb_port = gdb_port
        self.tui_args = ['-tui'] if tui else []
        self.hex_name = cfg.hex_file
        self.bin_name = cfg.bin_file
        self.elf_name = cfg.elf_file

        board_args = []
        if board_id is not None:
            board_args = ['-b', board_id]
        self.board_args = board_args

        daparg_args = []
        if daparg is not None:
            daparg_args = ['-da', daparg]
        self.daparg_args = daparg_args

        frequency_args = []
        if frequency is not None:
            frequency_args = ['-f', frequency]
        self.frequency_args = frequency_args

        self.flashtool_extra = flashtool_opts if flashtool_opts else []

    @classmethod
    def name(cls):
        return 'pyocd'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'flash', 'debug', 'debugserver', 'attach'},
                          flash_addr=True)

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--target', required=True,
                            help='target override')

        parser.add_argument('--daparg',
                            help='Additional -da arguments to pyocd tool')
        parser.add_argument('--flashtool', default='pyocd-flashtool',
                            help='flash tool path, default is pyocd-flashtool')
        parser.add_argument('--flashtool-opt', default=[], action='append',
                            help='''Additional options for pyocd-flashtool,
                            e.g. -ce to chip erase''')
        parser.add_argument('--frequency',
                            help='SWD clock frequency in Hz')
        parser.add_argument('--gdbserver', default='pyocd-gdbserver',
                            help='GDB server, default is pyocd-gdbserver')
        parser.add_argument('--gdb-port', default=DEFAULT_PYOCD_GDB_PORT,
                            help='pyocd gdb port, defaults to {}'.format(
                                DEFAULT_PYOCD_GDB_PORT))
        parser.add_argument('--tui', default=False, action='store_true',
                            help='if given, GDB uses -tui')
        parser.add_argument('--board-id',
                            help='ID of board to flash, default is to prompt')

    @classmethod
    def create(cls, cfg, args):
        daparg = os.environ.get('PYOCD_DAPARG')
        if daparg:
            log.wrn('Setting PYOCD_DAPARG in the environment is',
                    'deprecated; use the --daparg option instead.')
            if args.daparg is None:
                log.dbg('Missing --daparg set to {} from environment'.format(
                    daparg), level=log.VERBOSE_VERY)
                args.daparg = daparg

        build_conf = BuildConfiguration(cfg.build_dir)
        flash_addr = cls.get_flash_address(args, build_conf)

        return PyOcdBinaryRunner(
            cfg, args.target, flashtool=args.flashtool,
            flash_addr=flash_addr, flashtool_opts=args.flashtool_opt,
            gdbserver=args.gdbserver, gdb_port=args.gdb_port, tui=args.tui,
            board_id=args.board_id, daparg=args.daparg,
            frequency=args.frequency)

    def port_args(self):
        return ['-p', str(self.gdb_port)]

    def do_run(self, command, **kwargs):
        if command == 'flash':
            self.flash(**kwargs)
        else:
            self.debug_debugserver(command, **kwargs)

    def flash(self, **kwargs):
        if os.path.isfile(self.hex_name):
            fname = self.hex_name
        elif os.path.isfile(self.bin_name):
            fname = self.bin_name
        else:
            raise ValueError(
                'Cannot flash; no hex ({}) or bin ({}) files'.format(
                    self.hex_name, self.bin_name))

        cmd = ([self.flashtool] +
               self.flash_addr_args +
               self.daparg_args +
               self.target_args +
               self.board_args +
               self.frequency_args +
               self.flashtool_extra +
               [fname])

        log.inf('Flashing Target Device')
        self.check_call(cmd)

    def print_gdbserver_message(self):
        log.inf('pyOCD GDB server running on port {}'.format(self.gdb_port))

    def debug_debugserver(self, command, **kwargs):
        server_cmd = ([self.gdbserver] +
                      self.daparg_args +
                      self.port_args() +
                      self.target_args +
                      self.board_args +
                      self.frequency_args)

        if command == 'debugserver':
            self.print_gdbserver_message()
            self.check_call(server_cmd)
        else:
            if self.gdb_cmd is None:
                raise ValueError('Cannot debug; gdb is missing')
            if self.elf_name is None:
                raise ValueError('Cannot debug; elf is missing')
            client_cmd = (self.gdb_cmd +
                          self.tui_args +
                          [self.elf_name] +
                          ['-ex', 'target remote :{}'.format(self.gdb_port)])
            if command == 'debug':
                client_cmd += ['-ex', 'monitor halt',
                               '-ex', 'monitor reset',
                               '-ex', 'load']

            self.print_gdbserver_message()
            self.run_server_and_client(server_cmd, client_cmd)
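# Illustrative sketch only, not part of the removed file: the image-selection
# policy used by flash() above, pulled out as a tiny helper. The function name
# is hypothetical; the preference order (hex first, then bin) matches the
# code above.
import os

def pick_flash_file(hex_name, bin_name):
    if os.path.isfile(hex_name):
        return hex_name
    if os.path.isfile(bin_name):
        return bin_name
    raise ValueError('no hex ({}) or bin ({}) file to flash'.format(
        hex_name, bin_name))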
@ -1,34 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Runner stub for QEMU.'''

from west.runners.core import ZephyrBinaryRunner, RunnerCaps


class QemuBinaryRunner(ZephyrBinaryRunner):
    '''Place-holder for QEMU runner customizations.'''

    def __init__(self, cfg):
        super(QemuBinaryRunner, self).__init__(cfg)

    @classmethod
    def name(cls):
        return 'qemu'

    @classmethod
    def capabilities(cls):
        # This is a stub.
        return RunnerCaps(commands=set())

    @classmethod
    def do_add_parser(cls, parser):
        pass  # Nothing to do.

    @classmethod
    def create(cls, cfg, args):
        return QemuBinaryRunner(cfg)

    def do_run(self, command, **kwargs):
        pass
@ -1,40 +0,0 @@
# Copyright (c) 2017 Linaro Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Runner for debugging with xt-gdb.'''

from os import path

from west.runners.core import ZephyrBinaryRunner, RunnerCaps


class XtensaBinaryRunner(ZephyrBinaryRunner):
    '''Runner front-end for xt-gdb.'''

    def __init__(self, cfg):
        super(XtensaBinaryRunner, self).__init__(cfg)

    @classmethod
    def name(cls):
        return 'xtensa'

    @classmethod
    def capabilities(cls):
        return RunnerCaps(commands={'debug'})

    @classmethod
    def do_add_parser(cls, parser):
        parser.add_argument('--xcc-tools', required=True,
                            help='path to XTensa tools')

    @classmethod
    def create(cls, cfg, args):
        # Override any GDB with the one provided by the XTensa tools.
        cfg.gdb = path.join(args.xcc_tools, 'bin', 'xt-gdb')
        return XtensaBinaryRunner(cfg)

    def do_run(self, command, **kwargs):
        gdb_cmd = [self.cfg.gdb, self.cfg.elf_file]

        self.check_call(gdb_cmd)
@ -1,80 +0,0 @@
# Copyright 2018 Open Source Foundries Limited.
#
# SPDX-License-Identifier: Apache-2.0

'''Miscellaneous utilities used by west.'''

import os
import shlex
import textwrap


def quote_sh_list(cmd):
    '''Transform a command from list into shell string form.'''
    fmt = ' '.join('{}' for _ in cmd)
    args = [shlex.quote(s) for s in cmd]
    return fmt.format(*args)


def wrap(text, indent):
    '''Convenience routine for wrapping text to a consistent indent.'''
    return textwrap.wrap(text, initial_indent=indent,
                         subsequent_indent=indent)


class WestNotFound(RuntimeError):
    '''Neither the current directory nor any parent has a West installation.'''


def west_dir(start=None):
    '''Returns the absolute path of the west/ top level directory.

    Starts the search from the start directory, and goes to its
    parents. If the start directory is not specified, the current
    directory is used.

    Raises WestNotFound if no west top-level directory is found.
    '''
    return os.path.join(west_topdir(start), 'west')


def west_topdir(start=None):
    '''
    Like west_dir(), but returns the path to the parent directory of the west/
    directory instead, where project repositories are stored.
    '''
    # If you change this function, make sure to update the bootstrap
    # script's find_west_topdir().

    if start is None:
        cur_dir = os.getcwd()
    else:
        cur_dir = start

    while True:
        if os.path.isfile(os.path.join(cur_dir, 'west', '.west_topdir')):
            return cur_dir

        parent_dir = os.path.dirname(cur_dir)
        if cur_dir == parent_dir:
            # At the root
            raise WestNotFound('Could not find a West installation '
                               'in this or any parent directory')
        cur_dir = parent_dir


def in_multirepo_install(start=None):
    '''Returns True iff the path ``start`` is in a multi-repo installation.

    If start is not given, it defaults to the current working directory.

    This is equivalent to checking whether west_topdir() raises an exception
    when given the same start kwarg.
    '''
    try:
        west_topdir(start)
        result = True
    except WestNotFound:
        result = False
    return result
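# Example of what quote_sh_list() above produces: arguments containing spaces
# come back shell-quoted, so the joined string can be logged and re-run
# verbatim. The command used here is a made-up example, and the snippet uses
# shlex directly so it runs on its own.
import shlex

cmd = ['openocd', '-c', 'flash write_image erase zephyr.hex']
print(' '.join(shlex.quote(s) for s in cmd))
# Prints: openocd -c 'flash write_image erase zephyr.hex'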
scripts/west
@ -1,92 +0,0 @@
#!/usr/bin/env python3

# Zephyr launcher which is interoperable with:
#
# 1. "mono-repo" Zephyr installations that have 'make flash'
#    etc. supplied by a copy of some west code in scripts/meta.
#
# 2. "multi-repo" Zephyr installations where west is provided in a
#    separate Git repository elsewhere.
#
# This is basically a copy of the "wrapper" functionality in the west
# bootstrap script for the multi-repo case, plus a fallback onto the
# copy in scripts/meta/west for mono-repo installs.

import os
import sys

import colorama

if sys.version_info < (3,):
    sys.exit('fatal error: you are running Python 2')

# Top-level west directory, containing west itself and the manifest.
WEST_DIR = 'west'
# Subdirectory to check out the west source repository into.
WEST = 'west'
# File inside of WEST_DIR which marks it as the top level of the
# Zephyr project installation.
#
# (The WEST_DIR name is not distinct enough to use when searching for
# the top level; other directories named "west" may exist elsewhere,
# e.g. zephyr/doc/west.)
WEST_MARKER = '.west_topdir'


class WestNotFound(RuntimeError):
    '''Neither the current directory nor any parent has a West installation.'''


def find_west_topdir(start):
    '''Find the top-level installation directory, starting at ``start``.

    If none is found, raises WestNotFound.'''
    cur_dir = start

    while True:
        if os.path.isfile(os.path.join(cur_dir, WEST_DIR, WEST_MARKER)):
            return cur_dir

        parent_dir = os.path.dirname(cur_dir)
        if cur_dir == parent_dir:
            # At the root
            raise WestNotFound()
        cur_dir = parent_dir


def wrap(west_dir, argv):
    # Pull in the west main module, after adding the directory
    # containing the package to sys.path.
    sys.path.append(west_dir)
    import west.main

    # Invoke west's main with our arguments. It needs to be run from
    # this process for 'west debug' to work properly, so don't change
    # this code to running main in a subprocess.
    west.main.main(argv)


def main():
    # Figure out which west to run. If we're in a multirepo
    # installation, prefer the standalone west. Otherwise, we're in a
    # monorepo installation, so we need to fall back on the copy of
    # west in the Zephyr repository's scripts/meta directory.
    try:
        topdir = find_west_topdir(__file__)
        west_dir = os.path.join(topdir, 'west', 'west', 'src')
    except WestNotFound:
        west_dir = os.path.join(os.environ['ZEPHYR_BASE'], 'scripts', 'meta')

    try:
        wrap(west_dir, sys.argv[1:])
    finally:
        print(colorama.Fore.LIGHTRED_EX, end='')
        print('NOTE: you just ran a copy of west from {};'.
              format(os.path.dirname(__file__)),
              'this will be removed from the Zephyr repository in the future.',
              'West is now developed separately.')
        print(colorama.Style.RESET_ALL, end='', flush=True)


if __name__ == '__main__':
    main()
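# Illustrative sketch only, not part of the removed script: the wrap()
# technique above reduced to its essentials -- extend sys.path with the
# directory that *contains* the 'west' package, then import and dispatch
# in-process. The directory and argument list in the commented example are
# placeholders.
import sys

def run_west_from(package_parent_dir, argv):
    sys.path.append(package_parent_dir)  # make 'import west' resolvable
    import west.main                     # import lazily, after the path tweak
    west.main.main(argv)                 # run in-process, as 'west debug' requires

# Example (hypothetical path):
# run_west_from('/path/to/installation/west/west/src', ['flash'])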
@ -4,14 +4,3 @@ set ZEPHYR_BASE=%~dp0
if exist "%userprofile%\zephyrrc.cmd" (
    call "%userprofile%\zephyrrc.cmd"
)

rem Zephyr meta-tool (west) launcher alias, which keeps monorepo
rem Zephyr installations' 'make flash' etc. working. See
rem https://www.python.org/dev/peps/pep-0486/ for details on the
rem virtualenv-related pieces. (We need to implement this manually
rem because Zephyr's minimum supported Python version is 3.4.)
if defined VIRTUAL_ENV (
    doskey west=python %ZEPHYR_BASE%\scripts\west $*
) else (
    doskey west=py -3 %ZEPHYR_BASE%\scripts\west $*
)
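# Illustrative sketch only, unrelated to the .cmd change itself: a rough
# Python equivalent of the VIRTUAL_ENV check the batch aliases above rely on.
# The helper name is hypothetical.
import os
import sys

def in_virtualenv():
    # The activate script exports VIRTUAL_ENV; venvs also change sys.prefix
    # relative to the base prefix (getattr keeps this safe on older Pythons).
    return ('VIRTUAL_ENV' in os.environ or
            sys.prefix != getattr(sys, 'base_prefix', sys.prefix))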