forked from brl/citadel
Squashed 'poky/' content from commit 4469acdf1d
git-subtree-dir: poky git-subtree-split: 4469acdf1d0338220f3fe2ecb5e079eea6fda375
This commit is contained in:
369
scripts/lib/devtool/__init__.py
Normal file
369
scripts/lib/devtool/__init__.py
Normal file
@@ -0,0 +1,369 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
# Development tool - utility functions for plugins
|
||||
#
|
||||
# Copyright (C) 2014 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
"""Devtool plugins module"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import subprocess
|
||||
import logging
|
||||
import re
|
||||
import codecs
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
class DevtoolError(Exception):
    """Error raised by devtool operations.

    Carries an ``exitcode`` attribute so callers can propagate a
    specific process exit status (defaults to 1).
    """
    def __init__(self, message, exitcode=1):
        self.exitcode = exitcode
        super().__init__(message)
|
||||
|
||||
|
||||
def exec_build_env_command(init_path, builddir, cmd, watch=False, **options):
    """Run a program in bitbake build context

    Arguments:
        init_path -- path to the build environment setup script; sourced
                     before running cmd if specified (may be None/empty)
        builddir  -- build directory; passed to the init script and used
                     as the default cwd for the command
        cmd       -- shell command string to execute
        watch     -- if True, stream output to stdout via exec_watch();
                     otherwise run via bb.process.run()
        options   -- extra keyword arguments passed through to the
                     process invocation
    """
    import bb
    if not 'cwd' in options:
        options["cwd"] = builddir
    if init_path:
        # As the OE init script makes use of BASH_SOURCE to determine OEROOT,
        # and can't determine it when running under dash, we need to set
        # the executable to bash to correctly set things up
        if not 'executable' in options:
            options['executable'] = 'bash'
        logger.debug('Executing command: "%s" using init path %s' % (cmd, init_path))
        # Source the init script (silencing its output) before the command
        init_prefix = '. %s %s > /dev/null && ' % (init_path, builddir)
    else:
        logger.debug('Executing command "%s"' % cmd)
        init_prefix = ''
    if watch:
        if sys.stdout.isatty():
            # Fool bitbake into thinking it's outputting to a terminal (because it is, indirectly)
            cmd = 'script -e -q -c "%s" /dev/null' % cmd
        return exec_watch('%s%s' % (init_prefix, cmd), **options)
    else:
        return bb.process.run('%s%s' % (init_prefix, cmd), **options)
|
||||
|
||||
def exec_watch(cmd, **options):
    """Run a command, echoing its output to sys.stdout as it is produced.

    Arguments:
        cmd     -- command to run (string or argument list); a string is
                   run through the shell unless 'shell' is already set
        options -- extra keyword arguments passed to subprocess.Popen

    Returns a (output, None) tuple mirroring bb.process.run().
    Raises bb.process.ExecutionError if the command exits non-zero.
    """
    import bb
    if isinstance(cmd, str) and 'shell' not in options:
        options['shell'] = True

    process = subprocess.Popen(
        cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **options
    )

    # Decode incrementally so multi-byte UTF-8 sequences split across
    # reads are handled correctly. Accumulate chunks in a list and join
    # once at the end instead of the original quadratic 'buf += out'.
    reader = codecs.getreader('utf-8')(process.stdout)
    chunks = []
    while True:
        out = reader.read(1, 1)
        if out:
            sys.stdout.write(out)
            sys.stdout.flush()
            chunks.append(out)
        elif out == '' and process.poll() is not None:
            # EOF and the process has exited (fix: was '!= None')
            break

    buf = ''.join(chunks)
    if process.returncode != 0:
        raise bb.process.ExecutionError(cmd, process.returncode, buf, None)

    return buf, None
|
||||
|
||||
def exec_fakeroot(d, cmd, **kwargs):
    """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions

    Arguments:
        d      -- recipe datastore (FAKEROOTCMD / FAKEROOTENV are read)
        cmd    -- command string to run under pseudo
        kwargs -- extra keyword arguments for subprocess.call()

    Returns the subprocess exit code, or 2 if pseudo is not available.
    """
    # Grab the command and check it actually exists
    fakerootcmd = d.getVar('FAKEROOTCMD')
    if not os.path.exists(fakerootcmd):
        # Fix: the original call had a %s placeholder but never supplied
        # the value, so a literal '%s' was logged
        logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built', fakerootcmd)
        return 2
    # Set up the appropriate environment (FAKEROOTENV is a space-separated
    # list of VAR=value settings)
    newenv = dict(os.environ)
    fakerootenv = d.getVar('FAKEROOTENV')
    for varvalue in fakerootenv.split():
        if '=' in varvalue:
            splitval = varvalue.split('=', 1)
            newenv[splitval[0]] = splitval[1]
    return subprocess.call("%s %s" % (fakerootcmd, cmd), env=newenv, **kwargs)
|
||||
|
||||
def setup_tinfoil(config_only=False, basepath=None, tracking=False):
    """Initialize tinfoil api from bitbake

    Arguments:
        config_only -- if True, only parse configuration (no recipes)
        basepath    -- directory to chdir into while initializing; the
                       original cwd is always restored afterwards
        tracking    -- enable variable-history tracking in the datastore

    Returns a prepared bb.tinfoil.Tinfoil instance; the caller is
    responsible for calling shutdown() on it.
    Raises DevtoolError if the bitbake UI fails to start; exits the
    process if bitbake cannot be located at all.
    """
    import scriptpath
    orig_cwd = os.path.abspath(os.curdir)
    try:
        if basepath:
            # Initialize from the requested base directory
            os.chdir(basepath)
        bitbakepath = scriptpath.add_bitbake_lib_path()
        if not bitbakepath:
            logger.error("Unable to find bitbake by searching parent directory of this script or PATH")
            sys.exit(1)

        # Deferred import: only possible once add_bitbake_lib_path() has
        # put bitbake's lib directory on sys.path
        import bb.tinfoil
        tinfoil = bb.tinfoil.Tinfoil(tracking=tracking)
        try:
            tinfoil.logger.setLevel(logger.getEffectiveLevel())
            tinfoil.prepare(config_only)
        except bb.tinfoil.TinfoilUIException:
            # UI failed to start - surface a devtool error rather than a
            # raw traceback
            tinfoil.shutdown()
            raise DevtoolError('Failed to start bitbake environment')
        except:
            # Any other failure: make sure we don't leave a stale bitbake
            # server running before re-raising
            tinfoil.shutdown()
            raise
    finally:
        # Always restore the original working directory
        os.chdir(orig_cwd)
    return tinfoil
|
||||
|
||||
def parse_recipe(config, tinfoil, pn, appends, filter_workspace=True):
    """Parse the specified recipe

    Arguments:
        config           -- devtool configuration (workspace_path is used
                            when filtering appends)
        tinfoil          -- prepared tinfoil instance
        pn               -- name of the recipe to parse
        appends          -- whether to apply bbappends
        filter_workspace -- if True, exclude appends that live inside the
                            devtool workspace

    Returns the parsed recipe datastore, or None on error (the error is
    logged).
    """
    # Fix: 'bb' is not imported at module level in this file, so without
    # this import the 'except bb.providers.NoProvider' below would raise
    # NameError instead of handling the intended exception
    import bb.providers
    try:
        recipefile = tinfoil.get_recipe_file(pn)
    except bb.providers.NoProvider as e:
        logger.error(str(e))
        return None
    if appends:
        append_files = tinfoil.get_file_appends(recipefile)
        if filter_workspace:
            # Filter out appends from the workspace
            append_files = [path for path in append_files if
                            not path.startswith(config.workspace_path)]
    else:
        append_files = None
    try:
        rd = tinfoil.parse_recipe_file(recipefile, appends, append_files)
    except Exception as e:
        logger.error(str(e))
        return None
    return rd
|
||||
|
||||
def check_workspace_recipe(workspace, pn, checksrc=True, bbclassextend=False):
    """
    Check that a recipe is in the workspace and (optionally) that source
    is present.
    """
    # Find the workspace entry that corresponds to pn: either a direct
    # name match, or (with bbclassextend) a recipe whose BBCLASSEXTEND
    # variants include pn
    workspacepn = None
    for recipe, value in workspace.items():
        if recipe == pn:
            workspacepn = pn
            break
        if bbclassextend:
            recipefile = value['recipefile']
            if recipefile and pn in get_bbclassextend_targets(recipefile, recipe):
                workspacepn = recipe
                break
    if workspacepn is None:
        raise DevtoolError("No recipe named '%s' in your workspace" % pn)

    if checksrc:
        srctree = workspace[workspacepn]['srctree']
        if not os.path.exists(srctree):
            raise DevtoolError("Source tree %s for recipe %s does not exist" % (srctree, workspacepn))
        if not os.listdir(srctree):
            raise DevtoolError("Source tree %s for recipe %s is empty" % (srctree, workspacepn))

    return workspacepn
|
||||
|
||||
def use_external_build(same_dir, no_same_dir, d):
    """
    Determine if we should use B!=S (separate build and source directories) or not

    Arguments:
        same_dir    -- user explicitly requested B==S (--same-dir)
        no_same_dir -- user explicitly requested B!=S (--no-same-dir)
        d           -- recipe datastore (consulted only when neither
                       option was specified)

    Returns True if the source tree should be used as the build
    directory (B==S), False otherwise.
    """
    b_is_s = True
    if no_same_dir:
        logger.info('Using separate build directory since --no-same-dir specified')
        b_is_s = False
    elif same_dir:
        logger.info('Using source tree as build directory since --same-dir specified')
    else:
        # Fix: 'bb' is not imported at module level in this file; import
        # it here, on the only path that needs it, to avoid a NameError
        import bb.data
        if bb.data.inherits_class('autotools-brokensep', d):
            logger.info('Using source tree as build directory since recipe inherits autotools-brokensep')
        elif os.path.abspath(d.getVar('B')) == os.path.abspath(d.getVar('S')):
            logger.info('Using source tree as build directory since that would be the default for this recipe')
        else:
            b_is_s = False
    return b_is_s
|
||||
|
||||
def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
    """
    Set up the git repository for the source tree

    Arguments:
        repodir   -- source tree directory (initialised as a git repo if
                     it isn't one already)
        version   -- upstream version, used in the initial commit message
                     (may be empty/None)
        devbranch -- name of the development branch to create and check out
        basetag   -- tag (force-)applied to mark the base revision
        d         -- optional datastore passed through for git user options
    """
    import bb.process
    import oe.patch
    if not os.path.exists(os.path.join(repodir, '.git')):
        # Fresh tree: create the repo and make an initial commit of
        # everything present
        bb.process.run('git init', cwd=repodir)
        bb.process.run('git add .', cwd=repodir)
        commit_cmd = ['git']
        # Inject user.name/user.email options so the commit works even
        # without global git configuration
        oe.patch.GitApplyTree.gitCommandUserOptions(commit_cmd, d=d)
        commit_cmd += ['commit', '-q']
        stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
        if not stdout:
            # Nothing staged - e.g. recipe fetched no sources
            commit_cmd.append('--allow-empty')
            commitmsg = "Initial empty commit with no upstream sources"
        elif version:
            commitmsg = "Initial commit from upstream at version %s" % version
        else:
            commitmsg = "Initial commit from upstream"
        commit_cmd += ['-m', commitmsg]
        bb.process.run(commit_cmd, cwd=repodir)

    # Create the development branch and (re-)point the base tag at the
    # current revision
    bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
    bb.process.run('git tag -f %s' % basetag, cwd=repodir)
|
||||
|
||||
def recipe_to_append(recipefile, config, wildcard=False):
    """
    Convert a recipe file to a bbappend file path within the workspace.
    NOTE: if the bbappend already exists, you should be using
    workspace[args.recipename]['bbappend'] instead of calling this
    function.
    """
    base = os.path.splitext(os.path.basename(recipefile))[0]
    if wildcard:
        # Replace everything after the first '_' (the version part) with
        # '%' so the append applies to any version of the recipe
        base = re.sub(r'_.*', '_%', base)
    return os.path.join(config.workspace_path, 'appends', base + '.bbappend')
|
||||
|
||||
def get_bbclassextend_targets(recipefile, pn):
    """
    Cheap function to get BBCLASSEXTEND and then convert that to the
    list of targets that would result.

    Reads the recipe text directly via bb.utils.edit_metadata rather
    than doing a full parse, so it only sees a BBCLASSEXTEND value set
    literally in the recipe file itself.
    """
    import bb.utils

    # Capture the raw BBCLASSEXTEND value; returning origvalue unchanged
    # means edit_metadata modifies nothing
    values = {}
    def get_bbclassextend_varfunc(varname, origvalue, op, newlines):
        values[varname] = origvalue
        return origvalue, None, 0, True
    with open(recipefile, 'r') as f:
        bb.utils.edit_metadata(f, ['BBCLASSEXTEND'], get_bbclassextend_varfunc)

    targets = []
    bbclassextend = values.get('BBCLASSEXTEND', '').split()
    if bbclassextend:
        for variant in bbclassextend:
            # nativesdk is applied as a prefix; native/cross/crosssdk as
            # suffixes. Other variants (e.g. multilib) are ignored here.
            if variant == 'nativesdk':
                targets.append('%s-%s' % (variant, pn))
            elif variant in ['native', 'cross', 'crosssdk']:
                targets.append('%s-%s' % (pn, variant))
    return targets
|
||||
|
||||
def replace_from_file(path, old, new):
    """Replace every occurrence of *old* with *new* in the file at *path*.

    Does nothing if *old* is None or if the file does not exist; any
    other I/O error is re-raised. The rewritten file has trailing
    whitespace stripped and ends with exactly one newline.
    """
    # Fix: errno is not imported at module level in this file, so the
    # ENOENT check below previously raised NameError
    import errno

    def read_file(path):
        # Return the entire file contents
        with open(path) as f:
            return f.read()

    def write_file(path, data):
        if data is None:
            return
        # Trim trailing whitespace and ensure a single final newline
        wdata = data.rstrip() + "\n"
        with open(path, "w") as f:
            f.write(wdata)

    # In case old is None, return immediately
    if old is None:
        return
    try:
        rdata = read_file(path)
    except IOError as e:
        # If the file does not exist, just quit; otherwise re-raise
        if e.errno == errno.ENOENT:
            return
        else:
            raise

    # NOTE: the original wrapped str.replace() in try/except ValueError,
    # but str.replace() cannot raise ValueError - dead code removed
    new_contents = [line.replace(old, new) for line in rdata.splitlines()]
    write_file(path, "\n".join(new_contents))
|
||||
|
||||
|
||||
def update_unlockedsigs(basepath, workspace, fixed_setup, extra=None):
    """ This function will make unlocked-sigs.inc match the recipes in the
    workspace plus any extras we want unlocked.

    Arguments:
        basepath    -- base directory (unlocked-sigs.inc is written to
                       <basepath>/conf)
        workspace   -- workspace dict; its keys are the recipes to unlock
        fixed_setup -- True within the eSDK; no-op otherwise
        extra       -- optional list of additional recipes to unlock
    """

    if not fixed_setup:
        # Only need to write this out within the eSDK
        return

    # Fix: bb is not imported at module level in this file; import it
    # here (after the early return) so non-eSDK callers never require
    # bitbake to be importable
    import bb.utils

    if not extra:
        extra = []

    confdir = os.path.join(basepath, 'conf')
    unlockedsigs = os.path.join(confdir, 'unlocked-sigs.inc')

    # Get current unlocked list if any
    values = {}
    def get_unlockedsigs_varfunc(varname, origvalue, op, newlines):
        values[varname] = origvalue
        return origvalue, None, 0, True
    if os.path.exists(unlockedsigs):
        with open(unlockedsigs, 'r') as f:
            bb.utils.edit_metadata(f, ['SIGGEN_UNLOCKED_RECIPES'], get_unlockedsigs_varfunc)
    unlocked = sorted(values.get('SIGGEN_UNLOCKED_RECIPES', []))

    # If the new list is different to the current list, write it out
    newunlocked = sorted(list(workspace.keys()) + extra)
    if unlocked != newunlocked:
        bb.utils.mkdirhier(confdir)
        with open(unlockedsigs, 'w') as f:
            f.write("# DO NOT MODIFY! YOUR CHANGES WILL BE LOST.\n" +
                    "# This layer was created by the OpenEmbedded devtool" +
                    " utility in order to\n" +
                    "# contain recipes that are unlocked.\n")

            f.write('SIGGEN_UNLOCKED_RECIPES += "\\\n')
            for pn in newunlocked:
                f.write(' ' + pn)
            f.write('"')
|
||||
|
||||
def check_prerelease_version(ver, operation):
    """Warn the user when *ver* looks like a pre-release version string."""
    if not any(marker in ver for marker in ('pre', 'rc')):
        return
    msg = ('Version "%s" looks like a pre-release version. '
           'If that is the case, in order to ensure that the '
           'version doesn\'t appear to go backwards when you '
           'later upgrade to the final release version, it is '
           'recommmended that instead you use '
           '<current version>+<pre-release version> e.g. if '
           'upgrading from 1.9 to 2.0-rc2 use "1.9+2.0-rc2". '
           'If you prefer not to reset and re-try, you can change '
           'the version after %s succeeds using "devtool rename" '
           'with -V/--version.' % (ver, operation))
    logger.warning(msg)
|
||||
|
||||
def check_git_repo_dirty(repodir):
    """Check if a git repository is clean or not.

    Returns the output of 'git status --porcelain' for *repodir*: an
    empty string means the repository is clean, anything else means it
    has uncommitted changes.
    """
    # Fix: bb is not imported at module level in this file, so
    # bb.process previously raised NameError here
    import bb.process
    stdout, _ = bb.process.run('git status --porcelain', cwd=repodir)
    return stdout
|
||||
|
||||
def check_git_repo_op(srctree, ignoredirs=None):
    """Check if a git repository is in the middle of a rebase

    Raises DevtoolError if the repository containing *srctree* is in the
    middle of a rebase or 'git am'/'git apply'. Repositories whose top
    level directory is listed in *ignoredirs* are skipped.
    """
    # Fix: bb is not imported at module level in this file, so
    # bb.process previously raised NameError here
    import bb.process
    stdout, _ = bb.process.run('git rev-parse --show-toplevel', cwd=srctree)
    topleveldir = stdout.strip()
    if ignoredirs and topleveldir in ignoredirs:
        return
    gitdir = os.path.join(topleveldir, '.git')
    if os.path.exists(os.path.join(gitdir, 'rebase-merge')):
        raise DevtoolError("Source tree %s appears to be in the middle of a rebase - please resolve this first" % srctree)
    if os.path.exists(os.path.join(gitdir, 'rebase-apply')):
        raise DevtoolError("Source tree %s appears to be in the middle of 'git am' or 'git apply' - please resolve this first" % srctree)
|
86
scripts/lib/devtool/build.py
Normal file
86
scripts/lib/devtool/build.py
Normal file
@@ -0,0 +1,86 @@
|
||||
# Development tool - build command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2015 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
"""Devtool build plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import tempfile
|
||||
from devtool import exec_build_env_command, check_workspace_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
|
||||
def _set_file_values(fn, values):
    """Set variable assignments in file *fn* to the given *values*.

    Existing assignments are edited in place (via bb.utils.edit_metadata);
    variables not already present in the file are appended. A value of
    None removes the assignment. Returns True if the file was changed.
    """
    # Track which variables edit_metadata did NOT encounter in the file,
    # so they can be appended afterwards
    remaining = list(values.keys())

    def varfunc(varname, origvalue, op, newlines):
        newvalue = values.get(varname, origvalue)
        remaining.remove(varname)
        # (newvalue, '=', 0, True): replace the value, use '=' as the
        # operator, no indentation, rewrite as a single line
        return (newvalue, '=', 0, True)

    with open(fn, 'r') as f:
        (updated, newlines) = bb.utils.edit_metadata(f, values, varfunc)

    # Append any variables that weren't already in the file
    for item in remaining:
        updated = True
        newlines.append('%s = "%s"' % (item, values[item]))

    if updated:
        with open(fn, 'w') as f:
            f.writelines(newlines)
    return updated
|
||||
|
||||
def _get_build_tasks(config):
|
||||
tasks = config.get('Build', 'build_task', 'populate_sysroot,packagedata').split(',')
|
||||
return ['do_%s' % task.strip() for task in tasks]
|
||||
|
||||
def build(args, config, basepath, workspace):
    """Entry point for the devtool 'build' subcommand

    Runs the configured build tasks for args.recipename via bitbake.
    Returns 0 on success, or the bitbake exit code on failure.
    """
    # Resolve the workspace entry (allowing BBCLASSEXTEND variants such
    # as <pn>-native to match the base recipe)
    workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)

    build_tasks = _get_build_tasks(config)

    bbappend = workspace[workspacepn]['bbappend']
    if args.disable_parallel_make:
        # Temporarily force PARALLEL_MAKE off via the workspace bbappend
        logger.info("Disabling 'make' parallelism")
        _set_file_values(bbappend, {'PARALLEL_MAKE': ''})
    try:
        bbargs = []
        for task in build_tasks:
            # Packaging tasks are skipped for native recipes
            if args.recipename.endswith('-native') and 'package' in task:
                continue
            bbargs.append('%s:%s' % (args.recipename, task))
        exec_build_env_command(config.init_path, basepath, 'bitbake %s' % ' '.join(bbargs), watch=True)
    except bb.process.ExecutionError as e:
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode
    finally:
        if args.disable_parallel_make:
            # Remove the temporary PARALLEL_MAKE override again
            _set_file_values(bbappend, {'PARALLEL_MAKE': None})

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    desc = ('Builds the specified recipe using bitbake (up to and including %s)'
            % ', '.join(_get_build_tasks(context.config)))
    parser = subparsers.add_parser('build', help='Build a recipe',
                                   description=desc,
                                   group='working', order=50)
    parser.add_argument('recipename', help='Recipe to build')
    parser.add_argument('-s', '--disable-parallel-make', action="store_true",
                        help='Disable make parallelism')
    parser.set_defaults(func=build)
|
174
scripts/lib/devtool/build_image.py
Normal file
174
scripts/lib/devtool/build_image.py
Normal file
@@ -0,0 +1,174 @@
|
||||
# Development tool - build-image plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Devtool plugin containing the build-image subcommand."""
|
||||
|
||||
import os
|
||||
import errno
|
||||
import logging
|
||||
|
||||
from bb.process import ExecutionError
|
||||
from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
class TargetNotImageError(Exception):
    """Raised when the requested build target is not an image recipe."""
|
||||
|
||||
def _get_packages(tinfoil, workspace, config):
    """Get list of packages from recipes in the workspace.

    Only target-class recipes that produce a package named after the
    recipe itself are included; others are skipped with a warning.
    """
    result = []
    for recipe in workspace:
        # NOTE(review): parse_recipe() can return None on a parse error,
        # in which case data.getVar below would fail - confirm callers
        # only reach here with parseable workspace recipes
        data = parse_recipe(config, tinfoil, recipe, True)
        # Skip native/cross/nativesdk variants - only target recipes
        # contribute packages to an image
        if 'class-target' in data.getVar('OVERRIDES').split(':'):
            if recipe in data.getVar('PACKAGES').split():
                result.append(recipe)
            else:
                logger.warning("Skipping recipe %s as it doesn't produce a "
                               "package with the same name", recipe)
    return result
|
||||
|
||||
def build_image(args, config, basepath, workspace):
    """Entry point for the devtool 'build-image' subcommand.

    Determines the image to build (explicit argument, or the first
    configured SDK target) and delegates to build_image_task().
    Returns the build result code.
    """

    image = args.imagename
    auto_image = False
    if not image:
        # No image specified - fall back to the first SDK target, if any
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            image = sdk_targets[0]
            auto_image = True
    if not image:
        raise DevtoolError('Unable to determine image to build, please specify one')

    try:
        if args.add_packages:
            add_packages = args.add_packages.split(',')
        else:
            add_packages = None
        result, outputdir = build_image_task(config, basepath, workspace, image, add_packages)
    except TargetNotImageError:
        # Distinguish "we guessed wrong" from "the user named a
        # non-image recipe" in the error message
        if auto_image:
            raise DevtoolError('Unable to determine image to build, please specify one')
        else:
            raise DevtoolError('Specified recipe %s is not an image recipe' % image)

    if result == 0:
        logger.info('Successfully built %s. You can find output files in %s'
                    % (image, outputdir))
    return result
|
||||
|
||||
def build_image_task(config, basepath, workspace, image, add_packages=None, task=None, extra_append=None):
    """Build an image (or run *task* against it), optionally extended with
    packages from the workspace.

    Arguments:
        config       -- devtool configuration (read and updated)
        basepath     -- base build directory
        workspace    -- workspace dict of recipes whose packages should be
                        added to the image
        image        -- image recipe name
        add_packages -- explicit package list to add instead of deriving
                        one from the workspace
        task         -- optional specific task to run (e.g. populate_sdk_ext)
        extra_append -- optional extra lines for the temporary bbappend

    Returns a (result, outputdir) tuple; result is 0 on success.
    Raises TargetNotImageError if *image* does not inherit image.bbclass.
    """
    # remove <image>.bbappend to make sure setup_tinfoil doesn't
    # break because of it
    target_basename = config.get('SDK', 'target_basename', '')
    if target_basename:
        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)
        try:
            os.unlink(appendfile)
        except OSError as exc:
            # Only ignore "file not found"
            if exc.errno != errno.ENOENT:
                raise

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, image, True)
        if not rd:
            # Error already shown
            return (1, None)
        # NOTE(review): 'bb' is only brought in via
        # 'from bb.process import ExecutionError' at the top of this
        # file - confirm the plain 'bb' name is resolvable here
        if not bb.data.inherits_class('image', rd):
            raise TargetNotImageError()

        # Get the actual filename used and strip the .bb and full path
        target_basename = rd.getVar('FILE')
        target_basename = os.path.splitext(os.path.basename(target_basename))[0]
        # Remember the image basename so the stale-append cleanup above
        # works on the next invocation
        config.set('SDK', 'target_basename', target_basename)
        config.write()

        appendfile = os.path.join(config.workspace_path, 'appends',
                                  '%s.bbappend' % target_basename)

        outputdir = None
        try:
            # Work out which packages to add to the image, if any
            if workspace or add_packages:
                if add_packages:
                    packages = add_packages
                else:
                    packages = _get_packages(tinfoil, workspace, config)
            else:
                packages = None
            if not task:
                if not packages and not add_packages and workspace:
                    logger.warning('No recipes in workspace, building image %s unmodified', image)
                elif not packages:
                    logger.warning('No packages to add, building image %s unmodified', image)

            if packages or extra_append:
                # Write a temporary bbappend to inject the packages and/or
                # extra lines; it is removed again in the finally below
                bb.utils.mkdirhier(os.path.dirname(appendfile))
                with open(appendfile, 'w') as afile:
                    if packages:
                        # include packages from workspace recipes into the image
                        afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
                        if not task:
                            logger.info('Building image %s with the following '
                                        'additional packages: %s', image, ' '.join(packages))
                    if extra_append:
                        for line in extra_append:
                            afile.write('%s\n' % line)

            # SDK tasks deploy elsewhere than normal image builds
            if task in ['populate_sdk', 'populate_sdk_ext']:
                outputdir = rd.getVar('SDK_DEPLOY')
            else:
                outputdir = rd.getVar('DEPLOY_DIR_IMAGE')

            # Shut tinfoil down before invoking bitbake, clearing the
            # local so the outer finally doesn't shut it down twice
            tmp_tinfoil = tinfoil
            tinfoil = None
            tmp_tinfoil.shutdown()

            options = ''
            if task:
                options += '-c %s' % task

            # run bitbake to build image (or specified task)
            try:
                exec_build_env_command(config.init_path, basepath,
                                       'bitbake %s %s' % (options, image), watch=True)
            except ExecutionError as err:
                return (err.exitcode, None)
        finally:
            # Always remove the temporary bbappend
            if os.path.isfile(appendfile):
                os.unlink(appendfile)
    finally:
        if tinfoil:
            tinfoil.shutdown()
    return (0, outputdir)
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the build-image plugin"""
    desc = ('Builds an image, extending it to include '
            'packages from recipes in the workspace')
    pkg_help = ('Instead of adding packages for the '
                'entire workspace, specify packages to be added to the image '
                '(separate multiple packages by commas)')
    parser_build_image = subparsers.add_parser('build-image',
                                               help='Build image including workspace recipe packages',
                                               description=desc,
                                               group='testbuild', order=-10)
    parser_build_image.add_argument('imagename', help='Image recipe to build', nargs='?')
    parser_build_image.add_argument('-p', '--add-packages', help=pkg_help,
                                    metavar='PACKAGES')
    parser_build_image.set_defaults(func=build_image)
|
65
scripts/lib/devtool/build_sdk.py
Normal file
65
scripts/lib/devtool/build_sdk.py
Normal file
@@ -0,0 +1,65 @@
|
||||
# Development tool - build-sdk command plugin
|
||||
#
|
||||
# Copyright (C) 2015-2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
import glob
|
||||
import shutil
|
||||
import errno
|
||||
import sys
|
||||
import tempfile
|
||||
from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
|
||||
from devtool import build_image
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
|
||||
def build_sdk(args, config, basepath, workspace):
    """Entry point for the devtool build-sdk command

    Builds a derivative extensible SDK based on the first configured SDK
    target image. Returns the build result code.
    """

    sdk_targets = config.get('SDK', 'sdk_targets', '').split()
    if sdk_targets:
        image = sdk_targets[0]
    else:
        raise DevtoolError('Unable to determine image to build SDK for')

    # Mark the build as a derivative SDK via the temporary bbappend
    extra_append = ['SDK_DERIVATIVE = "1"']
    try:
        result, outputdir = build_image.build_image_task(config,
                                                         basepath,
                                                         workspace,
                                                         image,
                                                         task='populate_sdk_ext',
                                                         extra_append=extra_append)
    except build_image.TargetNotImageError:
        raise DevtoolError('Unable to determine image to build SDK for')

    if result == 0:
        logger.info('Successfully built SDK. You can find output files in %s'
                    % outputdir)
    return result
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands"""
    # build-sdk only makes sense from within the extensible SDK
    if not context.fixed_setup:
        return
    parser = subparsers.add_parser('build-sdk',
                                   help='Build a derivative SDK of this one',
                                   description='Builds an extensible SDK based upon this one and the items in your workspace',
                                   group='advanced')
    parser.set_defaults(func=build_sdk)
|
354
scripts/lib/devtool/deploy.py
Normal file
354
scripts/lib/devtool/deploy.py
Normal file
@@ -0,0 +1,354 @@
|
||||
# Development tool - deploy/undeploy command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
"""Devtool plugin containing the deploy subcommands"""
|
||||
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import tempfile
|
||||
|
||||
import bb.utils
|
||||
import argparse_oe
|
||||
import oe.types
|
||||
|
||||
from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
deploylist_path = '/.devtool'
|
||||
|
||||
def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=False, nopreserve=False, nocheckspace=False):
    """
    Prepare a shell script for running on the target to
    deploy/undeploy files. We have to be careful what we put in this
    script - only commands that are likely to be available on the
    target are suitable (the target might be constrained, e.g. using
    busybox rather than bash with coreutils).

    The generated script expects positional arguments:
        $1 -- recipe name
        $2 -- destination directory (deploy only)
        $3 -- file-list manifest, first line total size (deploy only)

    Arguments:
        deploy       -- True to generate a deploy script, False for undeploy
        verbose      -- echo each deployed file as it is extracted
        dryrun       -- only report what would be done
        undeployall  -- undeploy every previously deployed recipe
        nopreserve   -- do not preserve pre-existing files on the target
        nocheckspace -- skip the free-space check

    Returns the script as a single string.
    """
    lines = []
    lines.append('#!/bin/sh')
    lines.append('set -e')
    if undeployall:
        # Yes, I know this is crude - but it does work
        # Loop over every manifest in the deploy list directory, setting
        # $1 to each recipe name in turn
        lines.append('for entry in %s/*.list; do' % deploylist_path)
        lines.append('[ ! -f $entry ] && exit')
        lines.append('set `basename $entry | sed "s/.list//"`')
    if dryrun:
        if not deploy:
            lines.append('echo "Previously deployed files for $1:"')
    lines.append('manifest="%s/$1.list"' % deploylist_path)
    lines.append('preservedir="%s/$1.preserve"' % deploylist_path)
    lines.append('if [ -f $manifest ] ; then')
    # Read manifest in reverse and delete files / remove empty dirs
    lines.append('    sed \'1!G;h;$!d\' $manifest | while read file')
    lines.append('    do')
    if dryrun:
        # Dry run: just list the files that would be removed
        lines.append('        if [ ! -d $file ] ; then')
        lines.append('            echo $file')
        lines.append('        fi')
    else:
        lines.append('        if [ -d $file ] ; then')
        # Avoid deleting a preserved directory in case it has special perms
        lines.append('            if [ ! -d $preservedir/$file ] ; then')
        lines.append('                rmdir $file > /dev/null 2>&1 || true')
        lines.append('            fi')
        lines.append('        else')
        lines.append('            rm -f $file')
        lines.append('        fi')
    lines.append('    done')
    if not dryrun:
        lines.append('    rm $manifest')
    if not deploy and not dryrun:
        # May as well remove all traces
        lines.append('    rmdir `dirname $manifest` > /dev/null 2>&1 || true')
    lines.append('fi')

    if deploy:
        if not nocheckspace:
            # Check for available space
            # FIXME This doesn't take into account files spread across multiple
            # partitions, but doing that is non-trivial
            # Find the part of the destination path that exists
            lines.append('checkpath="$2"')
            lines.append('while [ "$checkpath" != "/" ] && [ ! -e $checkpath ]')
            lines.append('do')
            lines.append('    checkpath=`dirname "$checkpath"`')
            lines.append('done')
            lines.append(r'freespace=$(df -P $checkpath | sed -nre "s/^(\S+\s+){3}([0-9]+).*/\2/p")')
            # First line of the file is the total space
            lines.append('total=`head -n1 $3`')
            lines.append('if [ $total -gt $freespace ] ; then')
            lines.append('    echo "ERROR: insufficient space on target (available ${freespace}, needed ${total})"')
            lines.append('    exit 1')
            lines.append('fi')
        if not nopreserve:
            # Preserve any files that exist. Note that this will add to the
            # preserved list with successive deployments if the list of files
            # deployed changes, but because we've deleted any previously
            # deployed files at this point it will never preserve anything
            # that was deployed, only files that existed prior to any deploying
            # (which makes the most sense)
            lines.append('cat $3 | sed "1d" | while read file fsize')
            lines.append('do')
            lines.append('    if [ -e $file ] ; then')
            lines.append('    dest="$preservedir/$file"')
            lines.append('    mkdir -p `dirname $dest`')
            lines.append('    mv $file $dest')
            lines.append('    fi')
            lines.append('done')
            lines.append('rm $3')
        lines.append('mkdir -p `dirname $manifest`')
        lines.append('mkdir -p $2')
        # The deployed files arrive on the script's stdin as a tar stream;
        # record the extracted paths as the new manifest
        if verbose:
            lines.append('    tar xv -C $2 -f - | tee $manifest')
        else:
            lines.append('    tar xv -C $2 -f - > $manifest')
        lines.append('sed -i "s!^./!$2!" $manifest')
    elif not dryrun:
        # Put any preserved files back
        lines.append('if [ -d $preservedir ] ; then')
        lines.append('    cd $preservedir')
        # find from busybox might not have -exec, so we don't use that
        lines.append('    find . -type f | while read file')
        lines.append('    do')
        lines.append('        mv $file /$file')
        lines.append('    done')
        lines.append('    cd /')
        lines.append('    rm -rf $preservedir')
        lines.append('fi')

    if undeployall:
        if not dryrun:
            lines.append('echo "NOTE: Successfully undeployed $1"')
        lines.append('done')

    # Delete the script itself
    lines.append('rm $0')
    lines.append('')

    return '\n'.join(lines)
|
||||
|
||||
|
||||
|
||||
def deploy(args, config, basepath, workspace):
    """Entry point for the devtool 'deploy' subcommand.

    Deploys the recipe's ${D} (do_install output) to a live target over ssh.
    The file transfer itself is a tar stream piped over ssh into a generated
    shell script on the target (scp is only used for the script/file list,
    because scp does not preserve symlinks).

    Returns 0 on success; raises DevtoolError on any failure.
    """
    import math
    import oe.recipeutils
    import oe.package

    check_workspace_recipe(workspace, args.recipename, checksrc=False)

    # Split an optional ':destdir' suffix off the target specification;
    # default to deploying into the target's root directory.
    try:
        host, destdir = args.target.split(':')
    except ValueError:
        destdir = '/'
    else:
        args.target = host
    if not destdir.endswith('/'):
        destdir += '/'

    tinfoil = setup_tinfoil(basepath=basepath)
    try:
        try:
            rd = tinfoil.parse_recipe(args.recipename)
        except Exception as e:
            raise DevtoolError('Exception parsing recipe %s: %s' %
                               (args.recipename, e))
        recipe_outdir = rd.getVar('D')
        if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
            raise DevtoolError('No files to deploy - have you built the %s '
                               'recipe? If so, the install step has not installed '
                               'any files.' % args.recipename)

        if args.strip and not args.dry_run:
            # Fakeroot copy to new destination so the original ${D} is left
            # untouched by the strip step.
            srcdir = recipe_outdir
            recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'deploy-target-stripped')
            if os.path.isdir(recipe_outdir):
                bb.utils.remove(recipe_outdir, True)
            exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
            # strip_execs needs the recipe's STRIP tool on PATH
            os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or ''])
            oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'),
                                   rd.getVar('base_libdir'))

        # Build the (target path, size-in-kiB) list used for the on-target
        # free-space check and the deployment manifest.
        filelist = []
        ftotalsize = 0
        for root, _, files in os.walk(recipe_outdir):
            for fn in files:
                # Get the size in kiB (since we'll be comparing it to the output of du -k)
                # MUST use lstat() here not stat() or getfilesize() since we don't want to
                # dereference symlinks
                fsize = int(math.ceil(float(os.lstat(os.path.join(root, fn)).st_size)/1024))
                ftotalsize += fsize
                # The path as it would appear on the target
                fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
                filelist.append((fpath, fsize))

        if args.dry_run:
            print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
            for item, _ in filelist:
                print(' %s' % item)
            return 0

        extraoptions = ''
        if args.no_host_check:
            extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
        if not args.show_status:
            extraoptions += ' -q'

        scp_port = ''
        ssh_port = ''
        if not args.port:
            # NOTE(review): register_commands() gives --port a default of '22',
            # so this error path appears unreachable in practice — confirm intent.
            raise DevtoolError("If you specify -P/--port then you must provide the port to be used to connect to the target")
        else:
            scp_port = "-P %s" % args.port
            ssh_port = "-p %s" % args.port

        # In order to delete previously deployed files and have the manifest file on
        # the target, we write out a shell script and then copy it to the target
        # so we can then run it (piping tar output to it).
        # (We cannot use scp here, because it doesn't preserve symlinks.)
        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            tmpscript = '/tmp/devtool_deploy.sh'
            tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
            shellscript = _prepare_remote_script(deploy=True,
                                                 verbose=args.show_status,
                                                 nopreserve=args.no_preserve,
                                                 nocheckspace=args.no_check_space)
            # Write out the script to a file
            with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
                f.write(shellscript)
            # Write out the file list: first line is the total size in kiB,
            # then one "<path> <size>" entry per file.
            with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
                f.write('%d\n' % ftotalsize)
                for fpath, fsize in filelist:
                    f.write('%s %d\n' % (fpath, fsize))
            # Copy them to the target
            ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
            if ret != 0:
                raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                                   'get a complete error message' % args.target)
        finally:
            shutil.rmtree(tmpdir)

        # Now run the script: stream the (possibly stripped) output tree as a
        # tar archive into the deploy script on the target.
        ret = exec_fakeroot(rd, 'tar cf - . | ssh %s %s %s \'sh %s %s %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
        if ret != 0:
            raise DevtoolError('Deploy failed - rerun with -s to get a complete '
                               'error message')

        logger.info('Successfully deployed %s' % recipe_outdir)

        # Build the list of deployed target paths (computed but not returned
        # here; the caller sees only the exit code).
        files_list = []
        for root, _, files in os.walk(recipe_outdir):
            for filename in files:
                filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
                files_list.append(os.path.join(destdir, filename))
    finally:
        tinfoil.shutdown()

    return 0
|
||||
|
||||
def undeploy(args, config, basepath, workspace):
    """Entry point for the devtool 'undeploy' subcommand.

    Removes files previously deployed by 'devtool deploy-target' from a live
    target, either for a single recipe or (with -a/--all) for everything,
    by copying a generated shell script to the target and running it there.

    Returns 0 on success; raises DevtoolError / ArgumentUsageError otherwise.
    """
    # -a/--all and an explicit recipe name are mutually exclusive; exactly
    # one of the two must be supplied.
    if args.all and args.recipename:
        raise argparse_oe.ArgumentUsageError('Cannot specify -a/--all with a recipe name', 'undeploy-target')
    elif not args.recipename and not args.all:
        raise argparse_oe.ArgumentUsageError('If you don\'t specify a recipe, you must specify -a/--all', 'undeploy-target')

    extraoptions = ''
    if args.no_host_check:
        extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
    if not args.show_status:
        extraoptions += ' -q'

    scp_port = ''
    ssh_port = ''
    if not args.port:
        # NOTE(review): register_commands() gives --port a default of '22',
        # so this error path appears unreachable in practice — confirm intent.
        raise DevtoolError("If you specify -P/--port then you must provide the port to be used to connect to the target")
    else:
        scp_port = "-P %s" % args.port
        ssh_port = "-p %s" % args.port

    # Drop any ':destdir' suffix - only the host part is needed here
    args.target = args.target.split(':')[0]

    tmpdir = tempfile.mkdtemp(prefix='devtool')
    try:
        tmpscript = '/tmp/devtool_undeploy.sh'
        shellscript = _prepare_remote_script(deploy=False, dryrun=args.dry_run, undeployall=args.all)
        # Write out the script to a file
        with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
            f.write(shellscript)
        # Copy it to the target
        ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
        if ret != 0:
            raise DevtoolError('Failed to copy script to %s - rerun with -s to '
                               'get a complete error message' % args.target)
    finally:
        shutil.rmtree(tmpdir)

    # Now run the script
    ret = subprocess.call('ssh %s %s %s \'sh %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
    if ret != 0:
        raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
                           'error message')

    if not args.all and not args.dry_run:
        logger.info('Successfully undeployed %s' % args.recipename)
    return 0
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the deploy plugin.

    Adds the 'deploy-target' and 'undeploy-target' subcommands to the devtool
    argument parser.
    """

    parser_deploy = subparsers.add_parser('deploy-target',
                                          help='Deploy recipe output files to live target machine',
                                          description='Deploys a recipe\'s build output (i.e. the output of the do_install task) to a live target machine over ssh. By default, any existing files will be preserved instead of being overwritten and will be restored if you run devtool undeploy-target. Note: this only deploys the recipe itself and not any runtime dependencies, so it is assumed that those have been installed on the target beforehand.',
                                          group='testbuild')
    parser_deploy.add_argument('recipename', help='Recipe to deploy')
    parser_deploy.add_argument('target', help='Live target machine running an ssh server: user@hostname[:destdir]')
    parser_deploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    parser_deploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
    parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
    parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
    parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
    parser_deploy.add_argument('-P', '--port', default='22', help='Port to use for connection to the target')

    # --strip / --no-strip are mutually exclusive; the default for --strip is
    # read from the [Deploy] section of the devtool configuration file.
    strip_opts = parser_deploy.add_mutually_exclusive_group(required=False)
    strip_opts.add_argument('-S', '--strip',
                            help='Strip executables prior to deploying (default: %(default)s). '
                                 'The default value of this option can be controlled by setting the strip option in the [Deploy] section to True or False.',
                            default=oe.types.boolean(context.config.get('Deploy', 'strip', default='0')),
                            action='store_true')
    strip_opts.add_argument('--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')

    parser_deploy.set_defaults(func=deploy)

    parser_undeploy = subparsers.add_parser('undeploy-target',
                                            help='Undeploy recipe output files in live target machine',
                                            description='Un-deploys recipe output files previously deployed to a live target machine by devtool deploy-target.',
                                            group='testbuild')
    parser_undeploy.add_argument('recipename', help='Recipe to undeploy (if not using -a/--all)', nargs='?')
    parser_undeploy.add_argument('target', help='Live target machine running an ssh server: user@hostname')
    parser_undeploy.add_argument('-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
    parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
    parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
    parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
    parser_undeploy.add_argument('-P', '--port', default='22', help='Port to use for connection to the target')
    parser_undeploy.set_defaults(func=undeploy)
|
119
scripts/lib/devtool/export.py
Normal file
119
scripts/lib/devtool/export.py
Normal file
@@ -0,0 +1,119 @@
|
||||
# Development tool - export command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2017 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
"""Devtool export plugin"""
|
||||
|
||||
import os
|
||||
import argparse
|
||||
import tarfile
|
||||
import logging
|
||||
import datetime
|
||||
import json
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
# output files
|
||||
default_arcname_prefix = "workspace-export"
|
||||
metadata = '.export_metadata'
|
||||
|
||||
def export(args, config, basepath, workspace):
    """Entry point for the devtool 'export' subcommand.

    Creates a gzipped tar archive containing the selected workspace recipes
    (appends, recipe files and source trees) plus a metadata file describing
    the workspace, suitable for later use with 'devtool import'.

    Returns 0 on success (including "nothing to export"), 1 on error.
    """

    def add_metadata(tar):
        """Archive the workspace object"""
        # finally store the workspace metadata
        with open(metadata, 'w') as fd:
            fd.write(json.dumps((config.workspace_path, workspace)))
        tar.add(metadata)
        os.unlink(metadata)

    def add_recipe(tar, recipe, data):
        """Archive recipe with proper arcname"""
        # Create a map of name/arcnames
        arcnames = []
        for key, name in data.items():
            if name:
                if key == 'srctree':
                    # all sources, no matter where are located, goes into the sources directory
                    arcname = 'sources/%s' % recipe
                else:
                    arcname = name.replace(config.workspace_path, '')
                arcnames.append((name, arcname))

        for name, arcname in arcnames:
            tar.add(name, arcname=arcname)

    # Make sure workspace is non-empty and possible listed include/excluded recipes are in workspace
    if not workspace:
        logger.info('Workspace contains no recipes, nothing to export')
        return 0
    else:
        for param, recipes in {'include':args.include,'exclude':args.exclude}.items():
            for recipe in recipes:
                if recipe not in workspace:
                    logger.error('Recipe (%s) on %s argument not in the current workspace' % (recipe, param))
                    return 1

    name = args.file

    default_name = "%s-%s.tar.gz" % (default_arcname_prefix, datetime.datetime.now().strftime('%Y%m%d%H%M%S'))
    if not name:
        name = default_name
    else:
        # if name is a directory, append the default name
        if os.path.isdir(name):
            name = os.path.join(name, default_name)

    if os.path.exists(name) and not args.overwrite:
        # Fix: the message contained a %s placeholder but the archive name was
        # never interpolated, so the literal '%s' was printed to the user.
        logger.error('Tar archive %s exists. Use --overwrite/-o to overwrite it' % name)
        return 1

    # if all workspace is excluded, quit
    if not len(set(workspace.keys()).difference(set(args.exclude))):
        logger.warn('All recipes in workspace excluded, nothing to export')
        return 0

    exported = []
    with tarfile.open(name, 'w:gz') as tar:
        if args.include:
            for recipe in args.include:
                add_recipe(tar, recipe, workspace[recipe])
                exported.append(recipe)
        else:
            for recipe, data in workspace.items():
                if recipe not in args.exclude:
                    add_recipe(tar, recipe, data)
                    exported.append(recipe)

        add_metadata(tar)

    logger.info('Tar archive created at %s with the following recipes: %s' % (name, ', '.join(exported)))
    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool export subcommands.

    Adds the 'export' subcommand; --include and --exclude are mutually
    exclusive ways of selecting which workspace recipes end up in the
    archive.
    """
    parser = subparsers.add_parser('export',
                                   help='Export workspace into a tar archive',
                                   description='Export one or more recipes from current workspace into a tar archive',
                                   group='advanced')

    parser.add_argument('--file', '-f', help='Output archive file name')
    parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite previous export tar archive')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--include', '-i', nargs='+', default=[], help='Include recipes into the tar archive')
    group.add_argument('--exclude', '-e', nargs='+', default=[], help='Exclude recipes into the tar archive')
    parser.set_defaults(func=export)
|
144
scripts/lib/devtool/import.py
Normal file
144
scripts/lib/devtool/import.py
Normal file
@@ -0,0 +1,144 @@
|
||||
# Development tool - import command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2017 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
"""Devtool import plugin"""
|
||||
|
||||
import os
|
||||
import tarfile
|
||||
import logging
|
||||
import collections
|
||||
import json
|
||||
import fnmatch
|
||||
|
||||
from devtool import standard, setup_tinfoil, replace_from_file, DevtoolError
|
||||
from devtool import export
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def devimport(args, config, basepath, workspace):
    """Entry point for the devtool 'import' subcommand.

    Extracts a tar archive previously created by 'devtool export' into the
    current workspace. Appends whose corresponding recipe cannot be matched
    in the current metadata are skipped, and exported workspace paths inside
    append files are rewritten to point at the local workspace.

    Returns 0 on success; raises DevtoolError on a missing/invalid archive.
    """

    def get_pn(name):
        """ Returns the filename of a workspace recipe/append"""
        metadata = name.split('/')[-1]
        fn, _ = os.path.splitext(metadata)
        return fn

    if not os.path.exists(args.file):
        raise DevtoolError('Tar archive %s does not exist. Export your workspace using "devtool export"' % args.file)

    with tarfile.open(args.file) as tar:
        # Get exported metadata
        export_workspace_path = export_workspace = None
        try:
            metadata = tar.getmember(export.metadata)
        except KeyError as ke:
            raise DevtoolError('The export metadata file created by "devtool export" was not found. "devtool import" can only be used to import tar archives created by "devtool export".')

        tar.extract(metadata)
        with open(metadata.name) as fdm:
            export_workspace_path, export_workspace = json.load(fdm)
        os.unlink(metadata.name)

        members = tar.getmembers()

        # Get appends and recipes from the exported archive, these
        # will be needed to find out those appends without corresponding
        # recipe pair
        append_fns, recipe_fns = set(), set()
        for member in members:
            if member.name.startswith('appends'):
                append_fns.add(get_pn(member.name))
            elif member.name.startswith('recipes'):
                recipe_fns.add(get_pn(member.name))

        # Setup tinfoil, get required data and shutdown
        tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
        try:
            current_fns = [os.path.basename(recipe[0]) for recipe in tinfoil.cooker.recipecaches[''].pkg_fn.items()]
        finally:
            tinfoil.shutdown()

        # Find those appends that do not have recipes in current metadata
        non_importables = []
        for fn in append_fns - recipe_fns:
            # Check on current metadata (covering those layers indicated in bblayers.conf)
            for current_fn in current_fns:
                if fnmatch.fnmatch(current_fn, '*' + fn.replace('%', '') + '*'):
                    break
            else:
                non_importables.append(fn)
                # Fix: log message typo 'bbapppend' -> 'bbappend'
                logger.warn('No recipe to append %s.bbappend, skipping' % fn)

        # Extract
        imported = []
        for member in members:
            if member.name == export.metadata:
                continue

            for nonimp in non_importables:
                pn = nonimp.split('_')[0]
                # do not extract data from non-importable recipes or metadata
                if member.name.startswith('appends/%s' % nonimp) or \
                   member.name.startswith('recipes/%s' % nonimp) or \
                   member.name.startswith('sources/%s' % pn):
                    break
            else:
                path = os.path.join(config.workspace_path, member.name)
                if os.path.exists(path):
                    # by default, no file overwrite is done unless -o is given by the user
                    if args.overwrite:
                        try:
                            tar.extract(member, path=config.workspace_path)
                        except PermissionError as pe:
                            logger.warn(pe)
                    else:
                        logger.warn('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
                        continue
                else:
                    tar.extract(member, path=config.workspace_path)

                # Update EXTERNALSRC and the devtool md5 file
                if member.name.startswith('appends'):
                    if export_workspace_path:
                        # appends created by 'devtool modify' just need to update the workspace
                        replace_from_file(path, export_workspace_path, config.workspace_path)

                        # appends created by 'devtool add' need replacement of exported source tree
                        pn = get_pn(member.name).split('_')[0]
                        exported_srctree = export_workspace[pn]['srctree']
                        if exported_srctree:
                            replace_from_file(path, exported_srctree, os.path.join(config.workspace_path, 'sources', pn))

                    standard._add_md5(config, pn, path)
                    imported.append(pn)

    if imported:
        logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
    else:
        logger.warn('No recipes imported into the workspace')

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool import subcommands.

    Adds the 'import' subcommand for restoring a workspace archive created
    by 'devtool export'.
    """
    parser = subparsers.add_parser('import',
                                   help='Import exported tar archive into workspace',
                                   description='Import tar archive previously created by "devtool export" into workspace',
                                   group='advanced')
    parser.add_argument('file', metavar='FILE', help='Name of the tar archive to import')
    parser.add_argument('--overwrite', '-o', action="store_true", help='Overwrite files when extracting')
    parser.set_defaults(func=devimport)
|
60
scripts/lib/devtool/package.py
Normal file
60
scripts/lib/devtool/package.py
Normal file
@@ -0,0 +1,60 @@
|
||||
# Development tool - package command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2015 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
"""Devtool plugin containing the package subcommands"""
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
from bb.process import ExecutionError
|
||||
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def package(args, config, basepath, workspace):
    """Entry point for the devtool 'package' subcommand.

    Runs the appropriate package_write_* bitbake task for the recipe and
    reports where the resulting packages were deployed.

    Returns 0 on success, or the bitbake exit code on build failure.
    """
    check_workspace_recipe(workspace, args.recipename)

    tinfoil = setup_tinfoil(basepath=basepath, config_only=True)
    try:
        # Allow the devtool config file to override the package type; fall
        # back to the distro's IMAGE_PKGTYPE.
        image_pkgtype = config.get('Package', 'image_pkgtype', '')
        if not image_pkgtype:
            image_pkgtype = tinfoil.config_data.getVar('IMAGE_PKGTYPE')

        deploy_dir_pkg = tinfoil.config_data.getVar('DEPLOY_DIR_%s' % image_pkgtype.upper())
    finally:
        tinfoil.shutdown()

    package_task = config.get('Package', 'package_task', 'package_write_%s' % image_pkgtype)
    try:
        exec_build_env_command(config.init_path, basepath, 'bitbake -c %s %s' % (package_task, args.recipename), watch=True)
    except ExecutionError as e:
        # Fix: this module only does 'from bb.process import ExecutionError',
        # so the name 'bb' is unbound here; the original
        # 'except bb.process.ExecutionError' raised a NameError whenever the
        # build actually failed.
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    logger.info('Your packages are in %s' % deploy_dir_pkg)

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the package plugin.

    The 'package' subcommand is only available inside a fixed (extensible
    SDK) setup.
    """
    if context.fixed_setup:
        parser_package = subparsers.add_parser('package',
                                               help='Build packages for a recipe',
                                               description='Builds packages for a recipe\'s output files',
                                               group='testbuild', order=-5)
        parser_package.add_argument('recipename', help='Recipe to package')
        parser_package.set_defaults(func=package)
|
74
scripts/lib/devtool/runqemu.py
Normal file
74
scripts/lib/devtool/runqemu.py
Normal file
@@ -0,0 +1,74 @@
|
||||
# Development tool - runqemu command plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Devtool runqemu plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import glob
|
||||
from devtool import exec_build_env_command, setup_tinfoil, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def runqemu(args, config, basepath, workspace):
    """Entry point for the devtool 'runqemu' subcommand.

    Boots the given (or configured default) image in QEMU using the SDK's
    native qemu-system binaries via the runqemu script.

    Returns 0 on success, or the runqemu exit code on failure; raises
    DevtoolError if QEMU is unavailable or no image name can be determined.
    """

    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        machine = tinfoil.config_data.getVar('MACHINE')
        # Native bindir: ${STAGING_DIR}/${BUILD_ARCH}/<bindir_native relative path>
        bindir_native = os.path.join(tinfoil.config_data.getVar('STAGING_DIR'),
                                     tinfoil.config_data.getVar('BUILD_ARCH'),
                                     tinfoil.config_data.getVar('bindir_native').lstrip(os.path.sep))
    finally:
        tinfoil.shutdown()

    if not glob.glob(os.path.join(bindir_native, 'qemu-system-*')):
        raise DevtoolError('QEMU is not available within this SDK')

    # Default the image name from the SDK configuration if not given
    imagename = args.imagename
    if not imagename:
        sdk_targets = config.get('SDK', 'sdk_targets', '').split()
        if sdk_targets:
            imagename = sdk_targets[0]
    if not imagename:
        raise DevtoolError('Unable to determine image name to run, please specify one')

    try:
        # FIXME runqemu assumes that if OECORE_NATIVE_SYSROOT is set then it shouldn't
        # run bitbake to find out the values of various environment variables, which
        # isn't the case for the extensible SDK. Work around it for now.
        newenv = dict(os.environ)
        newenv.pop('OECORE_NATIVE_SYSROOT', '')
        exec_build_env_command(config.init_path, basepath, 'runqemu %s %s %s' % (machine, imagename, " ".join(args.args)), watch=True, env=newenv)
    except bb.process.ExecutionError as e:
        # NOTE(review): this module only does 'import bb'; it relies on
        # bb.process being importable as an attribute at this point — confirm.
        # We've already seen the output since watch=True, so just ensure we return something to the user
        return e.exitcode

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin.

    The 'runqemu' subcommand is only available inside a fixed (extensible
    SDK) setup; any trailing arguments are forwarded to the runqemu script.
    """
    if context.fixed_setup:
        parser_runqemu = subparsers.add_parser('runqemu', help='Run QEMU on the specified image',
                                               description='Runs QEMU to boot the specified image',
                                               group='testbuild', order=-20)
        parser_runqemu.add_argument('imagename', help='Name of built image to boot within QEMU', nargs='?')
        parser_runqemu.add_argument('args', help='Any remaining arguments are passed to the runqemu script (pass --help after imagename to see what these are)',
                                    nargs=argparse.REMAINDER)
        parser_runqemu.set_defaults(func=runqemu)
|
336
scripts/lib/devtool/sdk.py
Normal file
336
scripts/lib/devtool/sdk.py
Normal file
@@ -0,0 +1,336 @@
|
||||
# Development tool - sdk-update command plugin
|
||||
#
|
||||
# Copyright (C) 2015-2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import logging
|
||||
import glob
|
||||
import shutil
|
||||
import errno
|
||||
import sys
|
||||
import tempfile
|
||||
import re
|
||||
from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def parse_locked_sigs(sigfile_path):
    """Parse a locked-sigs.inc file into a <pn:task> -> <hash> dictionary.

    Lines without a ':' are ignored; for each remaining line, everything up
    to the final ':' is the task key and the first whitespace-separated
    token after it is the signature hash.
    """
    sig_dict = {}
    with open(sigfile_path) as sigfile:
        for entry in sigfile:
            if ':' not in entry:
                continue
            taskkey, _, remainder = entry.rpartition(':')
            sig_dict[taskkey.strip()] = remainder.split()[0]
    return sig_dict
|
||||
|
||||
def generate_update_dict(sigfile_new, sigfile_old):
    """Return a dict containing <pn:task>:<hash> which indicates what need to be updated.

    An entry is included when the task is new (absent from the old sigfile)
    or when its signature hash differs between the two files.
    """
    new_sigs = parse_locked_sigs(sigfile_new)
    old_sigs = parse_locked_sigs(sigfile_old)
    return {taskkey: sig for taskkey, sig in new_sigs.items()
            if old_sigs.get(taskkey) != sig}
|
||||
|
||||
def get_sstate_objects(update_dict, sstate_dir):
    """Return a list containing sstate objects which are to be installed.

    For each <pn:task> -> <hash> entry, looks for a .tgz sstate object whose
    name contains the hash, both directly under the two-character hash prefix
    directory and one extra directory level down. Exactly one match is
    accepted; multiple matches are reported as an error and skipped.
    """
    sstate_objects = []
    for taskkey in update_dict:
        hashval = update_dict[taskkey]
        patterns = (
            sstate_dir + '/' + hashval[:2] + '/*' + hashval + '*.tgz',
            sstate_dir + '/*/' + hashval[:2] + '/*' + hashval + '*.tgz',
        )
        matches = set()
        for pattern in patterns:
            matches.update(glob.glob(pattern))
        matches = list(matches)
        if len(matches) == 1:
            sstate_objects.extend(matches)
        elif len(matches) > 1:
            logger.error("More than one matching sstate object found for %s" % hashval)

    return sstate_objects
|
||||
|
||||
def mkdir(d):
    """Create directory *d* (and parents), ignoring 'already exists' errors.

    Any other OSError (e.g. permission denied) is propagated unchanged.
    """
    try:
        os.makedirs(d)
    except OSError as e:
        if e.errno != errno.EEXIST:
            # Fix: bare 'raise' re-raises the original exception with its
            # traceback intact, instead of 'raise e' which restarts it here.
            raise
|
||||
|
||||
def install_sstate_objects(sstate_objects, src_sdk, dest_sdk):
    """Install sstate objects into destination SDK.

    Each object path in *sstate_objects* is copied into *dest_sdk*, with the
    *src_sdk* path prefix rewritten to *dest_sdk*.

    Raises:
        DevtoolError: if *dest_sdk* has no sstate-cache directory (i.e. it
            does not look like an extensible SDK).
    """
    sstate_dir = os.path.join(dest_sdk, 'sstate-cache')
    if not os.path.exists(sstate_dir):
        # Fix: the original used a bare 'raise' with no active exception,
        # which itself raises RuntimeError instead of a meaningful error.
        raise DevtoolError("Missing sstate-cache directory in %s, it might not be an extensible SDK." % dest_sdk)
    for sb in sstate_objects:
        dst = sb.replace(src_sdk, dest_sdk)
        destdir = os.path.dirname(dst)
        mkdir(destdir)
        logger.debug("Copying %s to %s" % (sb, dst))
        shutil.copy(sb, dst)
|
||||
|
||||
def check_manifest(fn, basepath):
    """Return the list of manifest entries whose on-disk checksum differs.

    Each manifest line is '<sha256> <relative path>'; the file at
    basepath/<relative path> is re-hashed and compared against the recorded
    checksum. Lines with fewer than two fields are ignored.
    """
    import bb.utils
    changedfiles = []
    with open(fn, 'r') as manifest:
        for entry in manifest:
            fields = entry.split()
            if len(fields) < 2:
                continue
            chksum, fpath = fields[0], fields[1]
            curr_chksum = bb.utils.sha256_file(os.path.join(basepath, fpath))
            if chksum != curr_chksum:
                logger.debug('File %s changed: old csum = %s, new = %s' % (os.path.join(basepath, fpath), curr_chksum, chksum))
                changedfiles.append(fpath)
    return changedfiles
|
||||
|
||||
def sdk_update(args, config, basepath, workspace):
    """Entry point for devtool sdk-update command.

    Fetches a manifest and updated metadata/conf files from an update
    server, swaps them into the installed extensible SDK, and (unless
    --skip-prepare) re-runs bitbake setscene tasks to warm the sysroot.
    Returns 0 on success, non-zero on failure.
    """
    updateserver = args.updateserver
    if not updateserver:
        # Fall back to the server configured in the SDK's devtool config
        updateserver = config.get('SDK', 'updateserver', '')
    logger.debug("updateserver: %s" % updateserver)

    # Make sure we are using sdk-update from within SDK
    logger.debug("basepath = %s" % basepath)
    old_locked_sig_file_path = os.path.join(basepath, 'conf/locked-sigs.inc')
    if not os.path.exists(old_locked_sig_file_path):
        logger.error("Not using devtool's sdk-update command from within an extensible SDK. Please specify correct basepath via --basepath option")
        return -1
    else:
        logger.debug("Found conf/locked-sigs.inc in %s" % basepath)

    if not '://' in updateserver:
        logger.error("Update server must be a URL")
        return -1

    layers_dir = os.path.join(basepath, 'layers')
    conf_dir = os.path.join(basepath, 'conf')

    # Grab variable values
    tinfoil = setup_tinfoil(config_only=True, basepath=basepath)
    try:
        stamps_dir = tinfoil.config_data.getVar('STAMPS_DIR')
        sstate_mirrors = tinfoil.config_data.getVar('SSTATE_MIRRORS')
        site_conf_version = tinfoil.config_data.getVar('SITE_CONF_VERSION')
    finally:
        tinfoil.shutdown()

    # Stage everything in a temp dir; only moved into place once complete
    tmpsdk_dir = tempfile.mkdtemp()
    try:
        os.makedirs(os.path.join(tmpsdk_dir, 'conf'))
        # NOTE(review): new_locked_sig_file_path is assigned but not used
        # below - presumably a leftover; confirm before removing.
        new_locked_sig_file_path = os.path.join(tmpsdk_dir, 'conf', 'locked-sigs.inc')
        # Fetch manifest from server
        tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
        ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
        changedfiles = check_manifest(tmpmanifest, basepath)
        if not changedfiles:
            logger.info("Already up-to-date")
            return 0
        # Update metadata
        logger.debug("Updating metadata via git ...")
        #Check for the status before doing a fetch and reset
        if os.path.exists(os.path.join(basepath, 'layers/.git')):
            out = subprocess.check_output("git status --porcelain", shell=True, cwd=layers_dir)
            if not out:
                # Clean tree: fast-forward/reset to the upstream branch
                ret = subprocess.call("git fetch --all; git reset --hard @{u}", shell=True, cwd=layers_dir)
            else:
                logger.error("Failed to update metadata as there have been changes made to it. Aborting.");
                logger.error("Changed files:\n%s" % out);
                return -1
        else:
            # No git checkout present - force the clone path below
            ret = -1
        if ret != 0:
            # Fall back to cloning the layers repo fresh into the temp dir
            ret = subprocess.call("git clone %s/layers/.git" % updateserver, shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating metadata via git failed")
                return ret
        logger.debug("Updating conf files ...")
        for changedfile in changedfiles:
            ret = subprocess.call("wget -q -O %s %s/%s" % (changedfile, updateserver, changedfile), shell=True, cwd=tmpsdk_dir)
            if ret != 0:
                logger.error("Updating %s failed" % changedfile)
                return ret

        # Check if UNINATIVE_CHECKSUM changed
        uninative = False
        if 'conf/local.conf' in changedfiles:
            def read_uninative_checksums(fn):
                # Parse UNINATIVE_CHECKSUM[<arch>] = "<sum>" lines into
                # (arch, checksum) tuples
                chksumitems = []
                with open(fn, 'r') as f:
                    for line in f:
                        if line.startswith('UNINATIVE_CHECKSUM'):
                            splitline = re.split(r'[\[\]"\']', line)
                            if len(splitline) > 3:
                                chksumitems.append((splitline[1], splitline[3]))
                return chksumitems

            oldsums = read_uninative_checksums(os.path.join(basepath, 'conf/local.conf'))
            newsums = read_uninative_checksums(os.path.join(tmpsdk_dir, 'conf/local.conf'))
            if oldsums != newsums:
                uninative = True
                # Fetch the uninative tarball for each new (arch, checksum)
                # NOTE(review): ret from this wget is not checked - a failed
                # download goes unnoticed here; confirm whether intentional.
                for buildarch, chksum in newsums:
                    uninative_file = os.path.join('downloads', 'uninative', chksum, '%s-nativesdk-libc.tar.bz2' % buildarch)
                    mkdir(os.path.join(tmpsdk_dir, os.path.dirname(uninative_file)))
                    ret = subprocess.call("wget -q -O %s %s/%s" % (uninative_file, updateserver, uninative_file), shell=True, cwd=tmpsdk_dir)

        # Ok, all is well at this point - move everything over
        tmplayers_dir = os.path.join(tmpsdk_dir, 'layers')
        if os.path.exists(tmplayers_dir):
            shutil.rmtree(layers_dir)
            shutil.move(tmplayers_dir, layers_dir)
        for changedfile in changedfiles:
            destfile = os.path.join(basepath, changedfile)
            os.remove(destfile)
            shutil.move(os.path.join(tmpsdk_dir, changedfile), destfile)
        os.remove(os.path.join(conf_dir, 'sdk-conf-manifest'))
        shutil.move(tmpmanifest, conf_dir)
        if uninative:
            shutil.rmtree(os.path.join(basepath, 'downloads', 'uninative'))
            shutil.move(os.path.join(tmpsdk_dir, 'downloads', 'uninative'), os.path.join(basepath, 'downloads'))

        if not sstate_mirrors:
            # Point SSTATE_MIRRORS at the update server's sstate cache
            with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
                f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
                f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)
    finally:
        shutil.rmtree(tmpsdk_dir)

    if not args.skip_prepare:
        # Find all potentially updateable tasks
        sdk_update_targets = []
        tasks = ['do_populate_sysroot', 'do_packagedata']
        for root, _, files in os.walk(stamps_dir):
            for fn in files:
                if not '.sigdata.' in fn:
                    for task in tasks:
                        if '.%s.' % task in fn or '.%s_setscene.' % task in fn:
                            sdk_update_targets.append('%s:%s' % (os.path.basename(root), task))
        # Run bitbake command for the whole SDK
        logger.info("Preparing build system... (This may take some time.)")
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake --setscene-only %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            # A dry run afterwards reveals tasks that could not be satisfied
            # from sstate (they would need to actually execute)
            output, _ = exec_build_env_command(config.init_path, basepath, 'bitbake -n %s' % ' '.join(sdk_update_targets), stderr=subprocess.STDOUT)
            runlines = []
            for line in output.splitlines():
                if 'Running task ' in line:
                    runlines.append(line)
            if runlines:
                logger.error('Unexecuted tasks found in preparation log:\n %s' % '\n '.join(runlines))
                return -1
        except bb.process.ExecutionError as e:
            logger.error('Preparation failed:\n%s' % e.stdout)
            return -1
    return 0
|
||||
|
||||
def sdk_install(args, config, basepath, workspace):
    """Entry point for the devtool sdk-install command.

    Installs the development artifacts (sysroot files and package data)
    for the requested recipes into the extensible SDK, preferably from
    sstate (unless --allow-build is given).
    """

    import oe.recipeutils
    import bb.process

    for recipe in args.recipename:
        if recipe in workspace:
            raise DevtoolError('recipe %s is a recipe in your workspace' % recipe)

    tasks = ['do_populate_sysroot', 'do_packagedata']
    stampprefixes = {}
    def checkstamp(recipe):
        # A recipe counts as installed if a non-sigdata stamp exists for
        # do_populate_sysroot (regular or setscene variant)
        stampprefix = stampprefixes[recipe]
        stamps = glob.glob(stampprefix + '*')
        for stamp in stamps:
            if '.sigdata.' not in stamp and stamp.startswith((stampprefix + '.', stampprefix + '_setscene.')):
                return True
        else:
            return False

    install_recipes = []
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        for recipe in args.recipename:
            rd = parse_recipe(config, tinfoil, recipe, True)
            if not rd:
                return 1
            stampprefixes[recipe] = '%s.%s' % (rd.getVar('STAMP'), tasks[0])
            if checkstamp(recipe):
                logger.info('%s is already installed' % recipe)
            else:
                install_recipes.append(recipe)
    finally:
        tinfoil.shutdown()

    if install_recipes:
        logger.info('Installing %s...' % ', '.join(install_recipes))
        install_tasks = []
        for recipe in install_recipes:
            for task in tasks:
                # Native recipes have no packagedata
                if recipe.endswith('-native') and 'package' in task:
                    continue
                install_tasks.append('%s:%s' % (recipe, task))
        options = ''
        if not args.allow_build:
            options += ' --setscene-only'
        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake %s %s' % (options, ' '.join(install_tasks)), watch=True)
        except bb.process.ExecutionError as e:
            raise DevtoolError('Failed to install %s:\n%s' % (recipe, str(e)))
        failed = False
        for recipe in install_recipes:
            if checkstamp(recipe):
                logger.info('Successfully installed %s' % recipe)
            else:
                raise DevtoolError('Failed to install %s - unavailable' % recipe)
                # NOTE(review): unreachable - the raise above exits the loop,
                # so 'failed' is never set and the 'return 2' below is dead
                # code; confirm whether the raise should have been a
                # logger.error instead.
                failed = True
        if failed:
            return 2

        try:
            exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True)
        except bb.process.ExecutionError as e:
            raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from the sdk plugin"""
    if not context.fixed_setup:
        # sdk-update / sdk-install only make sense inside an extensible SDK
        return

    parser_sdk = subparsers.add_parser('sdk-update',
                                       help='Update SDK components',
                                       description='Updates installed SDK components from a remote server',
                                       group='sdk')
    updateserver = context.config.get('SDK', 'updateserver', '')
    if updateserver:
        # A configured default makes the positional argument optional
        parser_sdk.add_argument('updateserver', nargs='?',
                                help='The update server to fetch latest SDK components from (default %s)' % updateserver)
    else:
        parser_sdk.add_argument('updateserver',
                                help='The update server to fetch latest SDK components from')
    parser_sdk.add_argument('--skip-prepare', action="store_true",
                            help='Skip re-preparing the build system after updating (for debugging only)')
    parser_sdk.set_defaults(func=sdk_update)

    parser_sdk_install = subparsers.add_parser('sdk-install',
                                               help='Install additional SDK components',
                                               description='Installs additional recipe development files into the SDK. (You can use "devtool search" to find available recipes.)',
                                               group='sdk')
    parser_sdk_install.add_argument('recipename', nargs='+',
                                    help='Name of the recipe to install the development artifacts for')
    parser_sdk_install.add_argument('-s', '--allow-build', action='store_true',
                                    help='Allow building requested item(s) from source')
    parser_sdk_install.set_defaults(func=sdk_install)
|
88
scripts/lib/devtool/search.py
Normal file
88
scripts/lib/devtool/search.py
Normal file
@@ -0,0 +1,88 @@
|
||||
# Development tool - search command plugin
|
||||
#
|
||||
# Copyright (C) 2015 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Devtool search plugin"""
|
||||
|
||||
import os
|
||||
import bb
|
||||
import logging
|
||||
import argparse
|
||||
import re
|
||||
from devtool import setup_tinfoil, parse_recipe, DevtoolError
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def search(args, config, basepath, workspace):
    """Entry point for the devtool 'search' subcommand.

    Matches args.keyword (a regular expression) against recipe names,
    package names and selected pkgdata fields, printing each matching
    recipe name with its SUMMARY. Returns 0.
    """

    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
        # Unexpanded default SUMMARY, used to suppress boilerplate summaries
        defsummary = tinfoil.config_data.getVar('SUMMARY', False) or ''

        keyword_rc = re.compile(args.keyword)

        for fn in os.listdir(pkgdata_dir):
            pfn = os.path.join(pkgdata_dir, fn)
            if not os.path.isfile(pfn):
                continue

            packages = []
            match = False
            # Cheapest check first: the recipe name itself
            if keyword_rc.search(fn):
                match = True

            if not match:
                with open(pfn, 'r') as f:
                    for line in f:
                        if line.startswith('PACKAGES:'):
                            packages = line.split(':', 1)[1].strip().split()

                for pkg in packages:
                    if keyword_rc.search(pkg):
                        match = True
                        break
                    # Only consult runtime pkgdata for packages that were
                    # actually packaged
                    if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
                        with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
                            for line in f:
                                if ': ' in line:
                                    splitline = line.split(':', 1)
                                    key = splitline[0]
                                    value = splitline[1].strip()
                                    # Search renamed package, description,
                                    # file list and file-level provides
                                    if key in ['PKG_%s' % pkg, 'DESCRIPTION', 'FILES_INFO'] or key.startswith('FILERPROVIDES_'):
                                        if keyword_rc.search(value):
                                            match = True
                                            break

            if match:
                rd = parse_recipe(config, tinfoil, fn, True)
                summary = rd.getVar('SUMMARY')
                # Hide the summary when it's just the expanded default
                if summary == rd.expand(defsummary):
                    summary = ''
                print("%s %s" % (fn.ljust(20), summary))
    finally:
        tinfoil.shutdown()

    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""
    parser_search = subparsers.add_parser(
        'search',
        help='Search available recipes',
        description='Searches for available target recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name on match.',
        group='info')
    parser_search.add_argument('keyword',
                               help='Keyword to search for (regular expression syntax allowed)')
    # no_workspace: search does not require a devtool workspace to exist
    parser_search.set_defaults(func=search, no_workspace=True)
|
2153
scripts/lib/devtool/standard.py
Normal file
2153
scripts/lib/devtool/standard.py
Normal file
File diff suppressed because it is too large
Load Diff
521
scripts/lib/devtool/upgrade.py
Normal file
521
scripts/lib/devtool/upgrade.py
Normal file
@@ -0,0 +1,521 @@
|
||||
# Development tool - upgrade command plugin
|
||||
#
|
||||
# Copyright (C) 2014-2017 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
"""Devtool upgrade plugin"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import shutil
|
||||
import tempfile
|
||||
import logging
|
||||
import argparse
|
||||
import scriptutils
|
||||
import errno
|
||||
import bb
|
||||
|
||||
devtool_path = os.path.dirname(os.path.realpath(__file__)) + '/../../../meta/lib'
|
||||
sys.path = sys.path + [devtool_path]
|
||||
|
||||
import oe.recipeutils
|
||||
from devtool import standard
|
||||
from devtool import exec_build_env_command, setup_tinfoil, DevtoolError, parse_recipe, use_external_build, update_unlockedsigs, check_prerelease_version
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def _run(cmd, cwd=''):
    """Execute a shell command via bb.process.run, logging the invocation."""
    logger.debug("Running command %s> %s" % (cwd,cmd))
    return bb.process.run(str(cmd), cwd=cwd)
|
||||
|
||||
def _get_srctree(tmpdir):
    """Return the source tree root inside an extraction directory.

    If tmpdir contains exactly one entry, the unpacked source lives in that
    single subdirectory; otherwise tmpdir itself is the source tree.
    """
    entries = os.listdir(tmpdir)
    if len(entries) == 1:
        return os.path.join(tmpdir, entries[0])
    return tmpdir
|
||||
|
||||
def _copy_source_code(orig, dest):
    """Move every tracked file under orig into dest, creating directories."""
    for relpath in standard._ls_tree(orig):
        target = os.path.join(dest, relpath)
        bb.utils.mkdirhier(os.path.dirname(target))
        shutil.move(os.path.join(orig, relpath), target)
|
||||
|
||||
def _remove_patch_dirs(recipefolder):
    """Delete every subdirectory beneath recipefolder, keeping plain files."""
    for parent, subdirs, _files in os.walk(recipefolder):
        for name in subdirs:
            shutil.rmtree(os.path.join(parent, name))
|
||||
|
||||
def _recipe_contains(rd, var):
    """Return True if *var* is set within the recipe's own directory tree
    (as opposed to an inherited class or configuration file)."""
    recipefile = rd.getVar('FILE')
    recipedir = os.path.dirname(recipefile) + os.sep
    varfiles = oe.recipeutils.get_var_files(recipefile, [var], rd)
    return any(fn and fn.startswith(recipedir) for fn in varfiles.values())
|
||||
|
||||
def _rename_recipe_dirs(oldpv, newpv, path):
    """Rename directories and .inc files under *path* that embed oldpv.

    Walks the whole tree; any directory or .inc file whose name contains
    the old version string is renamed with newpv substituted.
    """
    for root, dirs, files in os.walk(path):
        # Rename directories with the version in their name
        for olddir in dirs:
            if olddir.find(oldpv) != -1:
                newdir = olddir.replace(oldpv, newpv)
                if olddir != newdir:
                    # FIX: join against the walk root, not the top-level
                    # path - entries found in subdirectories previously
                    # resolved to a non-existent location.
                    shutil.move(os.path.join(root, olddir), os.path.join(root, newdir))
        # Rename any inc files with the version in their name (unusual, but possible)
        for oldfile in files:
            if oldfile.endswith('.inc'):
                if oldfile.find(oldpv) != -1:
                    newfile = oldfile.replace(oldpv, newpv)
                    if oldfile != newfile:
                        os.rename(os.path.join(root, oldfile), os.path.join(root, newfile))
|
||||
|
||||
def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
    """Rename a versioned recipe file under *path* to the new version.

    Non-versioned names (e.g. foo_git.bb) are left untouched. Returns the
    full path of the (possibly renamed) recipe.
    """
    basename = os.path.basename(oldrecipe)
    if not basename.endswith('_%s.bb' % oldpv):
        # Filename does not embed the version - nothing to rename
        return os.path.join(path, basename)
    newrecipe = '%s_%s.bb' % (bpn, newpv)
    if basename != newrecipe:
        shutil.move(os.path.join(path, basename), os.path.join(path, newrecipe))
    return os.path.join(path, newrecipe)
|
||||
|
||||
def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
    """Rename version-containing dirs/files under *path*, then the recipe
    file itself, returning the recipe's new full path."""
    _rename_recipe_dirs(oldpv, newpv, path)
    return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
|
||||
|
||||
def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d):
    """Write a workspace bbappend for recipe file *rc* pointing at srctree.

    Returns the path of the written bbappend. Raises DevtoolError if the
    recipe file does not exist.
    """
    if not os.path.exists(rc):
        raise DevtoolError("bbappend not created because %s does not exist" % rc)

    appendpath = os.path.join(workspace, 'appends')
    if not os.path.exists(appendpath):
        bb.utils.mkdirhier(appendpath)

    brf = os.path.basename(os.path.splitext(rc)[0]) # rc basename

    srctree = os.path.abspath(srctree)
    pn = d.getVar('PN')
    af = os.path.join(appendpath, '%s.bbappend' % brf)
    with open(af, 'w') as f:
        f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n')
        f.write('inherit externalsrc\n')
        # FIX: the two implicitly-concatenated literals were missing a
        # separating space, emitting "affectingmultiple" into the file.
        f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                 'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
        f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree))
        b_is_s = use_external_build(same_dir, no_same_dir, d)
        if b_is_s:
            f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))
        f.write('\n')
        if rev:
            f.write('# initial_rev: %s\n' % rev)
        if copied:
            f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
            f.write('# original_files: %s\n' % ' '.join(copied))
    return af
|
||||
|
||||
def _cleanup_on_error(rf, srctree):
    """Remove the partially-created recipe folder and source tree after a
    failed upgrade, pruning the recipes folder if it becomes empty."""
    rfp = os.path.split(rf)[0] # recipe folder
    rfpp = os.path.split(rfp)[0] # recipes folder
    if os.path.exists(rfp):
        # FIX: originally 'shutil.rmtree(b)' - 'b' is undefined and raised
        # NameError; the intent is clearly to remove the recipe folder.
        shutil.rmtree(rfp)
        if not len(os.listdir(rfpp)):
            os.rmdir(rfpp)
    srctree = os.path.abspath(srctree)
    if os.path.exists(srctree):
        shutil.rmtree(srctree)
|
||||
|
||||
def _upgrade_error(e, rf, srctree):
    """Log an upgrade failure, clean up any partial results, and raise
    DevtoolError with the given message/exception *e*."""
    if rf:
        # FIX: originally called 'cleanup_on_error', which does not exist;
        # the helper is named '_cleanup_on_error'.
        _cleanup_on_error(rf, srctree)
    logger.error(e)
    raise DevtoolError(e)
|
||||
|
||||
def _get_uri(rd):
    """Return (srcuri, srcrev) for the first non-local SRC_URI entry of rd.

    Any ';rev=' parameter is stripped from the URI and returned as the
    revision; otherwise the revision defaults to '${AUTOREV}'.
    """
    entries = rd.getVar('SRC_URI').split()
    if not len(entries):
        raise DevtoolError('SRC_URI not found on recipe')
    # Get first non-local entry in SRC_URI - usually by convention it's
    # the first entry, but not always!
    remote = next((entry for entry in entries if not entry.startswith('file://')), None)
    if not remote:
        raise DevtoolError('Unable to find non-local entry in SRC_URI')
    revision = '${AUTOREV}'
    if '://' in remote:
        # Fetch a URL
        rev_re = re.compile(';rev=([^;]+)')
        found = rev_re.search(remote)
        if found:
            revision = found.group(1)
            remote = rev_re.sub('', remote)
    return remote, revision
|
||||
|
||||
def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, keep_temp, tinfoil, rd):
    """Extract sources of a recipe with a new version.

    For git:// recipes, checks out the new revision in-place; for other
    URLs, fetches and unpacks the new tarball and replaces the tree's
    contents. Unless no_patch, rebases existing patches onto the new base.
    Returns (rev, md5, sha256, srcbranch, srcsubdir_rel).
    """

    def __run(cmd):
        """Simple wrapper which calls _run with srctree as cwd"""
        return _run(cmd, srctree)

    crd = rd.createCopy()

    pv = crd.getVar('PV')
    crd.setVar('PV', newpv)

    tmpsrctree = None
    uri, rev = _get_uri(crd)
    if srcrev:
        rev = srcrev
    if uri.startswith('git://'):
        # Git recipe: update the existing clone to the requested revision
        __run('git fetch')
        __run('git checkout %s' % rev)
        __run('git tag -f devtool-base-new')
        # No archive checksums apply for git sources
        md5 = None
        sha256 = None
        _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
        srcsubdir_rel = params.get('destsuffix', 'git')
        if not srcbranch:
            # Work out which branch(es) contain the requested revision
            check_branch, check_branch_err = __run('git branch -r --contains %s' % srcrev)
            get_branch = [x.strip() for x in check_branch.splitlines()]
            # Remove HEAD reference point and drop remote prefix
            get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
            if 'master' in get_branch:
                # If it is master, we do not need to append 'branch=master' as this is default.
                # Even with the case where get_branch has multiple objects, if 'master' is one
                # of them, we should default take from 'master'
                srcbranch = ''
            elif len(get_branch) == 1:
                # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
                srcbranch = get_branch[0]
            else:
                # If get_branch contains more than one objects, then display error and exit.
                mbrch = '\n ' + '\n '.join(get_branch)
                raise DevtoolError('Revision %s was found on multiple branches: %s\nPlease provide the correct branch in the devtool command with "--srcbranch" or "-B" option.' % (srcrev, mbrch))
    else:
        # Non-git source: start a fresh branch from the original base and
        # replace the tree contents with the newly fetched archive
        __run('git checkout devtool-base -b devtool-%s' % newpv)

        tmpdir = tempfile.mkdtemp(prefix='devtool')
        try:
            checksums, ftmpdir = scriptutils.fetch_url(tinfoil, uri, rev, tmpdir, logger, preserve_tmp=keep_temp)
        except scriptutils.FetchUrlFailure as e:
            raise DevtoolError(e)

        if ftmpdir and keep_temp:
            logger.info('Fetch temp directory is %s' % ftmpdir)

        md5 = checksums['md5sum']
        sha256 = checksums['sha256sum']

        tmpsrctree = _get_srctree(tmpdir)
        srctree = os.path.abspath(srctree)
        srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)

        # Delete all sources so we ensure no stray files are left over
        for item in os.listdir(srctree):
            if item in ['.git', 'oe-local-files']:
                continue
            itempath = os.path.join(srctree, item)
            if os.path.isdir(itempath):
                shutil.rmtree(itempath)
            else:
                os.remove(itempath)

        # Copy in new ones
        _copy_source_code(tmpsrctree, srctree)

        (stdout,_) = __run('git ls-files --modified --others --exclude-standard')
        filelist = stdout.splitlines()
        pbar = bb.ui.knotty.BBProgress('Adding changed files', len(filelist))
        pbar.start()
        # Add in batches to keep the git command line below OS limits
        batchsize = 100
        for i in range(0, len(filelist), batchsize):
            batch = filelist[i:i+batchsize]
            __run('git add -A %s' % ' '.join(['"%s"' % item for item in batch]))
            pbar.update(i)
        pbar.finish()

        useroptions = []
        oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd)
        __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
        __run('git tag -f devtool-base-%s' % newpv)

    (stdout, _) = __run('git rev-parse HEAD')
    rev = stdout.rstrip()

    if no_patch:
        patches = oe.recipeutils.get_recipe_patches(crd)
        if patches:
            logger.warn('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
    else:
        # Rebase the recipe's existing patches onto the new base
        __run('git checkout devtool-patched -b %s' % branch)
        skiptag = False
        try:
            __run('git rebase %s' % rev)
        except bb.process.ExecutionError as e:
            # A failed rebase leaves the user to resolve; don't tag then
            skiptag = True
            if 'conflict' in e.stdout:
                logger.warn('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
            else:
                logger.warn('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
        if not skiptag:
            if uri.startswith('git://'):
                suffix = 'new'
            else:
                suffix = newpv
            __run('git tag -f devtool-patched-%s' % suffix)

    if tmpsrctree:
        if keep_temp:
            logger.info('Preserving temporary directory %s' % tmpsrctree)
        else:
            shutil.rmtree(tmpsrctree)

    return (rev, md5, sha256, srcbranch, srcsubdir_rel)
|
||||
|
||||
def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd):
    """Creates the new recipe under workspace.

    Copies the recipe files into the workspace, renames them for the new
    version, and patches version, SRCREV, branch, checksum and S values
    as needed. Returns (fullpath, copied).
    """

    bpn = rd.getVar('BPN')
    path = os.path.join(workspace, 'recipes', bpn)
    bb.utils.mkdirhier(path)
    copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
    if not copied:
        raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn)
    logger.debug('Copied %s to %s' % (copied, path))

    oldpv = rd.getVar('PV')
    if not newpv:
        newpv = oldpv
    origpath = rd.getVar('FILE')
    fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
    logger.debug('Upgraded %s => %s' % (origpath, fullpath))

    # Accumulate variable changes to apply to the copied recipe
    newvalues = {}
    if _recipe_contains(rd, 'PV') and newpv != oldpv:
        newvalues['PV'] = newpv

    if srcrev:
        newvalues['SRCREV'] = srcrev

    if srcbranch:
        # Update (only) the first git/gitsm entry's branch parameter
        src_uri = oe.recipeutils.split_var_value(rd.getVar('SRC_URI', False) or '')
        changed = False
        replacing = True
        new_src_uri = []
        for entry in src_uri:
            scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
            if replacing and scheme in ['git', 'gitsm']:
                branch = params.get('branch', 'master')
                if rd.expand(branch) != srcbranch:
                    # Handle case where branch is set through a variable
                    res = re.match(r'\$\{([^}@]+)\}', branch)
                    if res:
                        newvalues[res.group(1)] = srcbranch
                        # We know we won't change SRC_URI now, so break out
                        break
                    else:
                        params['branch'] = srcbranch
                        entry = bb.fetch2.encodeurl((scheme, network, path, user, passwd, params))
                        changed = True
                replacing = False
            new_src_uri.append(entry)
        if changed:
            newvalues['SRC_URI'] = ' '.join(new_src_uri)

    # Drop any PR on upgrade
    newvalues['PR'] = None

    # Work out which SRC_URI entries have changed in case the entry uses a name
    crd = rd.createCopy()
    crd.setVar('PV', newpv)
    for var, value in newvalues.items():
        crd.setVar(var, value)
    old_src_uri = (rd.getVar('SRC_URI') or '').split()
    new_src_uri = (crd.getVar('SRC_URI') or '').split()
    newnames = []
    addnames = []
    for newentry in new_src_uri:
        _, _, _, _, _, params = bb.fetch2.decodeurl(newentry)
        if 'name' in params:
            newnames.append(params['name'])
            if newentry not in old_src_uri:
                addnames.append(params['name'])
    # Find what's been set in the original recipe
    oldnames = []
    noname = False
    for varflag in rd.getVarFlags('SRC_URI'):
        if varflag.endswith(('.md5sum', '.sha256sum')):
            name = varflag.rsplit('.', 1)[0]
            if name not in oldnames:
                oldnames.append(name)
        elif varflag in ['md5sum', 'sha256sum']:
            noname = True
    # Even if SRC_URI has named entries it doesn't have to actually use the name
    if noname and addnames and addnames[0] not in oldnames:
        addnames = []
    # Drop any old names (the name actually might include ${PV})
    for name in oldnames:
        if name not in newnames:
            newvalues['SRC_URI[%s.md5sum]' % name] = None
            newvalues['SRC_URI[%s.sha256sum]' % name] = None

    if md5 and sha256:
        if addnames:
            nameprefix = '%s.' % addnames[0]
        else:
            nameprefix = ''
        newvalues['SRC_URI[%smd5sum]' % nameprefix] = md5
        newvalues['SRC_URI[%ssha256sum]' % nameprefix] = sha256

    if srcsubdir_new != srcsubdir_old:
        s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
        s_subdir_new = os.path.relpath(os.path.abspath(crd.getVar('S')), crd.getVar('WORKDIR'))
        if srcsubdir_old == s_subdir_old and srcsubdir_new != s_subdir_new:
            # Subdir for old extracted source matches what S points to (it should!)
            # but subdir for new extracted source doesn't match what S will be
            newvalues['S'] = '${WORKDIR}/%s' % srcsubdir_new.replace(newpv, '${PV}')
            if crd.expand(newvalues['S']) == crd.expand('${WORKDIR}/${BP}'):
                # It's the default, drop it
                # FIXME what if S is being set in a .inc?
                newvalues['S'] = None
                logger.info('Source subdirectory has changed, dropping S value since it now matches the default ("${WORKDIR}/${BP}")')
            else:
                logger.info('Source subdirectory has changed, updating S value')

    # Re-parse the renamed recipe and write the accumulated changes into it
    rd = tinfoil.parse_recipe_file(fullpath, False)
    oe.recipeutils.patch_recipe(rd, fullpath, newvalues)

    return fullpath, copied
|
||||
|
||||
|
||||
def _check_git_config():
    """Verify git user.name/user.email are configured; raise DevtoolError
    listing whichever are missing (rebases require them)."""
    def getconfig(name):
        # 'git config' exits with 1 when the key is unset; treat that as
        # "not configured" rather than an error
        try:
            return bb.process.run('git config --global %s' % name)[0].strip()
        except bb.process.ExecutionError as e:
            if e.exitcode != 1:
                raise
            return None

    configerr = []
    if not getconfig('user.name'):
        configerr.append('Please set your name using:\n git config --global user.name')
    if not getconfig('user.email'):
        configerr.append('Please set your email using:\n git config --global user.email')
    if configerr:
        raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(configerr))
|
||||
|
||||
|
||||
def upgrade(args, config, basepath, workspace):
    """Entry point for the devtool 'upgrade' subcommand.

    Upgrades an existing recipe to a new upstream version: extracts the
    current and upgraded source trees, writes an upgraded recipe into the
    workspace and registers it so other devtool commands can use it.

    Returns 0 on success, 1 if the recipe could not be parsed; raises
    DevtoolError for user-correctable problems (bad arguments, recipe
    already in the workspace, same version, etc.).
    """
    if args.recipename in workspace:
        raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
    if not args.version and not args.srcrev:
        raise DevtoolError("You must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option")
    if args.srcbranch and not args.srcrev:
        # FIX: the original applied "% args.recipename" to a format string
        # containing no conversion specifier, so this path raised TypeError
        # instead of showing the intended error message.
        raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision")

    # Rebasing patches onto the new source requires a usable git identity
    _check_git_config()

    tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, True)
        if not rd:
            return 1

        pn = rd.getVar('PN')
        if pn != args.recipename:
            logger.info('Mapping %s to %s' % (args.recipename, pn))
        if pn in workspace:
            raise DevtoolError("recipe %s is already in your workspace" % pn)

        if args.srctree:
            srctree = os.path.abspath(args.srctree)
        else:
            srctree = standard.get_default_srctree(config, pn)

        standard._check_compatible_recipe(pn, rd)
        old_srcrev = rd.getVar('SRCREV')
        if old_srcrev == 'INVALID':
            old_srcrev = None
        if old_srcrev and not args.srcrev:
            raise DevtoolError("Recipe specifies a SRCREV value; you must specify a new one when upgrading")
        old_ver = rd.getVar('PV')
        if old_ver == args.version and old_srcrev == args.srcrev:
            raise DevtoolError("Current and upgrade versions are the same version")
        if args.version:
            if bb.utils.vercmp_string(args.version, old_ver) < 0:
                logger.warning('Upgrade version %s compares as less than the current version %s. If you are using a package feed for on-target upgrades or providing this recipe for general consumption, then you should increment PE in the recipe (or if there is no current PE value set, set it to "1")' % (args.version, old_ver))
            check_prerelease_version(args.version, 'devtool upgrade')

        rf = None
        try:
            logger.info('Extracting current version source...')
            rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
            logger.info('Extracting upgraded version source...')
            rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
                                                                           args.srcrev, args.srcbranch, args.branch,
                                                                           args.keep_temp, tinfoil, rd)
            rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd)
        except (bb.process.CmdError, DevtoolError) as e:
            # Both failure types get identical reporting/cleanup, so handle
            # them in a single clause (previously two duplicated handlers)
            _upgrade_error(e, rf, srctree)
        standard._add_md5(config, pn, os.path.dirname(rf))

        af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2,
                           copied, config.workspace_path, rd)
        standard._add_md5(config, pn, af)

        update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])

        logger.info('Upgraded source extracted to %s' % srctree)
        logger.info('New recipe is %s' % rf)
    finally:
        tinfoil.shutdown()
    return 0
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""

    defsrctree = standard.get_default_srctree(context.config)

    # Single subcommand: 'upgrade'
    parser = subparsers.add_parser('upgrade', help='Upgrade an existing recipe',
                                   description='Upgrades an existing recipe to a new upstream version. Puts the upgraded recipe file into the workspace along with any associated files, and extracts the source tree to a specified location (in case patches need rebasing or adding to as a result of the upgrade).',
                                   group='starting')
    parser.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
    parser.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
    parser.add_argument('--version', '-V', help='Version to upgrade to (PV)')
    parser.add_argument('--srcrev', '-S', help='Source revision to upgrade to (required if fetching from an SCM such as git)')
    parser.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
    parser.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
    parser.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
    parser.add_argument('--no-overrides', '-O', action="store_true", help='Do not create branches for other override configurations')
    # --same-dir and --no-same-dir are mutually exclusive
    builddir_group = parser.add_mutually_exclusive_group()
    builddir_group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
    builddir_group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
    parser.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
    parser.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)
|
252
scripts/lib/devtool/utilcmds.py
Normal file
252
scripts/lib/devtool/utilcmds.py
Normal file
@@ -0,0 +1,252 @@
|
||||
# Development tool - utility commands plugin
|
||||
#
|
||||
# Copyright (C) 2015-2016 Intel Corporation
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License version 2 as
|
||||
# published by the Free Software Foundation.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
|
||||
"""Devtool utility plugins"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import shutil
|
||||
import tempfile
|
||||
import logging
|
||||
import argparse
|
||||
import subprocess
|
||||
import scriptutils
|
||||
from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
|
||||
from devtool import parse_recipe
|
||||
|
||||
logger = logging.getLogger('devtool')
|
||||
|
||||
def _find_recipe_path(args, config, basepath, workspace):
|
||||
if args.any_recipe:
|
||||
logger.warning('-a/--any-recipe option is now always active, and thus the option will be removed in a future release')
|
||||
if args.recipename in workspace:
|
||||
recipefile = workspace[args.recipename]['recipefile']
|
||||
else:
|
||||
recipefile = None
|
||||
if not recipefile:
|
||||
tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
|
||||
try:
|
||||
rd = parse_recipe(config, tinfoil, args.recipename, True)
|
||||
if not rd:
|
||||
raise DevtoolError("Failed to find specified recipe")
|
||||
recipefile = rd.getVar('FILE')
|
||||
finally:
|
||||
tinfoil.shutdown()
|
||||
return recipefile
|
||||
|
||||
|
||||
def find_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'find-recipe' subcommand"""
    # Resolve the path and echo it for the user / scripts to consume
    path = _find_recipe_path(args, config, basepath, workspace)
    print(path)
    return 0
|
||||
|
||||
|
||||
def edit_recipe(args, config, basepath, workspace):
    """Entry point for the devtool 'edit-recipe' subcommand"""
    # Locate the recipe, then hand it to the user's configured editor
    recipefile = _find_recipe_path(args, config, basepath, workspace)
    return scriptutils.run_editor(recipefile, logger)
|
||||
|
||||
|
||||
def configure_help(args, config, basepath, workspace):
    """Entry point for the devtool 'configure-help' subcommand.

    Shows the configure help for a workspace recipe: an explanatory header
    describing the current configure options, followed by the output of
    ./configure --help (autotools) or cmake -LH (cmake), piped through a
    pager when stdout is a TTY.

    Returns 0 on success, 1 if the recipe could not be parsed, or the exit
    code of the help command if it fails; raises DevtoolError when
    do_configure is disabled or there is nothing to display.
    """
    import oe.utils

    check_workspace_recipe(workspace, args.recipename)
    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
    try:
        rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
        if not rd:
            return 1
        b = rd.getVar('B')
        s = rd.getVar('S')
        configurescript = os.path.join(s, 'configure')
        # do_configure may be disabled via the noexec varflag or missing
        # from the task list entirely
        confdisabled = 'noexec' in rd.getVarFlags('do_configure') or 'do_configure' not in (rd.getVar('__BBTASKS', False) or [])
        configureopts = oe.utils.squashspaces(rd.getVar('CONFIGUREOPTS') or '')
        extra_oeconf = oe.utils.squashspaces(rd.getVar('EXTRA_OECONF') or '')
        extra_oecmake = oe.utils.squashspaces(rd.getVar('EXTRA_OECMAKE') or '')
        do_configure = rd.getVar('do_configure') or ''
        do_configure_noexpand = rd.getVar('do_configure', False) or ''
        packageconfig = rd.getVarFlags('PACKAGECONFIG') or []
        # Only treat the recipe as autotools/cmake when do_configure
        # actually routes through the corresponding class function
        autotools = bb.data.inherits_class('autotools', rd) and ('oe_runconf' in do_configure or 'autotools_do_configure' in do_configure)
        cmake = bb.data.inherits_class('cmake', rd) and ('cmake_do_configure' in do_configure)
        cmake_do_configure = rd.getVar('cmake_do_configure')
        pn = rd.getVar('PN')
    finally:
        tinfoil.shutdown()

    # 'doc' is a varflag on PACKAGECONFIG itself, not an option
    if 'doc' in packageconfig:
        del packageconfig['doc']

    if autotools and not os.path.exists(configurescript):
        logger.info('Running do_configure to generate configure script')
        try:
            stdout, _ = exec_build_env_command(config.init_path, basepath,
                                               'bitbake -c configure %s' % args.recipename,
                                               stderr=subprocess.STDOUT)
        except bb.process.ExecutionError:
            # Best-effort: we may still have something useful to show
            pass

    if confdisabled or do_configure.strip() in ('', ':'):
        raise DevtoolError("do_configure task has been disabled for this recipe")
    elif args.no_pager and not os.path.exists(configurescript):
        raise DevtoolError("No configure script found and no other information to display")
    else:
        configopttext = ''
        if autotools and configureopts:
            configopttext = '''
Arguments currently passed to the configure script:

%s

Some of those are fixed.''' % (configureopts + ' ' + extra_oeconf)
            if extra_oeconf:
                configopttext += ''' The ones that are specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        elif cmake:
            # Pull the actual 'cmake ...' command line (possibly spanning
            # backslash-continued lines) out of cmake_do_configure
            in_cmake = False
            cmake_cmd = ''
            for line in cmake_do_configure.splitlines():
                if in_cmake:
                    cmake_cmd = cmake_cmd + ' ' + line.strip().rstrip('\\')
                    if not line.endswith('\\'):
                        break
                if line.lstrip().startswith('cmake '):
                    cmake_cmd = line.strip().rstrip('\\')
                    if line.endswith('\\'):
                        in_cmake = True
                    else:
                        break
            if cmake_cmd:
                configopttext = '''
The current cmake command line:

%s

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (oe.utils.squashspaces(cmake_cmd), extra_oecmake)
            else:
                configopttext = '''
The current implementation of cmake_do_configure:

cmake_do_configure() {
%s
}

Arguments specified through EXTRA_OECMAKE (which you can change or add to easily)

%s''' % (cmake_do_configure.rstrip(), extra_oecmake)

        elif do_configure:
            configopttext = '''
The current implementation of do_configure:

do_configure() {
%s
}''' % do_configure.rstrip()
            if '${EXTRA_OECONF}' in do_configure_noexpand:
                configopttext += '''

Arguments specified through EXTRA_OECONF (which you can change or add to easily):

%s''' % extra_oeconf

        if packageconfig:
            configopttext += '''

Some of these options may be controlled through PACKAGECONFIG; for more details please see the recipe.'''

        if args.arg:
            helpargs = ' '.join(args.arg)
        elif cmake:
            helpargs = '-LH'
        else:
            helpargs = '--help'

        msg = '''configure information for %s
------------------------------------------
%s''' % (pn, configopttext)

        if cmake:
            msg += '''

The cmake %s output for %s follows. After "-- Cache values" you should see a list of variables you can add to EXTRA_OECMAKE (prefixed with -D and suffixed with = followed by the desired value, without any spaces).
------------------------------------------''' % (helpargs, pn)
        elif os.path.exists(configurescript):
            msg += '''

The ./configure %s output for %s follows.
------------------------------------------''' % (helpargs, pn)

        olddir = os.getcwd()
        # FIX: the original also did tmppath = tempfile.mkdtemp() /
        # shutil.rmtree(tmppath) around this section, but tmppath was never
        # used for anything - dead directory creation removed.
        with tempfile.NamedTemporaryFile('w', delete=False) as tf:
            if not args.no_header:
                tf.write(msg + '\n')
            tf.close()
        try:
            try:
                # Header first, then the configure/cmake help output
                cmd = 'cat %s' % tf.name
                if cmake:
                    cmd += '; cmake %s %s 2>&1' % (helpargs, s)
                    os.chdir(b)
                elif os.path.exists(configurescript):
                    cmd += '; %s %s' % (configurescript, helpargs)
                if sys.stdout.isatty() and not args.no_pager:
                    pager = os.environ.get('PAGER', 'less')
                    cmd = '(%s) | %s' % (cmd, pager)
                subprocess.check_call(cmd, shell=True)
            except subprocess.CalledProcessError as e:
                return e.returncode
        finally:
            os.chdir(olddir)
            os.remove(tf.name)
|
||||
|
||||
|
||||
def register_commands(subparsers, context):
    """Register devtool subcommands from this plugin"""

    # Edit-recipe
    p_edit = subparsers.add_parser('edit-recipe', help='Edit a recipe file',
                                   description='Runs the default editor (as specified by the EDITOR variable) on the specified recipe. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
                                   group='working')
    p_edit.add_argument('recipename', help='Recipe to edit')
    # FIXME drop -a at some point in future
    p_edit.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    p_edit.set_defaults(func=edit_recipe)

    # Find-recipe
    p_find = subparsers.add_parser('find-recipe', help='Find a recipe file',
                                   description='Finds a recipe file. Note that this will be quicker for recipes in the workspace as the cache does not need to be loaded in that case.',
                                   group='working')
    p_find.add_argument('recipename', help='Recipe to find')
    # FIXME drop -a at some point in future
    p_find.add_argument('--any-recipe', '-a', action="store_true", help='Does nothing (exists for backwards-compatibility)')
    p_find.set_defaults(func=find_recipe)

    # NOTE: Needed to override the usage string here since the default
    # gets the order wrong - recipename must come before --arg
    p_confhelp = subparsers.add_parser('configure-help', help='Get help on configure script options',
                                       usage='devtool configure-help [options] recipename [--arg ...]',
                                       description='Displays the help for the configure script for the specified recipe (i.e. runs ./configure --help) prefaced by a header describing the current options being specified. Output is piped through less (or whatever PAGER is set to, if set) for easy browsing.',
                                       group='working')
    p_confhelp.add_argument('recipename', help='Recipe to show configure help for')
    p_confhelp.add_argument('-p', '--no-pager', help='Disable paged output', action="store_true")
    p_confhelp.add_argument('-n', '--no-header', help='Disable explanatory header text', action="store_true")
    p_confhelp.add_argument('--arg', help='Pass remaining arguments to the configure script instead of --help (useful if the script has additional help options)', nargs=argparse.REMAINDER)
    p_confhelp.set_defaults(func=configure_help)
|
Reference in New Issue
Block a user