# M7350/oe-core/bitbake/lib/bb/utils.py

# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
"""
BitBake Utility Functions
"""
# Copyright (C) 2004 Michael Lauer
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import re, fcntl, os, string, stat, shutil, time
import sys
import errno
import logging
import bb
import bb.msg
import multiprocessing
import subprocess
import glob
import traceback
from commands import getstatusoutput
from contextlib import contextmanager

logger = logging.getLogger("BitBake.Util")
def clean_context():
    return {
        "os": os,
        "bb": bb,
        "time": time,
    }

def get_context():
    return _context

def set_context(ctx):
    _context = ctx

# Context used in better_exec, eval
_context = clean_context()

class VersionStringException(Exception):
    """Exception raised when an invalid version specification is found"""
def explode_version(s):
    r = []
    alpha_regexp = re.compile(r'^([a-zA-Z]+)(.*)$')
    numeric_regexp = re.compile(r'^(\d+)(.*)$')
    while (s != ''):
        if s[0] in string.digits:
            m = numeric_regexp.match(s)
            r.append((0, int(m.group(1))))
            s = m.group(2)
            continue
        if s[0] in string.letters:
            m = alpha_regexp.match(s)
            r.append((1, m.group(1)))
            s = m.group(2)
            continue
        if s[0] == '~':
            r.append((-1, s[0]))
        else:
            r.append((2, s[0]))
        s = s[1:]
    return r
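
# Illustrative example of the decomposition above (weights: 0 numeric, 1 alpha,
# 2 other separators, -1 for '~' which compares lower than everything else):
#
#   explode_version("1.0~rc2")
#     -> [(0, 1), (2, '.'), (0, 0), (-1, '~'), (1, 'rc'), (0, 2)]
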
def split_version(s):
"""Split a version string into its constituent parts (PE, PV, PR)"""
s = s.strip(" <>=")
e = 0
if s.count(':'):
e = int(s.split(":")[0])
s = s.split(":")[1]
r = ""
if s.count('-'):
r = s.rsplit("-", 1)[1]
s = s.rsplit("-", 1)[0]
v = s
return (e, v, r)
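
# Illustrative examples of the (PE, PV, PR) split performed above:
#
#   split_version("2:1.4.2-r3")  -> (2, '1.4.2', 'r3')
#   split_version(">= 1.0")      -> (0, '1.0', '')
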
def vercmp_part(a, b):
    va = explode_version(a)
    vb = explode_version(b)
    while True:
        if va == []:
            (oa, ca) = (0, None)
        else:
            (oa, ca) = va.pop(0)
        if vb == []:
            (ob, cb) = (0, None)
        else:
            (ob, cb) = vb.pop(0)
        if (oa, ca) == (0, None) and (ob, cb) == (0, None):
            return 0
        if oa < ob:
            return -1
        elif oa > ob:
            return 1
        elif ca < cb:
            return -1
        elif ca > cb:
            return 1
def vercmp(ta, tb):
(ea, va, ra) = ta
(eb, vb, rb) = tb
r = int(ea or 0) - int(eb or 0)
if (r == 0):
r = vercmp_part(va, vb)
if (r == 0):
r = vercmp_part(ra, rb)
return r

def vercmp_string(a, b):
    ta = split_version(a)
    tb = split_version(b)
    return vercmp(ta, tb)

def vercmp_string_op(a, b, op):
    """
Compare two versions and check if the specified comparison operator matches the result of the comparison.
This function is fairly liberal about what operators it will accept since there are a variety of styles
depending on the context.
"""
res = vercmp_string(a, b)
if op in ('=', '=='):
return res == 0
elif op == '<=':
return res <= 0
elif op == '>=':
return res >= 0
elif op in ('>', '>>'):
return res > 0
elif op in ('<', '<<'):
return res < 0
elif op == '!=':
return res != 0
else:
raise VersionStringException('Unsupported comparison operator "%s"' % op)
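
# Illustrative examples of the version comparison helpers above:
#
#   vercmp_string("1.0", "1.0.1")              -> -1
#   vercmp_string_op("1.0", "1.0.1", "<")      -> True
#   vercmp_string_op("2.0-r1", "2.0-r1", "=")  -> True
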
def explode_deps(s):
"""
Take an RDEPENDS style string of format:
"DEPEND1 (optional version) DEPEND2 (optional version) ..."
and return a list of dependencies.
Version information is ignored.
"""
r = []
l = s.split()
flag = False
for i in l:
if i[0] == '(':
flag = True
#j = []
if not flag:
r.append(i)
#else:
# j.append(i)
if flag and i.endswith(')'):
flag = False
# Ignore version
#r[-1] += ' ' + ' '.join(j)
return r
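
# Illustrative example: version constraints are dropped, only the names remain.
#
#   explode_deps("foo (>= 1.2) bar update-rc.d") -> ['foo', 'bar', 'update-rc.d']
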

def explode_dep_versions2(s):
    """
    Take an RDEPENDS style string of format:
    "DEPEND1 (optional version) DEPEND2 (optional version) ..."
    and return a dictionary of dependencies and versions.
    """
    r = {}
    l = s.replace(",", "").split()
    lastdep = None
    lastcmp = ""
    lastver = ""
    incmp = False
    inversion = False
    for i in l:
        if i[0] == '(':
            incmp = True
            i = i[1:].strip()
            if not i:
                continue

        if incmp:
            incmp = False
            inversion = True
# This list is based on behavior and supported comparisons from deb, opkg and rpm.
#
# Even though =<, <<, ==, !=, =>, and >> may not be supported,
# we list each possibly valid item.
# The build system is responsible for validation of what it supports.
if i.startswith(('<=', '=<', '<<', '==', '!=', '>=', '=>', '>>')):
lastcmp = i[0:2]
i = i[2:]
elif i.startswith(('<', '>', '=')):
lastcmp = i[0:1]
i = i[1:]
else:
# This is an unsupported case!
raise VersionStringException('Invalid version specification in "(%s" - invalid or missing operator' % i)
lastcmp = (i or "")
i = ""
i.strip()
if not i:
continue
if inversion:
if i.endswith(')'):
i = i[:-1] or ""
inversion = False
if lastver and i:
lastver += " "
if i:
lastver += i
if lastdep not in r:
r[lastdep] = []
r[lastdep].append(lastcmp + " " + lastver)
continue
#if not inversion:
lastdep = i
lastver = ""
lastcmp = ""
if not (i in r and r[i]):
r[lastdep] = []
return r
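
# Illustrative example: each dependency maps to a (possibly empty) list of
# "<operator> <version>" constraints.
#
#   explode_dep_versions2("foo (>= 1.2), bar, baz (= 2.0)")
#     -> {'foo': ['>= 1.2'], 'bar': [], 'baz': ['= 2.0']}
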
def explode_dep_versions(s):
r = explode_dep_versions2(s)
for d in r:
if not r[d]:
r[d] = None
continue
if len(r[d]) > 1:
bb.warn("explode_dep_versions(): Item %s appeared in dependency string '%s' multiple times with different values. explode_dep_versions cannot cope with this." % (d, s))
r[d] = r[d][0]
return r
def join_deps(deps, commasep=True):
"""
Take the result from explode_dep_versions and generate a dependency string
"""
result = []
for dep in deps:
if deps[dep]:
if isinstance(deps[dep], list):
for v in deps[dep]:
result.append(dep + " (" + v + ")")
else:
result.append(dep + " (" + deps[dep] + ")")
else:
result.append(dep)
if commasep:
return ", ".join(result)
else:
return " ".join(result)
def _print_trace(body, line):
    """
    Print the Environment of a Text Body
    """
    error = []
    # print the environment of the method
    min_line = max(1, line-4)
    max_line = min(line + 4, len(body))
    for i in range(min_line, max_line + 1):
        if line == i:
            error.append(' *** %.4d:%s' % (i, body[i-1].rstrip()))
        else:
            error.append('     %.4d:%s' % (i, body[i-1].rstrip()))
    return error

def better_compile(text, file, realfile, mode = "exec"):
    """
    A better compile method. This method
    will print the offending lines.
    """
    try:
        return compile(text, file, mode)
    except Exception as e:
        error = []
        # split the text into lines again
        body = text.split('\n')
        error.append("Error in compiling python function in %s:\n" % realfile)
        if e.lineno:
            error.append("The code lines resulting in this error were:")
            error.extend(_print_trace(body, e.lineno))
        else:
            error.append("The function causing this error was:")
            for line in body:
                error.append(line)
        error.append("%s: %s" % (e.__class__.__name__, str(e)))

        logger.error("\n".join(error))

        e = bb.BBHandledException(e)
        raise e

def _print_exception(t, value, tb, realfile, text, context):
    error = []
    try:
        exception = traceback.format_exception_only(t, value)
        error.append('Error executing a python function in %s:\n' % realfile)

        # Strip 'us' from the stack (better_exec call)
        tb = tb.tb_next
        textarray = text.split('\n')
        linefailed = tb.tb_lineno

        tbextract = traceback.extract_tb(tb)
        tbformat = traceback.format_list(tbextract)
        error.append("The stack trace of python calls that resulted in this exception/failure was:")
        error.append("File: '%s', lineno: %s, function: %s" % (tbextract[0][0], tbextract[0][1], tbextract[0][2]))
        error.extend(_print_trace(textarray, linefailed))

        # See if this is a function we constructed and has calls back into other functions in
        # "text". If so, try and improve the context of the error by diving down the trace
        level = 0
        nexttb = tb.tb_next
        while nexttb is not None and (level+1) < len(tbextract):
            error.append("File: '%s', lineno: %s, function: %s" % (tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2]))
            if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
                # The code was possibly in the string we compiled ourselves
                error.extend(_print_trace(textarray, tbextract[level+1][1]))
            elif tbextract[level+1][0].startswith("/"):
                # The code looks like it might be in a file, try and load it
                try:
                    with open(tbextract[level+1][0], "r") as f:
                        text = f.readlines()
                        error.extend(_print_trace(text, tbextract[level+1][1]))
                except:
                    error.append(tbformat[level+1])
            elif "d" in context and tbextract[level+1][2]:
                # Try and find the code in the datastore based on the functionname
                d = context["d"]
                functionname = tbextract[level+1][2]
                text = d.getVar(functionname, True)
                if text:
                    error.extend(_print_trace(text.split('\n'), tbextract[level+1][1]))
                else:
                    error.append(tbformat[level+1])
            else:
                error.append(tbformat[level+1])

            nexttb = tb.tb_next
            level = level + 1

        error.append("Exception: %s" % ''.join(exception))
    finally:
        logger.error("\n".join(error))

def better_exec(code, context, text = None, realfile = "<code>"):
    """
    Similar to better_compile, better_exec will
    print the lines that are responsible for the
    error.
    """
    import bb.parse
    if not text:
        text = code
    if not hasattr(code, "co_filename"):
        code = better_compile(code, realfile, realfile)
    try:
        exec(code, get_context(), context)
    except (bb.BBHandledException, bb.parse.SkipRecipe, bb.build.FuncFailed, bb.data_smart.ExpansionError):
        # Error already shown so passthrough, no need for traceback
        raise
    except Exception as e:
        (t, value, tb) = sys.exc_info()
        try:
            _print_exception(t, value, tb, realfile, text, context)
        except Exception as e:
            logger.error("Exception handler error: %s" % str(e))
        e = bb.BBHandledException(e)
        raise e

def simple_exec(code, context):
    exec(code, get_context(), context)

def better_eval(source, locals):
    return eval(source, get_context(), locals)
@contextmanager
def fileslocked(files):
"""Context manager for locking and unlocking file locks."""
locks = []
if files:
for lockfile in files:
locks.append(bb.utils.lockfile(lockfile))
yield
for lock in locks:
bb.utils.unlockfile(lock)
def lockfile(name, shared=False, retry=True):
"""
Use the file fn as a lock file, return when the lock has been acquired.
Returns a variable to pass to unlockfile().
"""
dirname = os.path.dirname(name)
mkdirhier(dirname)
if not os.access(dirname, os.W_OK):
logger.error("Unable to acquire lock '%s', directory is not writable",
name)
sys.exit(1)
op = fcntl.LOCK_EX
if shared:
op = fcntl.LOCK_SH
if not retry:
op = op | fcntl.LOCK_NB
while True:
# If we leave the lockfiles lying around there is no problem
# but we should clean up after ourselves. This gives potential
# for races though. To work around this, when we acquire the lock
        # we check that the file we locked is still the lock file on disk
        # by comparing inode numbers. If they don't match or the lockfile
# no longer exists, we start again.
# This implementation is unfair since the last person to request the
# lock is the most likely to win it.
try:
lf = open(name, 'a+')
fileno = lf.fileno()
fcntl.flock(fileno, op)
statinfo = os.fstat(fileno)
if os.path.exists(lf.name):
statinfo2 = os.stat(lf.name)
if statinfo.st_ino == statinfo2.st_ino:
return lf
lf.close()
except Exception:
try:
lf.close()
except Exception:
pass
pass
if not retry:
return None
def unlockfile(lf):
"""
Unlock a file locked using lockfile()
"""
try:
# If we had a shared lock, we need to promote to exclusive before
# removing the lockfile. Attempt this, ignore failures.
fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
os.unlink(lf.name)
except (IOError, OSError):
pass
fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
lf.close()
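
# Illustrative usage of the locking helpers above; the lock file paths are
# hypothetical examples.
#
#   lock = bb.utils.lockfile("/tmp/myresource.lock")
#   try:
#       pass  # critical section
#   finally:
#       bb.utils.unlockfile(lock)
#
# or, to hold several locks for the duration of a block:
#
#   with bb.utils.fileslocked(["/tmp/a.lock", "/tmp/b.lock"]):
#       pass  # critical section
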
def md5_file(filename):
"""
Return the hex string representation of the MD5 checksum of filename.
"""
try:
import hashlib
m = hashlib.md5()
except ImportError:
import md5
m = md5.new()
with open(filename, "rb") as f:
for line in f:
m.update(line)
return m.hexdigest()
def sha256_file(filename):
"""
Return the hex string representation of the 256-bit SHA checksum of
filename. On Python 2.4 this will return None, so callers will need to
handle that by either skipping SHA checks, or running a standalone sha256sum
binary.
"""
try:
import hashlib
except ImportError:
return None
s = hashlib.sha256()
with open(filename, "rb") as f:
for line in f:
s.update(line)
return s.hexdigest()
def preserved_envvars_exported():
"""Variables which are taken from the environment and placed in and exported
from the metadata"""
return [
'BB_TASKHASH',
'HOME',
'LOGNAME',
'PATH',
'PWD',
'SHELL',
'TERM',
'USER',
]
def preserved_envvars():
"""Variables which are taken from the environment and placed in the metadata"""
v = [
'BBPATH',
'BB_PRESERVE_ENV',
'BB_ENV_WHITELIST',
'BB_ENV_EXTRAWHITE',
]
    return v + preserved_envvars_exported()

def filter_environment(good_vars):
    """
    Create a pristine environment for bitbake. This will remove variables that
    are not known and may influence the build in a negative way.
    """
    removed_vars = {}
    for key in os.environ.keys():
        if key in good_vars:
            continue

        removed_vars[key] = os.environ[key]
        os.unsetenv(key)
        del os.environ[key]

    if removed_vars:
        logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars.keys()))

    return removed_vars

def approved_variables():
    """
    Determine and return the list of whitelisted variables which are approved
    to remain in the environment.
    """
    if 'BB_PRESERVE_ENV' in os.environ:
        return os.environ.keys()
    approved = []
    if 'BB_ENV_WHITELIST' in os.environ:
        approved = os.environ['BB_ENV_WHITELIST'].split()
        approved.extend(['BB_ENV_WHITELIST'])
    else:
        approved = preserved_envvars()
        if 'BB_ENV_EXTRAWHITE' in os.environ:
            approved.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
            if 'BB_ENV_EXTRAWHITE' not in approved:
                approved.extend(['BB_ENV_EXTRAWHITE'])
    return approved

def clean_environment():
    """
    Clean up any spurious environment variables. This will remove any
    variables the user hasn't chosen to preserve.
    """
    if 'BB_PRESERVE_ENV' not in os.environ:
        good_vars = approved_variables()
        return filter_environment(good_vars)

    return {}
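
# Illustrative behaviour of the environment filtering above, assuming a shell
# invocation such as:
#
#   BB_ENV_EXTRAWHITE="http_proxy https_proxy" bitbake <target>
#
# clean_environment() would then keep http_proxy/https_proxy in addition to
# preserved_envvars(), while setting BB_PRESERVE_ENV=1 keeps the environment
# untouched and BB_ENV_WHITELIST replaces the default list entirely.
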
def empty_environment():
"""
Remove all variables from the environment.
"""
for s in os.environ.keys():
os.unsetenv(s)
del os.environ[s]
def build_environment(d):
"""
Build an environment from all exported variables.
"""
import bb.data
for var in bb.data.keys(d):
export = d.getVarFlag(var, "export")
if export:
os.environ[var] = d.getVar(var, True) or ""
def remove(path, recurse=False):
"""Equivalent to rm -f or rm -rf"""
if not path:
return
if recurse:
        # shutil.rmtree(name) would be ideal but it's too slow
subprocess.call(['rm', '-rf'] + glob.glob(path))
return
for name in glob.glob(path):
try:
os.unlink(name)
except OSError as exc:
if exc.errno != errno.ENOENT:
raise
def prunedir(topdir):
# Delete everything reachable from the directory named in 'topdir'.
# CAUTION: This is dangerous!
for root, dirs, files in os.walk(topdir, topdown = False):
for name in files:
os.remove(os.path.join(root, name))
for name in dirs:
if os.path.islink(os.path.join(root, name)):
os.remove(os.path.join(root, name))
else:
os.rmdir(os.path.join(root, name))
os.rmdir(topdir)
#
# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
# but that's possibly insane and suffixes is probably going to be small
#
def prune_suffix(var, suffixes, d):
# See if var ends with any of the suffixes listed and
# remove it if found
for suffix in suffixes:
if var.endswith(suffix):
return var.replace(suffix, "")
return var
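
# Illustrative example (the datastore argument is unused by the current
# implementation):
#
#   prune_suffix("glibc-native", ["-native", "-cross"], d) -> "glibc"
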
def mkdirhier(directory):
    """Create a directory like 'mkdir -p', but unlike os.makedirs do not
    complain if the directory already exists.
    """
try:
os.makedirs(directory)
except OSError as e:
if e.errno != errno.EEXIST:
raise e
def movefile(src, dest, newmtime = None, sstat = None):
"""Moves a file from src to dest, preserving all permissions and
attributes; mtime will be preserved even when moving across
filesystems. Returns true on success and false on failure. Move is
atomic.
"""
#print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
try:
if not sstat:
sstat = os.lstat(src)
except Exception as e:
print("movefile: Stating source file failed...", e)
return None
destexists = 1
try:
dstat = os.lstat(dest)
except:
dstat = os.lstat(os.path.dirname(dest))
destexists = 0
if destexists:
if stat.S_ISLNK(dstat[stat.ST_MODE]):
try:
os.unlink(dest)
destexists = 0
except Exception as e:
pass
if stat.S_ISLNK(sstat[stat.ST_MODE]):
try:
target = os.readlink(src)
if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
os.unlink(dest)
os.symlink(target, dest)
#os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
os.unlink(src)
return os.lstat(dest)
except Exception as e:
print("movefile: failed to properly create symlink:", dest, "->", target, e)
return None
renamefailed = 1
if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
try:
os.rename(src, dest)
renamefailed = 0
except Exception as e:
if e[0] != errno.EXDEV:
# Some random error.
print("movefile: Failed to move", src, "to", dest, e)
return None
            # Invalid cross-device link: 'bind' mounted or actually cross-device
if renamefailed:
didcopy = 0
if stat.S_ISREG(sstat[stat.ST_MODE]):
try: # For safety copy then move it over.
shutil.copyfile(src, dest + "#new")
os.rename(dest + "#new", dest)
didcopy = 1
except Exception as e:
print('movefile: copy', src, '->', dest, 'failed.', e)
return None
else:
#we don't yet handle special, so we need to fall back to /bin/mv
a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
if a[0] != 0:
print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
return None # failure
try:
if didcopy:
os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
os.unlink(src)
except Exception as e:
print("movefile: Failed to chown/chmod/unlink", dest, e)
return None
if newmtime:
os.utime(dest, (newmtime, newmtime))
else:
os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
newmtime = sstat[stat.ST_MTIME]
return newmtime
def copyfile(src, dest, newmtime = None, sstat = None):
"""
Copies a file from src to dest, preserving all permissions and
attributes; mtime will be preserved even when moving across
filesystems. Returns true on success and false on failure.
"""
#print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
try:
if not sstat:
sstat = os.lstat(src)
except Exception as e:
logger.warn("copyfile: stat of %s failed (%s)" % (src, e))
return False
destexists = 1
try:
dstat = os.lstat(dest)
except:
dstat = os.lstat(os.path.dirname(dest))
destexists = 0
if destexists:
if stat.S_ISLNK(dstat[stat.ST_MODE]):
try:
os.unlink(dest)
destexists = 0
except Exception as e:
pass
if stat.S_ISLNK(sstat[stat.ST_MODE]):
try:
target = os.readlink(src)
if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
os.unlink(dest)
os.symlink(target, dest)
#os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
return os.lstat(dest)
except Exception as e:
logger.warn("copyfile: failed to create symlink %s to %s (%s)" % (dest, target, e))
return False
if stat.S_ISREG(sstat[stat.ST_MODE]):
try:
srcchown = False
if not os.access(src, os.R_OK):
# Make sure we can read it
srcchown = True
os.chmod(src, sstat[stat.ST_MODE] | stat.S_IRUSR)
# For safety copy then move it over.
shutil.copyfile(src, dest + "#new")
os.rename(dest + "#new", dest)
except Exception as e:
logger.warn("copyfile: copy %s to %s failed (%s)" % (src, dest, e))
return False
finally:
if srcchown:
os.chmod(src, sstat[stat.ST_MODE])
os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
else:
        # we don't yet handle special files, so we need to fall back to /bin/cp
a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
if a[0] != 0:
logger.warn("copyfile: failed to copy special file %s to %s (%s)" % (src, dest, a))
return False # failure
try:
os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
except Exception as e:
logger.warn("copyfile: failed to chown/chmod %s (%s)" % (dest, e))
return False
if newmtime:
os.utime(dest, (newmtime, newmtime))
else:
os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
newmtime = sstat[stat.ST_MTIME]
return newmtime

def which(path, item, direction = 0, history = False):
    """
    Locate a file in a PATH
    """
    hist = []
    paths = (path or "").split(':')
    if direction != 0:
        paths.reverse()

    for p in paths:
        next = os.path.join(p, item)
        hist.append(next)
        if os.path.exists(next):
            if not os.path.isabs(next):
                next = os.path.abspath(next)
            if history:
                return next, hist
            return next

    if history:
        return "", hist
    return ""
def to_boolean(string, default=None):
if not string:
return default
normalized = string.lower()
if normalized in ("y", "yes", "1", "true"):
return True
elif normalized in ("n", "no", "0", "false"):
return False
else:
raise ValueError("Invalid value for to_boolean: %s" % string)
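
# Illustrative examples:
#
#   to_boolean("Yes")              -> True
#   to_boolean("0")                -> False
#   to_boolean(None, default=True) -> True
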
def contains(variable, checkvalues, truevalue, falsevalue, d):
val = d.getVar(variable, True)
if not val:
return falsevalue
val = set(val.split())
if isinstance(checkvalues, basestring):
checkvalues = set(checkvalues.split())
else:
checkvalues = set(checkvalues)
if checkvalues.issubset(val):
return truevalue
return falsevalue
def contains_any(variable, checkvalues, truevalue, falsevalue, d):
val = d.getVar(variable, True)
if not val:
return falsevalue
val = set(val.split())
if isinstance(checkvalues, basestring):
checkvalues = set(checkvalues.split())
else:
checkvalues = set(checkvalues)
if checkvalues & val:
return truevalue
return falsevalue
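
# Illustrative examples, assuming a datastore d where
# DISTRO_FEATURES = "alsa bluetooth usbhost":
#
#   contains("DISTRO_FEATURES", "alsa bluetooth", "y", "n", d)    -> "y"
#   contains("DISTRO_FEATURES", "alsa wayland", "y", "n", d)      -> "n"
#   contains_any("DISTRO_FEATURES", "alsa wayland", "y", "n", d)  -> "y"
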
def cpu_count():
return multiprocessing.cpu_count()
def nonblockingfd(fd):
fcntl.fcntl(fd, fcntl.F_SETFL, fcntl.fcntl(fd, fcntl.F_GETFL) | os.O_NONBLOCK)
def process_profilelog(fn):
pout = open(fn + '.processed', 'w')
import pstats
p = pstats.Stats(fn, stream=pout)
p.sort_stats('time')
p.print_stats()
p.print_callers()
p.sort_stats('cumulative')
p.print_stats()
pout.flush()
pout.close()
#
# Was present to work around multiprocessing pool bugs in python < 2.7.3
#
def multiprocessingpool(*args, **kwargs):
import multiprocessing.pool
#import multiprocessing.util
#multiprocessing.util.log_to_stderr(10)
# Deal with a multiprocessing bug where signals to the processes would be delayed until the work
# completes. Putting in a timeout means the signals (like SIGINT/SIGTERM) get processed.
def wrapper(func):
def wrap(self, timeout=None):
return func(self, timeout=timeout if timeout is not None else 1e100)
return wrap
multiprocessing.pool.IMapIterator.next = wrapper(multiprocessing.pool.IMapIterator.next)
return multiprocessing.Pool(*args, **kwargs)
def exec_flat_python_func(func, *args, **kwargs):
"""Execute a flat python function (defined with def funcname(args):...)"""
# Prepare a small piece of python code which calls the requested function
# To do this we need to prepare two things - a set of variables we can use to pass
# the values of arguments into the calling function, and the list of arguments for
# the function being called
context = {}
funcargs = []
# Handle unnamed arguments
aidx = 1
for arg in args:
argname = 'arg_%s' % aidx
context[argname] = arg
funcargs.append(argname)
aidx += 1
# Handle keyword arguments
context.update(kwargs)
funcargs.extend(['%s=%s' % (arg, arg) for arg in kwargs.iterkeys()])
code = 'retval = %s(%s)' % (func, ', '.join(funcargs))
comp = bb.utils.better_compile(code, '<string>', '<string>')
bb.utils.better_exec(comp, context, code, '<string>')
return context['retval']
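
# Illustrative examples: positional arguments are passed via generated arg_N
# variables, keyword arguments by name.
#
#   exec_flat_python_func('max', 3, 7)                        -> 7
#   exec_flat_python_func('sorted', [3, 1, 2], reverse=True)  -> [3, 2, 1]
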
def edit_metadata_file(meta_file, variables, func):
"""Edit a recipe or config file and modify one or more specified
variable values set in the file using a specified callback function.
The file is only written to if the value(s) actually change.
"""
var_res = {}
for var in variables:
var_res[var] = re.compile(r'^%s[ \t]*[?+]*=' % var)
updated = False
varset_start = ''
newlines = []
in_var = None
full_value = ''
def handle_var_end():
(newvalue, indent, minbreak) = func(in_var, full_value)
if newvalue != full_value:
if isinstance(newvalue, list):
                indentspc = ' ' * indent
                if minbreak:
                    # First item on first line
                    if len(newvalue) == 1:
                        newlines.append('%s "%s"\n' % (varset_start, newvalue[0]))
                    else:
                        newlines.append('%s "%s\\\n' % (varset_start, newvalue[0]))
                        for item in newvalue[1:]:
                            newlines.append('%s%s \\\n' % (indentspc, item))
                        newlines.append('%s"\n' % indentspc)
                else:
                    # No item on first line
                    newlines.append('%s " \\\n' % varset_start)
                    for item in newvalue:
                        newlines.append('%s%s \\\n' % (indentspc, item))
                    newlines.append('%s"\n' % indentspc)
else:
newlines.append('%s "%s"\n' % (varset_start, newvalue))
return True
return False
with open(meta_file, 'r') as f:
for line in f:
if in_var:
value = line.rstrip()
full_value += value[:-1]
if value.endswith('"') or value.endswith("'"):
if handle_var_end():
updated = True
in_var = None
else:
matched = False
for (varname, var_re) in var_res.iteritems():
if var_re.match(line):
splitvalue = line.split('"', 1)
varset_start = splitvalue[0].rstrip()
value = splitvalue[1].rstrip()
if value.endswith('\\'):
value = value[:-1]
full_value = value
if value.endswith('"') or value.endswith("'"):
if handle_var_end():
updated = True
else:
in_var = varname
matched = True
break
if not matched:
newlines.append(line)
if updated:
with open(meta_file, 'w') as f:
f.writelines(newlines)
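
# Illustrative usage: set PR to "r1" in a (hypothetical) recipe file. The
# callback receives (varname, origvalue) and returns (newvalue, indent,
# minbreak); the file is rewritten only if the value actually changes.
#
#   def bump_pr(varname, origvalue):
#       return ('r1', 0, False)
#
#   edit_metadata_file('meta-example/recipes/foo/foo_1.0.bb', ['PR'], bump_pr)
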
def edit_bblayers_conf(bblayers_conf, add, remove):
"""Edit bblayers.conf, adding and/or removing layers"""
import fnmatch
def remove_trailing_sep(pth):
if pth and pth[-1] == os.sep:
pth = pth[:-1]
return pth
def layerlist_param(value):
if not value:
return []
elif isinstance(value, list):
return [remove_trailing_sep(x) for x in value]
else:
return [remove_trailing_sep(value)]
notadded = []
notremoved = []
addlayers = layerlist_param(add)
removelayers = layerlist_param(remove)
# Need to use a list here because we can't set non-local variables from a callback in python 2.x
bblayercalls = []
def handle_bblayers(varname, origvalue):
bblayercalls.append(varname)
updated = False
bblayers = [remove_trailing_sep(x) for x in origvalue.split()]
if removelayers:
for removelayer in removelayers:
matched = False
for layer in bblayers:
if fnmatch.fnmatch(layer, removelayer):
updated = True
matched = True
bblayers.remove(layer)
break
if not matched:
notremoved.append(removelayer)
if addlayers:
for addlayer in addlayers:
if addlayer not in bblayers:
updated = True
bblayers.append(addlayer)
else:
notadded.append(addlayer)
if updated:
return (bblayers, 2, False)
else:
return (origvalue, 2, False)
edit_metadata_file(bblayers_conf, ['BBLAYERS'], handle_bblayers)
if not bblayercalls:
raise Exception('Unable to find BBLAYERS in %s' % bblayers_conf)
return (notadded, notremoved)
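
# Illustrative usage with hypothetical paths; add/remove accept a single path
# or a list, and remove entries may be fnmatch-style patterns:
#
#   notadded, notremoved = edit_bblayers_conf(
#       'build/conf/bblayers.conf',
#       add='/home/user/meta-custom',
#       remove='*/meta-obsolete')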