Bump buildroot to 2020.11-rc1 (#985)

* Update buildroot-patches for 2020.11-rc1 buildroot

* Update buildroot to 2020.11-rc1

Signed-off-by: Stefan Agner <stefan@agner.ch>

* Don't rely on sfdisk --list-free output

The --list-free (-F) argument does not allow machine readable mode. And
it seems that the output format changes over time (different spacing,
using size postfixes instead of raw blocks).

Use sfdisk json output and calculate free partition space ourselves. This
works for 2.35 and 2.36 and is more robust, since we rely on output that
is meant for scripts to parse.

* Migrate defconfigs for Buildroot 2020.11-rc1

In particular, rename BR2_TARGET_UBOOT_BOOT_SCRIPT(_SOURCE) to
BR2_PACKAGE_HOST_UBOOT_TOOLS_BOOT_SCRIPT(_SOURCE).

* Rebase/remove systemd patches for systemd 246

* Drop apparmor/libapparmor from buildroot-external

* hassos-persists: use /run as directory for lockfiles

The U-Boot tools use /var/lock by default which is not created any more
by systemd by default (it is under tmpfiles legacy.conf, which we no
longer install).

* Disable systemd-update-done.service

The service is not suited for pure read-only systems. In particular the
service needs to be able to write a file in /etc and /var. Remove the
service. Note: This is a static service and cannot be removed using
systemd-preset.

* Disable apparmor.service for now

The service loads all default profiles. Some might actually cause
problems. E.g. the profile for ping seems not to match our setup for
/etc/resolv.conf:
[85503.634653] audit: type=1400 audit(1605286002.684:236): apparmor="DENIED" operation="open" profile="ping" name="/run/resolv.conf" pid=27585 comm="ping" requested_mask="r" denied_mask="r" fsuid=0 ouid=0
This commit is contained in:
Stefan Agner
2020-11-13 18:25:44 +01:00
committed by GitHub
parent 25a0dd3082
commit a0871be6c0
4024 changed files with 68095 additions and 47900 deletions

View File

@@ -113,8 +113,8 @@ function apply_patch {
echo " to be applied : ${path}/${patch}"
exit 1
fi
if grep -q "^rename from" ${path}/${patch} && \
grep -q "^rename to" ${path}/${patch} ; then
if ${uncomp} "${path}/$patch" | grep -q "^rename from" && \
${uncomp} "${path}/$patch" | grep -q "^rename to" ; then
echo "Error: patch contains some renames, not supported by old patch versions"
exit 1
fi

View File

@@ -0,0 +1,80 @@
#!/usr/bin/env python3
# This script expects to be run from the Buildroot top directory.
import os
import pexpect
import sys
import time
def main():
    """Boot the qemu image produced for a defconfig and sanity-check it.

    Expects a single argument, the defconfig name; anything that does not
    start with 'qemu_' is silently ignored (exit 0).  Boots the image via
    output/images/start-qemu.sh, waits for the login prompt, logs in as
    root and powers the machine off.  Exits non-zero on boot failure.
    """
    if len(sys.argv) != 2:
        print("Error: incorrect number of arguments")
        print("""Usage: boot-qemu-image.py <qemu_arch_defconfig>""")
        sys.exit(1)

    # Ignore non Qemu defconfig
    if not sys.argv[1].startswith('qemu_'):
        sys.exit(0)

    qemu_start = os.path.join(os.getcwd(), 'output/images/start-qemu.sh')

    child = pexpect.spawn(qemu_start, ['serial-only'],
                          timeout=5, encoding='utf-8',
                          env={"QEMU_AUDIO_DRV": "none"})

    # We want only stdout into the log to avoid double echo
    child.logfile = sys.stdout

    # Let the spawn actually try to fork+exec to the wrapper, and then
    # let the wrapper exec the qemu process.
    time.sleep(1)

    try:
        child.expect(["buildroot login:", pexpect.TIMEOUT], timeout=60)
    except pexpect.EOF as e:
        # Some emulations require a fork of qemu-system, which may be
        # missing on the system, and is not provided by Buildroot.
        # In this case, spawn above will succeed at starting the wrapper
        # start-qemu.sh, but that one will fail (exit with 127) in such
        # a situation.
        # Fix: renamed 'exit' -> 'exit_statuses' so the builtin exit()
        # is no longer shadowed.
        exit_statuses = [int(line.split(' ')[1])
                         for line in e.value.splitlines()
                         if line.startswith('exitstatus: ')]
        if exit_statuses and exit_statuses[0] == 127:
            # Fix: the wrapper is called start-qemu.sh (see spawn above),
            # the old message had the name reversed.
            print('start-qemu.sh could not find the qemu binary')
            sys.exit(0)
        print("Connection problem, exiting.")
        sys.exit(1)
    except pexpect.TIMEOUT:
        print("System did not boot in time, exiting.")
        sys.exit(1)

    child.sendline("root\r")
    try:
        child.expect(["# ", pexpect.TIMEOUT], timeout=60)
    except pexpect.EOF:
        print("Cannot connect to shell")
        sys.exit(1)
    except pexpect.TIMEOUT:
        print("Timeout while waiting for shell")
        sys.exit(1)

    child.sendline("poweroff\r")
    try:
        child.expect(["System halted", pexpect.TIMEOUT], timeout=60)
        child.expect(pexpect.EOF)
    except pexpect.EOF:
        pass
    except pexpect.TIMEOUT:
        # Qemu may not exit properly after "System halted", ignore.
        print("Cannot halt machine")
        sys.exit(0)


if __name__ == "__main__":
    main()

View File

@@ -161,6 +161,8 @@ do_kconfig() {
toolchains
jpeg
openssl
skeleton
init
)
for br2 in "${items[@]}"; do
@@ -224,6 +226,22 @@ do_kconfig() {
else
printf '# No openssl from: %s\n\n' "${br2_desc}"
fi >>"${outputdir}/.br2-external.in.openssl"
if [ -f "${br2_ext}/provides/skeleton.in" ]; then
printf 'comment "skeleton from: %s"\n' "${br2_desc}"
printf 'source "%s/provides/skeleton.in"\n' "${br2_ext}"
printf '\n'
else
printf '# No skeleton from: %s\n\n' "${br2_desc}"
fi >>"${outputdir}/.br2-external.in.skeleton"
if [ -f "${br2_ext}/provides/init.in" ]; then
printf 'comment "init from: %s"\n' "${br2_desc}"
printf 'source "%s/provides/init.in"\n' "${br2_ext}"
printf '\n'
else
printf '# No init from: %s\n\n' "${br2_desc}"
fi >>"${outputdir}/.br2-external.in.init"
done
printf 'endmenu\n' >>"${outputdir}/.br2-external.in.menus"

View File

@@ -25,6 +25,9 @@ declare -a IGNORES=(
# it for a different architecture (e.g. i386 grub on x86_64).
"/lib/grub"
"/usr/lib/grub"
# Guile modules are ELF files, with a "None" machine
"/usr/lib/guile"
)
while getopts p:l:r:a:i: OPT ; do

View File

@@ -0,0 +1,42 @@
#!/usr/bin/env python3
# This script checks that all lines present in the defconfig are
# still present in the .config
import sys
def main():
    """Warn (and exit 1) when a defconfig line did not survive into .config."""
    if len(sys.argv) != 3:
        print("Error: incorrect number of arguments")
        print("""Usage: check-dotconfig <configfile> <defconfig>""")
        sys.exit(1)
    configfile, defconfig = sys.argv[1], sys.argv[2]
    # strip() to get rid of trailing \n
    with open(configfile) as configf:
        config_entries = set(line.strip() for line in configf)
    wanted = []
    with open(defconfig) as defconfigf:
        # strip() to get rid of trailing \n; keep only BR2_ assignments
        # and explicit "# BR2_... is not set" lines.
        for raw in defconfigf:
            entry = raw.strip()
            if entry.startswith("BR2_"):
                wanted.append(entry)
            elif entry.startswith('# BR2_') and entry.endswith(' is not set'):
                wanted.append(entry)
    # Check that all the defconfig lines are still present
    missing = [entry for entry in wanted if entry not in config_entries]
    if missing:
        print("WARN: defconfig {} can't be used:".format(defconfig))
        for entry in missing:
            print(" Missing: {}".format(entry))
        sys.exit(1)


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,196 @@
#!/usr/bin/env python
# Copyright (C) 2009 by Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
# Copyright (C) 2020 by Gregory CLEMENT <gregory.clement@bootlin.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import argparse
import datetime
import os
import json
import sys
import cve as cvecheck
class Package:
    """Record for one Buildroot package: name, version and CVE bookkeeping."""

    def __init__(self, name, version, ignored_cves):
        self.name = name
        self.version = version
        # CVE identifiers found to affect this package (filled in later
        # by check_package_cves).
        self.cves = []
        # CVE identifiers this package explicitly ignores.
        self.ignored_cves = ignored_cves
def check_package_cves(nvd_path, packages):
    """Walk the NVD feeds and record, on each package, the CVEs affecting it.

    packages maps a package name to its Package object; every affected
    package gets the CVE identifier appended to its .cves list.
    """
    if not os.path.isdir(nvd_path):
        os.makedirs(nvd_path)
    for cve in cvecheck.CVE.read_nvd_dir(nvd_path):
        for name in cve.pkg_names:
            pkg = packages.get(name)
            if pkg is None:
                continue
            if cve.affects(pkg.name, pkg.version, pkg.ignored_cves) == cve.CVE_AFFECTS:
                pkg.cves.append(cve.identifier)
# Static HTML page header: pulls in the sorttable script (so the report
# table can be sorted by clicking the column headers) and the cell styling
# used by dump_html_pkg ("correct" = no CVE, "wrong" = affected).
html_header = """
<head>
<script src=\"https://www.kryogenix.org/code/browser/sorttable/sorttable.js\"></script>
<style type=\"text/css\">
table {
width: 100%;
}
td {
border: 1px solid black;
}
td.centered {
text-align: center;
}
td.wrong {
background: #ff9a69;
}
td.correct {
background: #d2ffc4;
}
</style>
<title>CVE status for Buildroot configuration</title>
</head>
<p id=\"sortable_hint\"></p>
"""

# Static HTML page footer: shows the sorting hint only when the sorttable
# script actually loaded.
html_footer = """
</body>
<script>
if (typeof sorttable === \"object\") {
document.getElementById(\"sortable_hint\").innerHTML =
\"hint: the table can be sorted by clicking the column headers\"
}
</script>
</html>
"""
def dump_html_pkg(f, pkg):
    """Emit one HTML table row describing *pkg* and the CVEs affecting it."""
    f.write(" <tr>\n")
    f.write(" <td>%s</td>\n" % pkg.name)
    # Current version, truncated for display when overly long
    version = pkg.version if len(pkg.version) <= 20 else pkg.version[:20] + "..."
    f.write(" <td class=\"centered\">%s</td>\n" % version)
    # CVE cell: green ("correct") when clean, orange ("wrong") otherwise
    td_class = ["centered", "correct" if not pkg.cves else "wrong"]
    f.write(" <td class=\"%s\">\n" % " ".join(td_class))
    for cve in pkg.cves:
        f.write(" <a href=\"https://security-tracker.debian.org/tracker/%s\">%s<br/>\n" % (cve, cve))
    f.write(" </td>\n")
    f.write(" </tr>\n")
def dump_html_all_pkgs(f, packages):
    """Write the sortable HTML table listing every package to file object *f*."""
    f.write("""
<table class=\"sortable\">
<tr>
<td>Package</td>
<td class=\"centered\">Version</td>
<td class=\"centered\">CVEs</td>
</tr>
""")
    # One row per package, in the order given by the caller.
    for pkg in packages:
        dump_html_pkg(f, pkg)
    f.write("</table>")
def dump_html_gen_info(f, date):
    """Emit the generation timestamp paragraph at the bottom of the page."""
    stamp = str(date)
    f.write("<p><i>Generated on %s</i></p>\n" % stamp)
def dump_html(packages, date, output):
    """Write the full HTML report (header, table, timestamp, footer) to *output*."""
    with open(output, 'w') as f:
        f.write(html_header)
        dump_html_all_pkgs(f, packages)
        dump_html_gen_info(f, date)
        f.write(html_footer)
def dump_json(packages, date, output):
    """Serialize the package list and the generation date to a JSON file."""
    # Index the packages by name instead of keeping a flat list
    pkgs = {}
    for pkg in packages:
        pkgs[pkg.name] = {
            "version": pkg.version,
            "cves": pkg.cves,
        }
    # The actual structure to dump, add date to it
    final = {'packages': pkgs,
             'date': str(date)}
    with open(output, 'w') as f:
        json.dump(final, f, indent=2, separators=(',', ': '))
        f.write('\n')
def resolvepath(path):
    """Expand a leading '~' and return the absolute form of *path*."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
def parse_args():
    """Parse command-line arguments.

    --nvd-path is required; at least one of --html / --json must be
    given, otherwise argparse errors out with a usage message.
    """
    parser = argparse.ArgumentParser()
    output = parser.add_argument_group('output', 'Output file(s)')
    output.add_argument('--html', dest='html', type=resolvepath,
                        help='HTML output file')
    output.add_argument('--json', dest='json', type=resolvepath,
                        help='JSON output file')
    parser.add_argument('--nvd-path', dest='nvd_path',
                        help='Path to the local NVD database', type=resolvepath,
                        required=True)
    args = parser.parse_args()
    if not args.html and not args.json:
        parser.error('at least one of --html or --json (or both) is required')
    return args
def __main__():
    """Read the package description JSON on stdin, check CVEs, write reports."""
    content = json.load(sys.stdin)
    # One Package per top-level entry; missing fields default to ''.
    packages = [Package(name, info.get('version', ''), info.get('ignore_cves', ''))
                for name, info in content.items()]
    args = parse_args()
    date = datetime.datetime.utcnow()
    print("Checking packages CVEs")
    check_package_cves(args.nvd_path, {p.name: p for p in packages})
    if args.html:
        print("Write HTML")
        dump_html(packages, date, args.html)
    if args.json:
        print("Write JSON")
        dump_json(packages, date, args.json)


__main__()

244
buildroot/support/scripts/cve.py Executable file
View File

@@ -0,0 +1,244 @@
#!/usr/bin/env python
# Copyright (C) 2009 by Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
# Copyright (C) 2020 by Gregory CLEMENT <gregory.clement@bootlin.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import datetime
import os
import requests # URL checking
import distutils.version
import time
import gzip
import sys
import operator
try:
import ijson
except ImportError:
sys.stderr.write("You need ijson to parse NVD for CVE check\n")
exit(1)
# NOTE(review): presumably makes helpers under the Buildroot utils/
# directory importable when run from the top directory — confirm.
sys.path.append('utils/')
# Earliest feed year fetched; the loop in read_nvd_dir starts here.
NVD_START_YEAR = 2002
# Version of the NVD JSON feed schema consumed by this script.
NVD_JSON_VERSION = "1.1"
NVD_BASE_URL = "https://nvd.nist.gov/feeds/json/cve/" + NVD_JSON_VERSION
# Map the CPE version-range operators to Python comparison functions,
# used by CVE.affects() to test a package version against a CVE range.
ops = {
    '>=': operator.ge,
    '>': operator.gt,
    '<=': operator.le,
    '<': operator.lt,
    '=': operator.eq
}
class CVE:
    """An accessor class for CVE Items in NVD files"""

    # Verdicts returned by affects()
    CVE_AFFECTS = 1
    CVE_DOESNT_AFFECT = 2
    CVE_UNKNOWN = 3

    def __init__(self, nvd_cve):
        """Initialize a CVE from its NVD JSON representation"""
        self.nvd_cve = nvd_cve

    @staticmethod
    def download_nvd_year(nvd_path, year):
        """Return the path of the local NVD feed for *year*, refreshing it first.

        The local copy is considered fresh when the .json.gz is less than
        a day old, or when the upstream .meta file matches the one saved
        from the previous download; otherwise the feed is re-downloaded.
        """
        metaf = "nvdcve-%s-%s.meta" % (NVD_JSON_VERSION, year)
        path_metaf = os.path.join(nvd_path, metaf)
        jsonf_gz = "nvdcve-%s-%s.json.gz" % (NVD_JSON_VERSION, year)
        path_jsonf_gz = os.path.join(nvd_path, jsonf_gz)
        # If the database file is less than a day old, we assume the NVD data
        # locally available is recent enough.
        if os.path.exists(path_jsonf_gz) and os.stat(path_jsonf_gz).st_mtime >= time.time() - 86400:
            return path_jsonf_gz
        # If not, we download the meta file
        url = "%s/%s" % (NVD_BASE_URL, metaf)
        print("Getting %s" % url)
        page_meta = requests.get(url)
        page_meta.raise_for_status()
        # If the meta file already existed, we compare the existing
        # one with the data newly downloaded. If they are different,
        # we need to re-download the database.
        # If the database does not exist locally, we need to redownload it in
        # any case.
        if os.path.exists(path_metaf) and os.path.exists(path_jsonf_gz):
            # Fix: read through a context manager instead of leaking the
            # open file handle.
            with open(path_metaf, "r") as f:
                meta_known = f.read()
            if page_meta.text == meta_known:
                return path_jsonf_gz
        # Grab the compressed JSON NVD, and write files to disk
        url = "%s/%s" % (NVD_BASE_URL, jsonf_gz)
        print("Getting %s" % url)
        page_json = requests.get(url)
        page_json.raise_for_status()
        # Fix: context managers guarantee the data is flushed and the
        # handles closed (previously both files were opened and never
        # closed explicitly).
        with open(path_jsonf_gz, "wb") as f:
            f.write(page_json.content)
        with open(path_metaf, "w") as f:
            f.write(page_meta.text)
        return path_jsonf_gz

    @classmethod
    def read_nvd_dir(cls, nvd_dir):
        """
        Iterate over all the CVEs contained in NIST Vulnerability Database
        feeds since NVD_START_YEAR. If the files are missing or outdated in
        nvd_dir, a fresh copy will be downloaded, and kept in .json.gz
        """
        for year in range(NVD_START_YEAR, datetime.datetime.now().year + 1):
            filename = CVE.download_nvd_year(nvd_dir, year)
            try:
                content = ijson.items(gzip.GzipFile(filename), 'CVE_Items.item')
            except:  # noqa: E722
                print("ERROR: cannot read %s. Please remove the file then rerun this script" % filename)
                raise
            for cve in content:
                yield cls(cve)

    def each_product(self):
        """Iterate over each product section of this cve"""
        for vendor in self.nvd_cve['cve']['affects']['vendor']['vendor_data']:
            for product in vendor['product']['product_data']:
                yield product

    def parse_node(self, node):
        """
        Parse the node inside the configurations section to extract the
        cpe information useful to know if a product is affected by
        the CVE. Actually only the product name and the version
        descriptor are needed, but we also provide the vendor name.
        """
        # The node containing the cpe entries matching the CVE can also
        # contain sub-nodes, so we need to manage it.
        for child in node.get('children', ()):
            for parsed_node in self.parse_node(child):
                yield parsed_node
        for cpe in node.get('cpe_match', ()):
            if not cpe['vulnerable']:
                # NOTE(review): this 'return' drops all remaining
                # cpe_match entries of the node once a single
                # non-vulnerable entry is seen; 'continue' may have been
                # intended. Kept as-is to preserve behavior — confirm
                # upstream.
                return
            vendor, product, version = cpe['cpe23Uri'].split(':')[3:6]
            op_start = ''
            op_end = ''
            v_start = ''
            v_end = ''
            if version != '*' and version != '-':
                # Version is defined, this is a '=' match
                op_start = '='
                v_start = version
            elif version == '-':
                # no version information is available
                op_start = '='
                v_start = version
            else:
                # Parse start version, end version and operators
                if 'versionStartIncluding' in cpe:
                    op_start = '>='
                    v_start = cpe['versionStartIncluding']
                if 'versionStartExcluding' in cpe:
                    op_start = '>'
                    v_start = cpe['versionStartExcluding']
                if 'versionEndIncluding' in cpe:
                    op_end = '<='
                    v_end = cpe['versionEndIncluding']
                if 'versionEndExcluding' in cpe:
                    op_end = '<'
                    v_end = cpe['versionEndExcluding']
            yield {
                'vendor': vendor,
                'product': product,
                'v_start': v_start,
                'op_start': op_start,
                'v_end': v_end,
                'op_end': op_end
            }

    def each_cpe(self):
        """Iterate over every cpe entry of the configurations section."""
        for node in self.nvd_cve['configurations']['nodes']:
            for cpe in self.parse_node(node):
                yield cpe

    @property
    def identifier(self):
        """The CVE unique identifier"""
        return self.nvd_cve['cve']['CVE_data_meta']['ID']

    @property
    def pkg_names(self):
        """The set of package names referred by this CVE definition"""
        return set(p['product'] for p in self.each_cpe())

    def affects(self, name, version, cve_ignore_list):
        """
        Return CVE_AFFECTS / CVE_DOESNT_AFFECT / CVE_UNKNOWN for the
        package *name* at *version*, honoring the per-package ignore list.
        """
        if self.identifier in cve_ignore_list:
            return self.CVE_DOESNT_AFFECT

        pkg_version = distutils.version.LooseVersion(version)
        if not hasattr(pkg_version, "version"):
            print("Cannot parse package '%s' version '%s'" % (name, version))
            pkg_version = None

        for cpe in self.each_cpe():
            if cpe['product'] != name:
                continue
            # '-' means the CVE carries no version information: assume
            # every version is affected.
            if cpe['v_start'] == '-':
                return self.CVE_AFFECTS
            if not cpe['v_start'] and not cpe['v_end']:
                print("No CVE affected version")
                continue
            if not pkg_version:
                continue

            if cpe['v_start']:
                try:
                    cve_affected_version = distutils.version.LooseVersion(cpe['v_start'])
                    inrange = ops.get(cpe['op_start'])(pkg_version, cve_affected_version)
                except TypeError:
                    return self.CVE_UNKNOWN

                # current package version is before v_start, so we're
                # not affected by the CVE
                if not inrange:
                    continue

            if cpe['v_end']:
                try:
                    cve_affected_version = distutils.version.LooseVersion(cpe['v_end'])
                    inrange = ops.get(cpe['op_end'])(pkg_version, cve_affected_version)
                except TypeError:
                    return self.CVE_UNKNOWN

                # current package version is after v_end, so we're
                # not affected by the CVE
                if not inrange:
                    continue

            # We're in the version range affected by this CVE
            return self.CVE_AFFECTS

        return self.CVE_DOESNT_AFFECT

View File

@@ -0,0 +1,498 @@
#!/usr/bin/env python3
import os.path
import re
import requests
import textwrap
# Root URL under which the per-arch toolchain listings, tarballs and
# config fragments are published.
BASE_URL = "https://toolchains.bootlin.com/downloads/releases/toolchains"

# Header written at the top of every generated file.
AUTOGENERATED_COMMENT = """# This file was auto-generated by support/scripts/gen-bootlin-toolchains
# Do not edit
"""
# In the below dict:
# - 'conditions' indicate the cumulative conditions under which the
# toolchain will be made available. In several situations, a given
# toolchain is usable on several architectures variants (for
# example, an ARMv6 toolchain can be used on ARMv7)
# - 'test_options' indicate one specific configuration where the
# toolchain can be used. It is used to create the runtime test
# cases. If 'test_options' does not exist, the code assumes it can
# be made equal to 'conditions'
# - 'prefix' is the prefix of the cross-compilation toolchain tools
# Keyed by the arch name used in the toolchains.bootlin.com URLs; see the
# comment block above for the meaning of 'conditions', 'test_options'
# and 'prefix'.
arches = {
    'aarch64': {
        'conditions': ['BR2_aarch64'],
        'prefix': 'aarch64',
    },
    'aarch64be': {
        'conditions': ['BR2_aarch64_be'],
        'prefix': 'aarch64_be',
    },
    'arcle-750d': {
        'conditions': ['BR2_arcle', 'BR2_arc750d'],
        'prefix': 'arc',
    },
    'arcle-hs38': {
        'conditions': ['BR2_arcle', 'BR2_archs38'],
        'prefix': 'arc',
    },
    'armv5-eabi': {
        'conditions': ['BR2_ARM_CPU_ARMV5', 'BR2_ARM_EABI'],
        'test_options': ['BR2_arm', 'BR2_arm926t', 'BR2_ARM_EABI'],
        'prefix': 'arm',
    },
    'armv6-eabihf': {
        'conditions': ['BR2_ARM_CPU_ARMV6', 'BR2_ARM_EABIHF'],
        'test_options': ['BR2_arm', 'BR2_arm1176jzf_s', 'BR2_ARM_EABIHF'],
        'prefix': 'arm',
    },
    'armv7-eabihf': {
        'conditions': ['BR2_ARM_CPU_ARMV7A', 'BR2_ARM_EABIHF'],
        'test_options': ['BR2_arm', 'BR2_cortex_a8', 'BR2_ARM_EABIHF'],
        'prefix': 'arm',
    },
    'armv7m': {
        'conditions': ['BR2_ARM_CPU_ARMV7M'],
        'test_options': ['BR2_arm', 'BR2_cortex_m4'],
        'prefix': 'arm',
    },
    'm68k-68xxx': {
        'conditions': ['BR2_m68k_m68k'],
        'test_options': ['BR2_m68k', 'BR2_m68k_68040'],
        'prefix': 'm68k',
    },
    'm68k-coldfire': {
        'conditions': ['BR2_m68k_cf'],
        'test_options': ['BR2_m68k', 'BR2_m68k_cf5208'],
        'prefix': 'm68k',
    },
    'microblazebe': {
        'conditions': ['BR2_microblazebe'],
        'prefix': 'microblaze',
    },
    'microblazeel': {
        'conditions': ['BR2_microblazeel'],
        'prefix': 'microblazeel',
    },
    'mips32': {
        # Not sure it could be used by other mips32 variants?
        'conditions': ['BR2_mips', 'BR2_mips_32', '!BR2_MIPS_SOFT_FLOAT'],
        'prefix': 'mips',
    },
    'mips32el': {
        # Not sure it could be used by other mips32el variants?
        'conditions': ['BR2_mipsel', 'BR2_mips_32', '!BR2_MIPS_SOFT_FLOAT'],
        'prefix': 'mipsel',
    },
    'mips32r5el': {
        'conditions': ['BR2_mipsel', 'BR2_mips_32r5', '!BR2_MIPS_SOFT_FLOAT'],
        'prefix': 'mipsel',
    },
    'mips32r6el': {
        'conditions': ['BR2_mipsel', 'BR2_mips_32r6', '!BR2_MIPS_SOFT_FLOAT'],
        'prefix': 'mipsel',
    },
    'mips64': {
        # Not sure it could be used by other mips64 variants?
        'conditions': ['BR2_mips64', 'BR2_mips_64', '!BR2_MIPS_SOFT_FLOAT'],
        'prefix': 'mips64',
    },
    'mips64-n32': {
        # Not sure it could be used by other mips64 variants?
        'conditions': ['BR2_mips64', 'BR2_mips_64', 'BR2_MIPS_NABI32', '!BR2_MIPS_SOFT_FLOAT'],
        'prefix': 'mips64',
    },
    'mips64el-n32': {
        # Not sure it could be used by other mips64el variants?
        'conditions': ['BR2_mips64el', 'BR2_mips_64', 'BR2_MIPS_NABI32', '!BR2_MIPS_SOFT_FLOAT'],
        'prefix': 'mips64el',
    },
    'mips64r6el-n32': {
        'conditions': ['BR2_mips64el', 'BR2_mips_64r6', 'BR2_MIPS_NABI32', '!BR2_MIPS_SOFT_FLOAT'],
        'prefix': 'mips64el',
    },
    'nios2': {
        'conditions': ['BR2_nios2'],
        'prefix': 'nios2',
    },
    'openrisc': {
        'conditions': ['BR2_or1k'],
        'prefix': 'or1k',
    },
    'powerpc-440fp': {
        # Not sure it could be used by other powerpc variants?
        'conditions': ['BR2_powerpc', 'BR2_powerpc_440fp'],
        'prefix': 'powerpc',
    },
    'powerpc-e300c3': {
        # Not sure it could be used by other powerpc variants?
        'conditions': ['BR2_powerpc', 'BR2_powerpc_e300c3'],
        'prefix': 'powerpc',
    },
    'powerpc-e500mc': {
        # Not sure it could be used by other powerpc variants?
        'conditions': ['BR2_powerpc', 'BR2_powerpc_e500mc'],
        'prefix': 'powerpc',
    },
    'powerpc64-e5500': {
        'conditions': ['BR2_powerpc64', 'BR2_powerpc_e5500'],
        'prefix': 'powerpc64',
    },
    'powerpc64-e6500': {
        'conditions': ['BR2_powerpc64', 'BR2_powerpc_e6500'],
        'prefix': 'powerpc64',
    },
    'powerpc64-power8': {
        'conditions': ['BR2_powerpc64', 'BR2_powerpc_power8'],
        'prefix': 'powerpc64',
    },
    'powerpc64le-power8': {
        'conditions': ['BR2_powerpc64le', 'BR2_powerpc_power8'],
        'prefix': 'powerpc64le',
    },
    'riscv32-ilp32d': {
        'conditions': ['BR2_riscv', 'BR2_riscv_g', 'BR2_RISCV_32', 'BR2_RISCV_ABI_ILP32D'],
        'prefix': 'riscv32',
    },
    'riscv64': {
        'conditions': ['BR2_riscv', 'BR2_riscv_g', 'BR2_RISCV_64', 'BR2_RISCV_ABI_LP64'],
        'prefix': 'riscv64',
    },
    'sh-sh4': {
        'conditions': ['BR2_sh', 'BR2_sh4'],
        'prefix': 'sh4',
    },
    'sh-sh4aeb': {
        'conditions': ['BR2_sh', 'BR2_sh4aeb'],
        'prefix': 'sh4aeb',
    },
    'sparc64': {
        'conditions': ['BR2_sparc64', 'BR2_sparc_v9'],
        'prefix': 'sparc64',
    },
    'sparcv8': {
        'conditions': ['BR2_sparc', 'BR2_sparc_v8'],
        'prefix': 'sparc',
    },
    'x86-64-core-i7': {
        'conditions': ['BR2_x86_64',
                       'BR2_X86_CPU_HAS_MMX',
                       'BR2_X86_CPU_HAS_SSE',
                       'BR2_X86_CPU_HAS_SSE2',
                       'BR2_X86_CPU_HAS_SSE3',
                       'BR2_X86_CPU_HAS_SSSE3',
                       'BR2_X86_CPU_HAS_SSE4',
                       'BR2_X86_CPU_HAS_SSE42'],
        'test_options': ['BR2_x86_64', 'BR2_x86_corei7'],
        'prefix': 'x86_64',
    },
    'x86-core2': {
        'conditions': ['BR2_i386',
                       'BR2_X86_CPU_HAS_MMX',
                       'BR2_X86_CPU_HAS_SSE',
                       'BR2_X86_CPU_HAS_SSE2',
                       'BR2_X86_CPU_HAS_SSE3',
                       'BR2_X86_CPU_HAS_SSSE3'],
        'test_options': ['BR2_i386', 'BR2_x86_core2'],
        'prefix': 'i686',
    },
    'x86-i686': {
        'conditions': ['BR2_i386',
                       '!BR2_x86_i486',
                       '!BR2_x86_i586',
                       '!BR2_x86_x1000'],
        'test_options': ['BR2_i386',
                         'BR2_x86_i686'],
        'prefix': 'i686',
    },
    'xtensa-lx60': {
        'conditions': ['BR2_xtensa', 'BR2_xtensa_fsf'],
        'prefix': 'xtensa',
    },
}
class Toolchain:
    """One Bootlin toolchain build (arch + libc + variant + version),
    able to emit its Config.in, .mk, .hash and runtime-test snippets.

    NOTE(review): the constructor performs two HTTP requests against
    toolchains.bootlin.com (config fragment and sha256).
    """

    def __init__(self, arch, libc, variant, version):
        self.arch = arch
        self.libc = libc
        self.variant = variant
        self.version = version
        # Base of every file name published for this toolchain
        self.fname_prefix = "%s--%s--%s-%s" % (self.arch, self.libc, self.variant, self.version)
        self.option_name = "BR2_TOOLCHAIN_EXTERNAL_BOOTLIN_%s_%s_%s" % \
            (self.arch.replace("-", "_").upper(), self.libc.upper(), self.variant.replace("-", "_").upper())
        # Fetch the Buildroot config fragment and the tarball sha256
        self.fragment = requests.get(self.fragment_url).text.split("\n")
        self.sha256 = requests.get(self.hash_url).text.split(" ")[0]

    @property
    def tarball_url(self):
        """URL of the toolchain tarball."""
        return os.path.join(BASE_URL, self.arch, "tarballs",
                            self.fname_prefix + ".tar.bz2")

    @property
    def hash_url(self):
        """URL of the sha256 file for the tarball."""
        return os.path.join(BASE_URL, self.arch, "tarballs",
                            self.fname_prefix + ".sha256")

    @property
    def fragment_url(self):
        """URL of the Buildroot config fragment for this toolchain."""
        return os.path.join(BASE_URL, self.arch, "fragments",
                            self.fname_prefix + ".frag")

    def gen_config_in_options(self, f):
        """Emit the Config.in choice entry: depends/selects are derived
        from the arch conditions and the downloaded config fragment."""
        f.write("config %s\n" % self.option_name)
        f.write("\tbool \"%s %s %s %s\"\n" %
                (self.arch, self.libc, self.variant, self.version))
        depends = []
        selects = []
        for c in arches[self.arch]['conditions']:
            depends.append(c)
        for frag in self.fragment:
            # libc type
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_CUSTOM_UCLIBC"):
                selects.append("BR2_TOOLCHAIN_EXTERNAL_UCLIBC")
            elif frag.startswith("BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC"):
                # glibc needs mmu support
                depends.append("BR2_USE_MMU")
                # glibc doesn't support static only configuration
                depends.append("!BR2_STATIC_LIBS")
                selects.append("BR2_TOOLCHAIN_EXTERNAL_GLIBC")
                # all glibc toolchains have RPC support
                selects.append("BR2_TOOLCHAIN_HAS_NATIVE_RPC")
            elif frag.startswith("BR2_TOOLCHAIN_EXTERNAL_CUSTOM_MUSL"):
                # musl needs mmu support
                depends.append("BR2_USE_MMU")
                selects.append("BR2_TOOLCHAIN_EXTERNAL_MUSL")
            # gcc version
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_GCC_"):
                m = re.match("^BR2_TOOLCHAIN_EXTERNAL_GCC_([0-9_]*)=y$", frag)
                assert m, "Cannot get gcc version for toolchain %s" % self.fname_prefix
                selects.append("BR2_TOOLCHAIN_GCC_AT_LEAST_%s" % m[1])
            # kernel headers version
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_HEADERS_"):
                m = re.match("^BR2_TOOLCHAIN_EXTERNAL_HEADERS_([0-9_]*)=y$", frag)
                assert m, "Cannot get kernel headers version for toolchain %s" % self.fname_prefix
                selects.append("BR2_TOOLCHAIN_HEADERS_AT_LEAST_%s" % m[1])
            # C++
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_CXX"):
                selects.append("BR2_INSTALL_LIBSTDCPP")
            # SSP
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_HAS_SSP"):
                selects.append("BR2_TOOLCHAIN_HAS_SSP")
            # wchar
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_WCHAR"):
                selects.append("BR2_USE_WCHAR")
            # locale
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_LOCALE"):
                # locale implies the availability of wchar
                selects.append("BR2_USE_WCHAR")
                selects.append("BR2_ENABLE_LOCALE")
            # thread support
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS"):
                selects.append("BR2_TOOLCHAIN_HAS_THREADS")
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_DEBUG"):
                selects.append("BR2_TOOLCHAIN_HAS_THREADS_DEBUG")
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_HAS_THREADS_NPTL"):
                selects.append("BR2_TOOLCHAIN_HAS_THREADS_NPTL")
            # RPC
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_INET_RPC"):
                selects.append("BR2_TOOLCHAIN_HAS_NATIVE_RPC")
            # D language
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_DLANG"):
                selects.append("BR2_TOOLCHAIN_HAS_DLANG")
            # fortran
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_FORTRAN"):
                selects.append("BR2_TOOLCHAIN_HAS_FORTRAN")
            # OpenMP
            if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_OPENMP"):
                selects.append("BR2_TOOLCHAIN_HAS_OPENMP")
        for depend in depends:
            f.write("\tdepends on %s\n" % depend)
        for select in selects:
            f.write("\tselect %s\n" % select)
        f.write("\thelp\n")
        desc = "Bootlin toolchain for the %s architecture, using the %s C library. " % \
            (self.arch, self.libc)
        if self.variant == "stable":
            desc += "This is a stable version, which means it is using stable and proven versions of gcc, gdb and binutils."
        else:
            desc += "This is a bleeding-edge version, which means it is using the latest versions of gcc, gdb and binutils."
        f.write(textwrap.fill(desc, width=62, initial_indent="\t ", subsequent_indent="\t ") + "\n")
        f.write("\n")
        f.write("\t https://toolchains.bootlin.com/\n")
        f.write("\n")

    def gen_mk(self, f):
        """Emit the .mk conditional defining version/source/site."""
        f.write("ifeq ($(%s),y)\n" % self.option_name)
        f.write("TOOLCHAIN_EXTERNAL_BOOTLIN_VERSION = %s\n" % self.version)
        f.write("TOOLCHAIN_EXTERNAL_BOOTLIN_SOURCE = %s--%s--%s-$(TOOLCHAIN_EXTERNAL_BOOTLIN_VERSION).tar.bz2\n" %
                (self.arch, self.libc, self.variant))
        f.write("TOOLCHAIN_EXTERNAL_BOOTLIN_SITE = %s\n" %
                os.path.join(BASE_URL, self.arch, "tarballs"))
        f.write("endif\n\n")
        # Fix: removed a stray dead 'pass' statement that followed the
        # last write.

    def gen_hash(self, f):
        """Emit the hash-file line for the toolchain tarball."""
        f.write("# From %s\n" % self.hash_url)
        f.write("sha256 %s %s\n" % (self.sha256, os.path.basename(self.tarball_url)))

    def gen_test(self, f):
        """Emit the runtime-test class definition for this toolchain."""
        if self.variant == "stable":
            variant = "Stable"
        else:
            variant = "BleedingEdge"
        testname = "TestExternalToolchainBootlin" + \
            self.arch.replace("-", "").capitalize() + \
            self.libc.capitalize() + variant
        f.write("\n\n")
        f.write("class %s(TestExternalToolchain):\n" % testname)
        f.write(" config = \"\"\"\n")
        if 'test_options' in arches[self.arch]:
            test_options = arches[self.arch]['test_options']
        else:
            test_options = arches[self.arch]['conditions']
        for opt in test_options:
            if opt.startswith("!"):
                f.write(" # %s is not set\n" % opt[1:])
            else:
                f.write(" %s=y\n" % opt)
        f.write(" BR2_TOOLCHAIN_EXTERNAL=y\n")
        f.write(" BR2_TOOLCHAIN_EXTERNAL_BOOTLIN=y\n")
        f.write(" %s=y\n" % self.option_name)
        f.write(" # BR2_TARGET_ROOTFS_TAR is not set\n")
        f.write(" \"\"\"\n")
        f.write(" toolchain_prefix = \"%s-linux\"\n" % arches[self.arch]['prefix'])
        f.write("\n")
        f.write(" def test_run(self):\n")
        f.write(" TestExternalToolchain.common_check(self)\n")

    def __repr__(self):
        return "Toolchain(arch=%s libc=%s variant=%s version=%s, option=%s)" % \
            (self.arch, self.libc, self.variant, self.version, self.option_name)
def get_toolchains():
    """Scrape toolchains.bootlin.com and return one Toolchain per
    (arch, libc, variant) tuple, keeping only the latest version of each."""
    toolchains = list()
    for arch, details in arches.items():
        print(arch)
        url = os.path.join(BASE_URL, arch, "available_toolchains")
        page = requests.get(url).text
        fnames = sorted(re.findall(r'<td><a href="(\w[^"]+)"', page))
        # This dict will allow us to keep only the latest version for
        # each toolchain: fnames is sorted, so later versions overwrite
        # earlier ones.
        tmp = dict()
        for fname in fnames:
            parts = fname.split('--')
            assert parts[0] == arch, "Arch does not match: %s vs. %s" % (parts[0], arch)
            libc = parts[1]
            if parts[2].startswith("stable-"):
                variant = "stable"
                version = parts[2][len("stable-"):]
            elif parts[2].startswith("bleeding-edge-"):
                variant = "bleeding-edge"
                version = parts[2][len("bleeding-edge-"):]
            else:
                # Fix: an unrecognized variant used to fall through and
                # silently reuse variant/version from the previous loop
                # iteration (or raise NameError on the first file).
                # Skip such entries explicitly instead.
                continue
            tmp[(arch, libc, variant)] = version
        toolchains += [Toolchain(k[0], k[1], k[2], v) for k, v in tmp.items()]
    return toolchains
def gen_config_in_options(toolchains, fpath):
    """Generate Config.in.options: the arch-support symbol, the
    prefix/provider defaults, and one choice entry per toolchain."""
    with open(fpath, "w") as f:
        f.write(AUTOGENERATED_COMMENT)
        f.write("config BR2_TOOLCHAIN_EXTERNAL_BOOTLIN_ARCH_SUPPORTS\n")
        f.write("\tbool\n")
        # One "default y" line per supported architecture combination.
        for arch, details in arches.items():
            f.write("\tdefault y if %s\n" % " && ".join(details['conditions']))
        f.write("\n")
        f.write("if BR2_TOOLCHAIN_EXTERNAL_BOOTLIN\n\n")
        f.write("config BR2_TOOLCHAIN_EXTERNAL_PREFIX\n")
        f.write("\tdefault \"$(ARCH)-linux\"\n")
        f.write("\n")
        f.write("config BR2_PACKAGE_PROVIDES_TOOLCHAIN_EXTERNAL\n")
        f.write("\tdefault \"toolchain-external-bootlin\"\n")
        f.write("\n")
        f.write("choice\n")
        f.write("\tprompt \"Bootlin toolchain variant\"\n")
        for toolchain in toolchains:
            toolchain.gen_config_in_options(f)
        f.write("endchoice\n")
        f.write("endif\n")
def gen_mk(toolchains, fpath):
    """Generate the .mk file referencing every toolchain."""
    banner = "#" * 80 + "\n"
    with open(fpath, "w") as f:
        # Standard Buildroot package banner
        f.write(banner)
        f.write("#\n")
        f.write("# toolchain-external-bootlin\n")
        f.write("#\n")
        f.write(banner)
        f.write("\n")
        f.write(AUTOGENERATED_COMMENT)
        for tc in toolchains:
            tc.gen_mk(f)
        f.write("$(eval $(toolchain-external-package))\n")
def gen_hash(toolchains, fpath):
    """Generate the .hash file with the sha256 of every toolchain tarball."""
    with open(fpath, "w") as f:
        f.write(AUTOGENERATED_COMMENT)
        for tc in toolchains:
            tc.gen_hash(f)
def gen_runtime_test(toolchains, fpath):
    """Generate the runtime-test module with one test class per toolchain."""
    with open(fpath, "w") as f:
        f.write(AUTOGENERATED_COMMENT)
        f.write("from tests.toolchain.test_external import TestExternalToolchain\n")
        for tc in toolchains:
            tc.gen_test(f)
def gen_toolchains(toolchains):
    """Regenerate every file derived from the Bootlin toolchain list.

    :param toolchains: iterable of Toolchain objects passed through to the
        individual generators (Config.in.options, .mk, .hash, runtime tests).
    """
    base = "toolchain/toolchain-external/toolchain-external-bootlin"
    # (generator, destination) pairs; every generator takes the same
    # (toolchains, path) signature.
    targets = (
        (gen_config_in_options, os.path.join(base, "Config.in.options")),
        (gen_mk, os.path.join(base, "toolchain-external-bootlin.mk")),
        (gen_hash, os.path.join(base, "toolchain-external-bootlin.hash")),
        (gen_runtime_test,
         os.path.join("support", "testing", "tests", "toolchain",
                      "test_external_bootlin.py")),
    )
    for writer, path in targets:
        writer(toolchains, path)
# Script entry point: scan the available toolchain tarballs once, then
# regenerate every derived file (Config.in.options, .mk, .hash and the
# runtime-test module) from the resulting list.
toolchains = get_toolchains()
gen_toolchains(toolchains)

View File

@@ -2,16 +2,105 @@
# Abort on the first failing command, including failures inside pipelines.
set -e
set -o pipefail

# NOTE(review): this span comes from a rendered diff; "input" appears to be
# only read by the superseded "cat ${input}" line — confirm against the
# committed file whether it is still needed.
input="${1}"
# Entry point: print the generated gitlab-ci pipeline on stdout.
# NOTE(review): this span comes from a rendered diff without +/- markers;
# the "cat ${input}" line looks like the old implementation shown next to
# the new preamble/gen_tests flow — confirm against the committed file.
main() {
    local template="${1}"
    cat "${input}"
    preamble "${template}"
    gen_tests
}
# NOTE(review): rendered-diff residue — this pipeline appears to belong to
# the previous implementation (its role is taken over by gen_tests); confirm
# against the committed file. It lists every defconfig and turns each name
# into a yaml job entry extending the .defconfig template.
(
    cd configs
    LC_ALL=C ls -1 *_defconfig
) \
    | sed 's/$/: { extends: .defconfig }/'
# Print the static yaml preamble: a "generated file" warning plus the
# image: line, followed by the contents of the template file.
# NOTE(review): the run-tests/sed/sort pipeline below looks like old code
# shown interleaved by the rendered diff — confirm against the committed file.
preamble() {
	local template="${1}"
	./support/testing/run-tests -l 2>&1 \
		| sed -r -e '/^test_run \((.*)\).*/!d; s//\1: { extends: .runtime_test }/' \
		| LC_ALL=C sort
	cat - "${template}" <<-_EOF_
	# This file is generated; do not edit!
	# Builds appear on https://gitlab.com/buildroot.org/buildroot/pipelines
	image: ${CI_JOB_IMAGE}
	_EOF_
}
# Emit one yaml line per job to run in this pipeline. Which job families
# are enabled (basic checks, defconfig builds, runtime tests) depends on
# the trigger: a tag push, a scheduled trigger (BR_SCHEDULE_JOBS), or a
# specially-suffixed branch name.
gen_tests() {
    local -a basics defconfigs runtimes
    local do_basics do_defconfigs do_runtime
    local defconfigs_ext cfg tst

    # Candidate job lists: static checks, all defconfigs found in configs/,
    # and all runtime tests reported by run-tests -l.
    basics=( DEVELOPERS flake8 package )

    defconfigs=( $(cd configs; LC_ALL=C ls -1 *_defconfig) )

    runtimes=( $(./support/testing/run-tests -l 2>&1 \
                 | sed -r -e '/^test_run \((.*)\).*/!d; s//\1/' \
                 | LC_ALL=C sort)
    )

    if [ -n "${CI_COMMIT_TAG}" ]; then
        # Tag pipeline: run every family.
        do_basics=true
        do_defconfigs=base
        do_runtime=true
    elif [ "${CI_PIPELINE_SOURCE}" = "trigger" ]; then
        # Scheduled/triggered pipeline: BR_SCHEDULE_JOBS picks the family.
        case "${BR_SCHEDULE_JOBS}" in
        (basic)
            do_basics=true
            do_defconfigs=check
            defconfigs_ext=_check
            ;;
        (defconfig)
            do_defconfigs=base
            ;;
        (runtime)
            do_runtime=true
            ;;
        esac
    else
        # Branch push: the branch-name suffix selects what to test.
        case "${CI_COMMIT_REF_NAME}" in
        (*-basics)
            do_basics=true
            do_defconfigs=check
            defconfigs_ext=_check
            ;;
        (*-defconfigs)
            do_defconfigs=base
            ;;
        (*-*_defconfig)
            # Build only the single defconfig named by the branch suffix.
            defconfigs=( "${CI_COMMIT_REF_NAME##*-}" )
            do_defconfigs=base
            ;;
        (*-runtime-tests)
            do_runtime=true
            ;;
        (*-tests.*)
            # Run only the runtime test(s) named by the branch suffix.
            runtimes=( "${CI_COMMIT_REF_NAME##*-}" )
            do_runtime=true
            ;;
        esac
    fi

    # If nothing else, at least do the basics to generate a valid pipeline
    if [ -z "${do_defconfigs}" \
         -a -z "${do_runtime}" \
       ]
    then
        do_basics=true
    fi

    if ${do_basics:-false}; then
        for tst in "${basics[@]}"; do
            printf 'check-%s: { extends: .check-%s_base }\n' "${tst}" "${tst}"
        done
    fi

    if [ -n "${do_defconfigs}" ]; then
        for cfg in "${defconfigs[@]}"; do
            printf '%s%s: { extends: .defconfig_%s }\n' \
                "${cfg}" "${defconfigs_ext}" "${do_defconfigs}"
        done
    fi

    if ${do_runtime:-false}; then
        printf '%s: { extends: .runtime_test_base }\n' "${runtimes[@]}"
    fi
}

# Script entry: forward all arguments (the template path) to main().
main "${@}"

View File

@@ -25,20 +25,13 @@ import os
from collections import defaultdict
import re
import subprocess
import requests # NVD database download
import json
import ijson
import distutils.version
import time
import gzip
import sys
sys.path.append('utils/')
from getdeveloperlib import parse_developers # noqa: E402
import cve as cvecheck # noqa: E402
NVD_START_YEAR = 2002
NVD_JSON_VERSION = "1.0"
NVD_BASE_URL = "https://nvd.nist.gov/feeds/json/cve/" + NVD_JSON_VERSION
INFRA_RE = re.compile(r"\$\(eval \$\(([a-z-]*)-package\)\)")
URL_RE = re.compile(r"\s*https?://\S*\s*$")
@@ -48,10 +41,6 @@ RM_API_STATUS_FOUND_BY_DISTRO = 2
RM_API_STATUS_FOUND_BY_PATTERN = 3
RM_API_STATUS_NOT_FOUND = 4
CVE_AFFECTS = 1
CVE_DOESNT_AFFECT = 2
CVE_UNKNOWN = 3
class Defconfig:
def __init__(self, name, path):
@@ -244,11 +233,12 @@ class Package:
self.status['pkg-check'] = ("error", "{} warnings".format(self.warnings))
return
def is_cve_ignored(self, cve):
@property
def ignored_cves(self):
"""
Tells if the CVE is ignored by the package
Give the list of CVEs ignored by the package
"""
return cve in self.all_ignored_cves.get(self.pkgvar(), [])
return list(self.all_ignored_cves.get(self.pkgvar(), []))
def set_developers(self, developers):
"""
@@ -280,120 +270,6 @@ class Package:
self.is_status_ok('license-files'), self.status['hash'], self.patch_count)
class CVE:
    """An accessor class for CVE Items in NVD files"""

    def __init__(self, nvd_cve):
        """Initialize a CVE from its NVD JSON representation"""
        # Raw dict parsed from the NVD feed's 'cve' entry; all accessors
        # below read from this structure.
        self.nvd_cve = nvd_cve

    @staticmethod
    def download_nvd_year(nvd_path, year):
        """Return the local path of the gzipped NVD feed for *year*,
        downloading or refreshing it under *nvd_path* when needed."""
        metaf = "nvdcve-%s-%s.meta" % (NVD_JSON_VERSION, year)
        path_metaf = os.path.join(nvd_path, metaf)
        jsonf_gz = "nvdcve-%s-%s.json.gz" % (NVD_JSON_VERSION, year)
        path_jsonf_gz = os.path.join(nvd_path, jsonf_gz)

        # If the database file is less than a day old, we assume the NVD data
        # locally available is recent enough.
        if os.path.exists(path_jsonf_gz) and os.stat(path_jsonf_gz).st_mtime >= time.time() - 86400:
            return path_jsonf_gz

        # If not, we download the meta file
        url = "%s/%s" % (NVD_BASE_URL, metaf)
        print("Getting %s" % url)
        page_meta = requests.get(url)
        page_meta.raise_for_status()

        # If the meta file already existed, we compare the existing
        # one with the data newly downloaded. If they are different,
        # we need to re-download the database.
        # If the database does not exist locally, we need to redownload it in
        # any case.
        if os.path.exists(path_metaf) and os.path.exists(path_jsonf_gz):
            meta_known = open(path_metaf, "r").read()
            if page_meta.text == meta_known:
                return path_jsonf_gz

        # Grab the compressed JSON NVD, and write files to disk
        url = "%s/%s" % (NVD_BASE_URL, jsonf_gz)
        print("Getting %s" % url)
        page_json = requests.get(url)
        page_json.raise_for_status()
        open(path_jsonf_gz, "wb").write(page_json.content)
        # Meta file is written last so a partial json download is retried.
        open(path_metaf, "w").write(page_meta.text)
        return path_jsonf_gz

    @classmethod
    def read_nvd_dir(cls, nvd_dir):
        """
        Iterate over all the CVEs contained in NIST Vulnerability Database
        feeds since NVD_START_YEAR. If the files are missing or outdated in
        nvd_dir, a fresh copy will be downloaded, and kept in .json.gz
        """
        for year in range(NVD_START_YEAR, datetime.datetime.now().year + 1):
            filename = CVE.download_nvd_year(nvd_dir, year)
            try:
                # ijson streams the (large) feed instead of loading it whole.
                content = ijson.items(gzip.GzipFile(filename), 'CVE_Items.item')
            except:  # noqa: E722
                print("ERROR: cannot read %s. Please remove the file then rerun this script" % filename)
                raise
            for cve in content:
                yield cls(cve['cve'])

    def each_product(self):
        """Iterate over each product section of this cve"""
        for vendor in self.nvd_cve['affects']['vendor']['vendor_data']:
            for product in vendor['product']['product_data']:
                yield product

    @property
    def identifier(self):
        """The CVE unique identifier"""
        return self.nvd_cve['CVE_data_meta']['ID']

    @property
    def pkg_names(self):
        """The set of package names referred by this CVE definition"""
        return set(p['product_name'] for p in self.each_product())

    def affects(self, br_pkg):
        """
        True if the Buildroot Package object passed as argument is affected
        by this CVE.

        Returns one of CVE_AFFECTS, CVE_DOESNT_AFFECT or CVE_UNKNOWN
        (module-level constants).
        """
        # Explicitly-ignored CVEs never affect the package.
        if br_pkg.is_cve_ignored(self.identifier):
            return CVE_DOESNT_AFFECT

        for product in self.each_product():
            if product['product_name'] != br_pkg.name:
                continue

            for v in product['version']['version_data']:
                if v["version_affected"] == "=":
                    # Exact-version match.
                    if br_pkg.current_version == v["version_value"]:
                        return CVE_AFFECTS
                elif v["version_affected"] == "<=":
                    # Range match: compare loosely-parsed versions; bail out
                    # (continue) when either side cannot be parsed.
                    pkg_version = distutils.version.LooseVersion(br_pkg.current_version)
                    if not hasattr(pkg_version, "version"):
                        print("Cannot parse package '%s' version '%s'" % (br_pkg.name, br_pkg.current_version))
                        continue
                    cve_affected_version = distutils.version.LooseVersion(v["version_value"])
                    if not hasattr(cve_affected_version, "version"):
                        print("Cannot parse CVE affected version '%s'" % v["version_value"])
                        continue
                    try:
                        affected = pkg_version <= cve_affected_version
                    except TypeError:
                        # Incomparable version components (e.g. str vs int).
                        return CVE_UNKNOWN
                    if affected:
                        return CVE_AFFECTS
                    else:
                        return CVE_DOESNT_AFFECT
                else:
                    print("version_affected: %s" % v['version_affected'])
        return CVE_DOESNT_AFFECT
def get_pkglist(npackages, package_list):
"""
Builds the list of Buildroot packages, returning a list of Package
@@ -417,7 +293,6 @@ def get_pkglist(npackages, package_list):
"package/x11r7/x11r7.mk",
"package/doc-asciidoc.mk",
"package/pkg-.*.mk",
"package/nvidia-tegra23/nvidia-tegra23.mk",
"toolchain/toolchain-external/pkg-toolchain-external.mk",
"toolchain/toolchain-external/toolchain-external.mk",
"toolchain/toolchain.mk",
@@ -656,10 +531,12 @@ def check_package_cves(nvd_path, packages):
if not os.path.isdir(nvd_path):
os.makedirs(nvd_path)
for cve in CVE.read_nvd_dir(nvd_path):
for cve in cvecheck.CVE.read_nvd_dir(nvd_path):
for pkg_name in cve.pkg_names:
if pkg_name in packages and cve.affects(packages[pkg_name]) == CVE_AFFECTS:
packages[pkg_name].cves.append(cve.identifier)
if pkg_name in packages:
pkg = packages[pkg_name]
if cve.affects(pkg.name, pkg.current_version, pkg.ignored_cves) == cve.CVE_AFFECTS:
pkg.cves.append(cve.identifier)
def calculate_stats(packages):

View File

@@ -15,7 +15,7 @@ import re
import sys
def compile_one(host_path, strip_root=None):
def compile_one(host_path, strip_root=None, verbose=False):
"""
Compile a .py file into a .pyc file located next to it.
@@ -24,6 +24,8 @@ def compile_one(host_path, strip_root=None):
:arg strip_root:
Prefix to remove from the original source paths encoded in compiled
files.
:arg verbose:
Print compiled file paths.
"""
if os.path.islink(host_path) or not os.path.isfile(host_path):
return # only compile real files
@@ -39,6 +41,9 @@ def compile_one(host_path, strip_root=None):
else:
runtime_path = host_path
if verbose:
print(" PYC {}".format(runtime_path))
# will raise an error if the file cannot be compiled
py_compile.compile(host_path, cfile=host_path + "c",
dfile=runtime_path, doraise=True)
@@ -63,6 +68,8 @@ def main():
Prefix to remove from the original source paths encoded
in compiled files
""")
parser.add_argument("--verbose", action="store_true",
help="Print compiled files")
args = parser.parse_args()
@@ -72,7 +79,8 @@ def main():
parser.error("DIR: not inside ROOT dir: {!r}".format(d))
for parent, _, files in os.walk(d):
for f in files:
compile_one(os.path.join(parent, f), args.strip_root)
compile_one(os.path.join(parent, f), args.strip_root,
args.verbose)
except Exception as e:
print("error: {}".format(e))