Update Buildroot to 2019.02.3 (#415)

* Update Buildroot to 2019.02.3

* Fix enter script

* Update ova_defconfig

* Fix network manager

* Remove runc patches

* Use same docker version

* Fix build

* Fix vmtools

* Fix depends

* Fix handling with tempfiles

* Fix permission handling

* Fix cp

* Cleanup

* Fix mounts
This commit is contained in:
Pascal Vizeli
2019-06-27 11:58:50 +02:00
committed by GitHub
parent bb201fb842
commit 41d3f59002
2416 changed files with 36288 additions and 21885 deletions

View File

@@ -20,6 +20,11 @@ declare -a IGNORES=(
# pru-software-support) legitimately install ELF binaries that
# are not for the target architecture
"/usr/share"
# Skip files in {/usr,}/lib/grub, since it is possible to have
# it for a different architecture (e.g. i386 grub on x86_64).
"/lib/grub"
"/usr/lib/grub"
)
while getopts p:l:r:a:i: OPT ; do

View File

@@ -39,6 +39,11 @@ is_elf() {
|grep -E 'Requesting program interpreter:' >/dev/null 2>&1
}
# This function tells whether a given ELF executable (first argument)
# needs a RPATH pointing to the host library directory or not. It
# needs such an RPATH if at least one of the libraries used by the ELF
# executable is available in the host library directory. This function
# returns 0 when a RPATH is needed, 1 otherwise.
elf_needs_rpath() {
local file="${1}"
local hostdir="${2}"
@@ -54,6 +59,13 @@ elf_needs_rpath() {
return 1
}
# This function checks whether at least one of the RPATH of the given
# ELF executable (first argument) properly points to the host library
# directory (second argument), either through an absolute RPATH or a
# relative RPATH. Having such a RPATH will make sure the ELF
# executable will find at runtime the shared libraries it depends
# on. This function returns 0 when a proper RPATH was found, or 1
# otherwise.
check_elf_has_rpath() {
local file="${1}"
local hostdir="${2}"
@@ -63,7 +75,8 @@ check_elf_has_rpath() {
for dir in ${rpath//:/ }; do
# Remove duplicate and trailing '/' for proper match
dir="$( sed -r -e 's:/+:/:g; s:/$::;' <<<"${dir}" )"
[ "${dir}" = "${hostdir}/lib" -o "${dir}" = "\$ORIGIN/../lib" ] && return 0
[ "${dir}" = "${hostdir}/lib" ] && return 0
[ "${dir}" = "\$ORIGIN/../lib" ] && return 0
done
done < <( readelf -d "${file}" \
|sed -r -e '/.* \(R(UN)?PATH\) +Library r(un)?path: \[(.+)\]$/!d' \

View File

@@ -0,0 +1,39 @@
#!/bin/sh
#
# Verify that a custom skeleton or overlay (path given as first argument)
# follows the merged /usr layout:
#   /
#   /bin  -> usr/bin
#   /lib  -> usr/lib
#   /sbin -> usr/sbin
#   /usr/bin/
#   /usr/lib/
#   /usr/sbin/
#
# Output: the list of non-compliant paths (empty if compliant).

# Print the inode number of the directory a path resolves to. Appending
# '/.' makes stat follow a symlink to its target directory, so the
# comparison works whichever way the links point (/bin -> /usr/bin or
# /usr/bin -> /bin). A missing path yields an empty string, since stat
# errors are discarded.
resolve_inode() {
    stat -c '%i' "${1}/." 2>/dev/null
}

not_merged_dirs=""
for d in lib bin sbin; do
    top_inode=$(resolve_inode "${1}/${d}")
    usr_inode=$(resolve_inode "${1}/usr/${d}")
    # A directory is an offender only when it exists and does not
    # resolve to the same inode as its /usr counterpart.
    if [ -n "${top_inode}" ] && [ "${top_inode}" != "${usr_inode}" ]; then
        not_merged_dirs="${not_merged_dirs} /${d}"
    fi
done

echo "${not_merged_dirs# }"

View File

@@ -0,0 +1,17 @@
#!/usr/bin/env bash

# Emit a complete GitLab CI configuration on stdout:
#   - the template file passed as $1, verbatim
#   - one "<name>_defconfig" job per defconfig in configs/
#   - one runtime-test job per test listed by run-tests
set -e
set -o pipefail

input="${1}"

cat "${input}"

(
    cd configs
    # Let the shell glob instead of parsing `ls` output; sort byte-wise
    # (LC_ALL=C) so the job order is reproducible regardless of locale.
    printf '%s\n' *_defconfig | LC_ALL=C sort
) \
    | sed 's/$/: { extends: .defconfig }/'

# run-tests prints "test_run (<name>) ..." lines; keep only <name>.
./support/testing/run-tests -l 2>&1 \
    | sed -r -e '/^test_run \((.*)\).*/!d; s//\1: { extends: .runtime_test }/' \
    | LC_ALL=C sort

View File

@@ -69,14 +69,14 @@ import matplotlib.font_manager as fm # noqa: E402
import csv # noqa: E402
import argparse # noqa: E402
steps = ['extract', 'patch', 'configure', 'build',
steps = ['download', 'extract', 'patch', 'configure', 'build',
'install-target', 'install-staging', 'install-images',
'install-host']
default_colors = ['#e60004', '#009836', '#2e1d86', '#ffed00',
default_colors = ['#8d02ff', '#e60004', '#009836', '#2e1d86', '#ffed00',
'#0068b5', '#f28e00', '#940084', '#97c000']
alternate_colors = ['#00e0e0', '#3f7f7f', '#ff0000', '#00c000',
alternate_colors = ['#ffbe0a', '#96bdff', '#3f7f7f', '#ff0000', '#00c000',
'#0080ff', '#c000ff', '#00eeee', '#e0e000']

View File

@@ -100,12 +100,6 @@ def pkg_node_name(pkg):
return "_" + pkg.replace("-", "")
TARGET_EXCEPTIONS = [
"target-finalize",
"target-post-image",
]
# Basic cache for the results of the is_dep() function, in order to
# optimize the execution time. The cache is a dict of dict of boolean
# values. The key to the primary dict is "pkg", and the key of the
@@ -176,10 +170,15 @@ def remove_transitive_deps(pkg, deps):
return new_d
# List of dependencies that all/many packages have, and that we want
# to trim when generating the dependency graph.
MANDATORY_DEPS = ['toolchain', 'skeleton']
# This function removes the dependency on some 'mandatory' package, like the
# 'toolchain' package, or the 'skeleton' package
def remove_mandatory_deps(pkg, deps):
return [p for p in deps[pkg] if p not in ['toolchain', 'skeleton']]
return [p for p in deps[pkg] if p not in MANDATORY_DEPS]
# This function will check that there is no loop in the dependency chain
@@ -211,12 +210,12 @@ def check_circular_deps(deps):
# This functions trims down the dependency list of all packages.
# It applies in sequence all the dependency-elimination methods.
def remove_extra_deps(deps, transitive):
def remove_extra_deps(deps, rootpkg, transitive):
for pkg in list(deps.keys()):
if not pkg == 'all':
if not pkg == rootpkg:
deps[pkg] = remove_mandatory_deps(pkg, deps)
for pkg in list(deps.keys()):
if not transitive or pkg == 'all':
if not transitive or pkg == rootpkg:
deps[pkg] = remove_transitive_deps(pkg, deps)
return deps
@@ -384,9 +383,6 @@ def main():
allpkgs.append('all')
filtered_targets = []
for tg in targets:
# Skip uninteresting targets
if tg in TARGET_EXCEPTIONS:
continue
dependencies.append(('all', tg))
filtered_targets.append(tg)
deps = get_all_depends(filtered_targets, get_depends_func)
@@ -410,7 +406,7 @@ def main():
if check_only:
sys.exit(0)
dict_deps = remove_extra_deps(dict_deps, args.transitive)
dict_deps = remove_extra_deps(dict_deps, rootpkg, args.transitive)
dict_version = brpkgutil.get_version([pkg for pkg in allpkgs
if pkg != "all" and not pkg.startswith("root")])

View File

@@ -25,10 +25,19 @@ import re
import subprocess
import sys
import requests # URL checking
import json
import certifi
from urllib3 import HTTPSConnectionPool
from urllib3.exceptions import HTTPError
from multiprocessing import Pool
INFRA_RE = re.compile("\$\(eval \$\(([a-z-]*)-package\)\)")
URL_RE = re.compile("\s*https?://\S*\s*$")
INFRA_RE = re.compile(r"\$\(eval \$\(([a-z-]*)-package\)\)")
URL_RE = re.compile(r"\s*https?://\S*\s*$")
RM_API_STATUS_ERROR = 1
RM_API_STATUS_FOUND_BY_DISTRO = 2
RM_API_STATUS_FOUND_BY_PATTERN = 3
RM_API_STATUS_NOT_FOUND = 4
class Package:
@@ -49,6 +58,7 @@ class Package:
self.url = None
self.url_status = None
self.url_worker = None
self.latest_version = (RM_API_STATUS_ERROR, None, None)
def pkgvar(self):
return self.name.upper().replace("-", "_")
@@ -298,6 +308,73 @@ def check_package_urls(packages):
pkg.url_status = pkg.url_worker.get(timeout=3600)
def release_monitoring_get_latest_version_by_distro(pool, name):
    # Look up the package through the explicit "Buildroot" distribution
    # mapping on release-monitoring.org. Returns a (status, version, id)
    # tuple; version is None when the project exists but reports no
    # version.
    url = "/api/project/Buildroot/%s" % name
    try:
        resp = pool.request('GET', url)
    except HTTPError:
        return (RM_API_STATUS_ERROR, None, None)
    if resp.status != 200:
        return (RM_API_STATUS_NOT_FOUND, None, None)
    project = json.loads(resp.data)
    # dict.get() yields None when 'version' is absent, matching the
    # "found but no version" case.
    return (RM_API_STATUS_FOUND_BY_DISTRO, project.get('version'),
            project['id'])
def release_monitoring_get_latest_version_by_guess(pool, name):
    # Fall back to a pattern search on release-monitoring.org and pick,
    # among exact name matches that carry a version, the project with
    # the lowest id. Returns a (status, version, id) tuple.
    try:
        resp = pool.request('GET', "/api/projects/?pattern=%s" % name)
    except HTTPError:
        return (RM_API_STATUS_ERROR, None, None)
    if resp.status != 200:
        return (RM_API_STATUS_NOT_FOUND, None, None)
    payload = json.loads(resp.data)
    # Scan candidates in ascending id order so the oldest matching
    # project wins.
    for candidate in sorted(payload['projects'], key=lambda p: p['id']):
        if candidate['name'] == name and 'version' in candidate:
            return (RM_API_STATUS_FOUND_BY_PATTERN, candidate['version'],
                    candidate['id'])
    return (RM_API_STATUS_NOT_FOUND, None, None)
def check_package_latest_version(packages):
    """
    Fills in the .latest_version field of all Package objects

    This field has a special format:
        (status, version, id)
    with:
    - status: one of RM_API_STATUS_ERROR,
      RM_API_STATUS_FOUND_BY_DISTRO, RM_API_STATUS_FOUND_BY_PATTERN,
      RM_API_STATUS_NOT_FOUND
    - version: string containing the latest version known by
      release-monitoring.org for this package
    - id: string containing the id of the project corresponding to this
      package, as known by release-monitoring.org
    """
    # One pooled HTTPS connection for all requests; certifi supplies the
    # CA bundle so certificate validation is enforced.
    pool = HTTPSConnectionPool('release-monitoring.org', port=443,
                               cert_reqs='CERT_REQUIRED',
                               ca_certs=certifi.where(),
                               timeout=30)
    # enumerate from 1 so the progress line ends at "[N/N]"; the
    # previous manual counter was printed before being incremented and
    # ran "[0/N]" .. "[N-1/N]".
    for count, pkg in enumerate(packages, 1):
        # Prefer the explicit Buildroot mapping; fall back to a
        # pattern-based guess only when no mapping exists.
        v = release_monitoring_get_latest_version_by_distro(pool, pkg.name)
        if v[0] == RM_API_STATUS_NOT_FOUND:
            v = release_monitoring_get_latest_version_by_guess(pool, pkg.name)
        pkg.latest_version = v
        print("[%d/%d] Package %s" % (count, len(packages), pkg.name))
def calculate_stats(packages):
stats = defaultdict(int)
for pkg in packages:
@@ -322,6 +399,16 @@ def calculate_stats(packages):
stats["hash"] += 1
else:
stats["no-hash"] += 1
if pkg.latest_version[0] == RM_API_STATUS_FOUND_BY_DISTRO:
stats["rmo-mapping"] += 1
else:
stats["rmo-no-mapping"] += 1
if not pkg.latest_version[1]:
stats["version-unknown"] += 1
elif pkg.latest_version[1] == pkg.current_version:
stats["version-uptodate"] += 1
else:
stats["version-not-uptodate"] += 1
stats["patches"] += pkg.patch_count
return stats
@@ -354,6 +441,7 @@ td.somepatches {
td.lotsofpatches {
background: #ff9a69;
}
td.good_url {
background: #d2ffc4;
}
@@ -363,6 +451,20 @@ td.missing_url {
td.invalid_url {
background: #ff9a69;
}
td.version-good {
background: #d2ffc4;
}
td.version-needs-update {
background: #ff9a69;
}
td.version-unknown {
background: #ffd870;
}
td.version-error {
background: #ccc;
}
</style>
<title>Statistics of Buildroot packages</title>
</head>
@@ -465,6 +567,37 @@ def dump_html_pkg(f, pkg):
current_version = pkg.current_version
f.write(" <td class=\"centered\">%s</td>\n" % current_version)
# Latest version
if pkg.latest_version[0] == RM_API_STATUS_ERROR:
td_class.append("version-error")
if pkg.latest_version[1] is None:
td_class.append("version-unknown")
elif pkg.latest_version[1] != pkg.current_version:
td_class.append("version-needs-update")
else:
td_class.append("version-good")
if pkg.latest_version[0] == RM_API_STATUS_ERROR:
latest_version_text = "<b>Error</b>"
elif pkg.latest_version[0] == RM_API_STATUS_NOT_FOUND:
latest_version_text = "<b>Not found</b>"
else:
if pkg.latest_version[1] is None:
latest_version_text = "<b>Found, but no version</b>"
else:
latest_version_text = "<a href=\"https://release-monitoring.org/project/%s\"><b>%s</b></a>" % \
(pkg.latest_version[2], str(pkg.latest_version[1]))
latest_version_text += "<br/>"
if pkg.latest_version[0] == RM_API_STATUS_FOUND_BY_DISTRO:
latest_version_text += "found by <a href=\"https://release-monitoring.org/distro/Buildroot/\">distro</a>"
else:
latest_version_text += "found by guess"
f.write(" <td class=\"%s\">%s</td>\n" %
(" ".join(td_class), latest_version_text))
# Warnings
td_class = ["centered"]
if pkg.warnings == 0:
@@ -502,6 +635,7 @@ def dump_html_all_pkgs(f, packages):
<td class=\"centered\">License files</td>
<td class=\"centered\">Hash file</td>
<td class=\"centered\">Current version</td>
<td class=\"centered\">Latest version</td>
<td class=\"centered\">Warnings</td>
<td class=\"centered\">Upstream URL</td>
</tr>
@@ -532,6 +666,16 @@ def dump_html_stats(f, stats):
stats["no-hash"])
f.write(" <tr><td>Total number of patches</td><td>%s</td></tr>\n" %
stats["patches"])
f.write("<tr><td>Packages having a mapping on <i>release-monitoring.org</i></td><td>%s</td></tr>\n" %
stats["rmo-mapping"])
f.write("<tr><td>Packages lacking a mapping on <i>release-monitoring.org</i></td><td>%s</td></tr>\n" %
stats["rmo-no-mapping"])
f.write("<tr><td>Packages that are up-to-date</td><td>%s</td></tr>\n" %
stats["version-uptodate"])
f.write("<tr><td>Packages that are not up-to-date</td><td>%s</td></tr>\n" %
stats["version-not-uptodate"])
f.write("<tr><td>Packages with no known upstream version</td><td>%s</td></tr>\n" %
stats["version-unknown"])
f.write("</table>\n")
@@ -587,6 +731,8 @@ def __main__():
pkg.set_url()
print("Checking URL status")
check_package_urls(packages)
print("Getting latest versions ...")
check_package_latest_version(packages)
print("Calculate stats")
stats = calculate_stats(packages)
print("Write HTML")

View File

@@ -53,7 +53,7 @@ if head=`git rev-parse --verify --short HEAD 2>/dev/null`; then
fi
# Check for mercurial and a mercurial repo.
if hgid=`hg id 2>/dev/null`; then
if hgid=`HGRCPATH= hg id --id --tags 2>/dev/null`; then
tag=`printf '%s' "$hgid" | cut -d' ' -f2 --only-delimited`
# Do we have an untagged version?