Update buildroot 2020.02.01 (#622)

* Update buildroot 2020.02.01

Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>

* Fix LN

* Fix wpa

Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>

* Fix lint

Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>

* fix-network

Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>

* Fix script

Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>
This commit is contained in:
Pascal Vizeli
2020-04-16 20:03:01 +02:00
committed by GitHub
parent 0c2b5aff65
commit 5a6678147e
6201 changed files with 73436 additions and 70757 deletions

View File

@@ -14,14 +14,11 @@ MANUAL_URL='https://buildroot.org/manual.html\#br2-external-converting'
main() {
local OPT OPTARG
local br2_ext ofile ofmt
local br2_ext outputdir
while getopts :hkmo: OPT; do
while getopts :d: OPT; do
case "${OPT}" in
h) help; exit 0;;
o) ofile="${OPTARG}";;
k) ofmt="kconfig";;
m) ofmt="mk";;
d) outputdir="${OPTARG}";;
:) error "option '%s' expects a mandatory argument\n" "${OPTARG}";;
\?) error "unknown option '%s'\n" "${OPTARG}";;
esac
@@ -29,23 +26,18 @@ main() {
# Forget options; keep only positional args
shift $((OPTIND-1))
case "${ofmt}" in
mk|kconfig)
;;
*) error "no output format specified (-m/-k)\n";;
esac
if [ -z "${ofile}" ]; then
error "no output file specified (-o)\n"
if [ -z "${outputdir}" ]; then
error "no output directory specified (-d)\n"
fi
exec >"${ofile}"
# Trap any unexpected error to generate a meaningful error message
trap "error 'unexpected error while generating ${ofile}\n'" ERR
do_validate ${@//:/ }
do_${ofmt}
mkdir -p "${outputdir}"
do_mk "${outputdir}"
do_kconfig "${outputdir}"
}
# Validates the br2-external trees passed as arguments. Makes each of
@@ -119,103 +111,121 @@ do_validate_one() {
# Generate the .mk snippet that defines makefile variables
# for the br2-external tree
do_mk() {
local br2_name br2_ext
local outputdir="${1}"
local br2_name br2_desc br2_ext
printf '#\n# Automatically generated file; DO NOT EDIT.\n#\n'
printf '\n'
printf 'BR2_EXTERNAL ?='
for br2_name in "${BR2_EXT_NAMES[@]}"; do
eval br2_ext="\"\${BR2_EXT_PATHS_${br2_name}}\""
printf ' %s' "${br2_ext}"
done
printf '\n'
printf 'BR2_EXTERNAL_NAMES = \n'
printf 'BR2_EXTERNAL_DIRS = \n'
printf 'BR2_EXTERNAL_MKS = \n'
if [ ${#BR2_EXT_NAMES[@]} -eq 0 ]; then
{
printf '#\n# Automatically generated file; DO NOT EDIT.\n#\n'
printf '\n'
printf '# No br2-external tree defined.\n'
return
fi
for br2_name in "${BR2_EXT_NAMES[@]}"; do
eval br2_desc="\"\${BR2_EXT_DESCS_${br2_name}}\""
eval br2_ext="\"\${BR2_EXT_PATHS_${br2_name}}\""
printf 'BR2_EXTERNAL ?='
for br2_name in "${BR2_EXT_NAMES[@]}"; do
eval br2_ext="\"\${BR2_EXT_PATHS_${br2_name}}\""
printf ' %s' "${br2_ext}"
done
printf '\n'
printf 'BR2_EXTERNAL_NAMES += %s\n' "${br2_name}"
printf 'BR2_EXTERNAL_DIRS += %s\n' "${br2_ext}"
printf 'BR2_EXTERNAL_MKS += %s/external.mk\n' "${br2_ext}"
printf 'export BR2_EXTERNAL_%s_PATH = %s\n' "${br2_name}" "${br2_ext}"
printf 'export BR2_EXTERNAL_%s_DESC = %s\n' "${br2_name}" "${br2_desc}"
done
printf 'BR2_EXTERNAL_NAMES = \n'
printf 'BR2_EXTERNAL_DIRS = \n'
printf 'BR2_EXTERNAL_MKS = \n'
if [ ${#BR2_EXT_NAMES[@]} -eq 0 ]; then
printf '\n'
printf '# No br2-external tree defined.\n'
return
fi
for br2_name in "${BR2_EXT_NAMES[@]}"; do
eval br2_desc="\"\${BR2_EXT_DESCS_${br2_name}}\""
eval br2_ext="\"\${BR2_EXT_PATHS_${br2_name}}\""
printf '\n'
printf 'BR2_EXTERNAL_NAMES += %s\n' "${br2_name}"
printf 'BR2_EXTERNAL_DIRS += %s\n' "${br2_ext}"
printf 'BR2_EXTERNAL_MKS += %s/external.mk\n' "${br2_ext}"
printf 'export BR2_EXTERNAL_%s_PATH = %s\n' "${br2_name}" "${br2_ext}"
printf 'export BR2_EXTERNAL_%s_DESC = %s\n' "${br2_name}" "${br2_desc}"
done
} >"${outputdir}/.br2-external.mk"
}
# Generate the kconfig snippet for the br2-external tree.
# Generate the kconfig snippets for the br2-external tree.
do_kconfig() {
local br2_name br2_ext
local outputdir="${1}"
local br2_name br2_desc br2_ext br2
local -a items
printf '#\n# Automatically generated file; DO NOT EDIT.\n#\n'
printf '\n'
items=(
paths
menus
toolchains
jpeg
openssl
)
for br2 in "${items[@]}"; do
{
printf '#\n# Automatically generated file; DO NOT EDIT.\n#\n'
printf '\n'
if [ ${#BR2_EXT_NAMES[@]} -eq 0 ]; then
printf '# No br2-external tree defined.\n'
fi
} >"${outputdir}/.br2-external.in.${br2}"
done
if [ ${#BR2_EXT_NAMES[@]} -eq 0 ]; then
printf '# No br2-external tree defined.\n'
return
fi
printf 'menu "External options"\n'
printf '\n'
printf 'menu "External options"\n\n' >>"${outputdir}/.br2-external.in.menus"
for br2_name in "${BR2_EXT_NAMES[@]}"; do
eval br2_desc="\"\${BR2_EXT_DESCS_${br2_name}}\""
eval br2_ext="\"\${BR2_EXT_PATHS_${br2_name}}\""
if [ ${#BR2_EXT_NAMES[@]} -gt 1 ]; then
printf 'menu "%s"\n' "${br2_desc}"
fi
printf 'comment "%s (in %s)"\n' "${br2_desc}" "${br2_ext}"
printf 'config BR2_EXTERNAL_%s_PATH\n' "${br2_name}"
printf '\tstring\n'
printf '\tdefault "%s"\n' "${br2_ext}"
printf 'source "%s/Config.in"\n' "${br2_ext}"
if [ ${#BR2_EXT_NAMES[@]} -gt 1 ]; then
printf 'endmenu # %s\n' "${br2_name}"
fi
printf '\n'
{
printf 'config BR2_EXTERNAL_%s_PATH\n' "${br2_name}"
printf '\tstring\n'
printf '\tdefault "%s"\n' "${br2_ext}"
printf '\n'
} >>"${outputdir}/.br2-external.in.paths"
{
if [ ${#BR2_EXT_NAMES[@]} -gt 1 ]; then
printf 'menu "%s"\n' "${br2_desc}"
fi
printf 'comment "%s (in %s)"\n' "${br2_desc}" "${br2_ext}"
printf 'source "%s/Config.in"\n' "${br2_ext}"
if [ ${#BR2_EXT_NAMES[@]} -gt 1 ]; then
printf 'endmenu # %s\n' "${br2_name}"
fi
printf '\n'
} >>"${outputdir}/.br2-external.in.menus"
if [ -f "${br2_ext}/provides/toolchains.in" ]; then
printf 'comment "Toolchains from: %s"\n' "${br2_desc}"
printf 'source "%s/provides/toolchains.in"\n' "${br2_ext}"
printf '\n'
else
printf '# No toolchain from: %s\n\n' "${br2_desc}"
fi >>"${outputdir}/.br2-external.in.toolchains"
if [ -f "${br2_ext}/provides/jpeg.in" ]; then
printf 'comment "jpeg from: %s"\n' "${br2_desc}"
printf 'source "%s/provides/jpeg.in"\n' "${br2_ext}"
printf '\n'
else
printf '# No jpeg from: %s\n\n' "${br2_desc}"
fi >>"${outputdir}/.br2-external.in.jpeg"
if [ -f "${br2_ext}/provides/openssl.in" ]; then
printf 'comment "openssl from: %s"\n' "${br2_desc}"
printf 'source "%s/provides/openssl.in"\n' "${br2_ext}"
printf '\n'
else
printf '# No openssl from: %s\n\n' "${br2_desc}"
fi >>"${outputdir}/.br2-external.in.openssl"
done
printf "endmenu # User-provided options\n"
}
help() {
cat <<-_EOF_
Usage:
${my_name} <-m|-k> -o FILE PATH
With -m, ${my_name} generates the makefile fragment that defines
variables related to the br2-external trees passed as positional
arguments.
With -k, ${my_name} generates the kconfig snippet to include the
configuration options specified in the br2-external trees passed
as positional arguments.
Using -k and -m together is not possible. The last one wins.
Options:
-m Generate the makefile fragment.
-k Generate the kconfig snippet.
-o FILE
FILE in which to generate the kconfig snippet or makefile
fragment.
Returns:
0 If no error
!0 If any error
_EOF_
printf 'endmenu\n' >>"${outputdir}/.br2-external.in.menus"
}
error() { local fmt="${1}"; shift; printf "BR2_EXTERNAL_ERROR = ${fmt}" "${@}"; exit 1; }

View File

@@ -1,67 +1,50 @@
# Copyright (C) 2010-2013 Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
# Copyright (C) 2019 Yann E. MORIN <yann.morin.1998@free.fr>
import json
import logging
import sys
import os
import subprocess
from collections import defaultdict
# Execute the "make <pkg>-show-version" command to get the version of a given
# list of packages, and return the version formatted as a Python dictionary.
def get_version(pkgs):
logging.info("Getting version for %s" % pkgs)
cmd = ["make", "-s", "--no-print-directory"]
for pkg in pkgs:
cmd.append("%s-show-version" % pkg)
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
output = p.communicate()[0]
if p.returncode != 0:
logging.error("Error getting version %s" % pkgs)
sys.exit(1)
output = output.split("\n")
if len(output) != len(pkgs) + 1:
logging.error("Error getting version")
sys.exit(1)
version = {}
for i in range(0, len(pkgs)):
pkg = pkgs[i]
version[pkg] = output[i]
return version
# This function returns a tuple of four dictionaries, all using package
# names as keys:
# - a dictionary which values are the lists of packages that are the
# dependencies of the package used as key;
# - a dictionary which values are the lists of packages that are the
# reverse dependencies of the package used as key;
# - a dictionary which values are the type of the package used as key;
# - a dictionary which values are the version of the package used as key,
# 'virtual' for a virtual package, or the empty string for a rootfs.
def get_dependency_tree():
logging.info("Getting dependency tree...")
def _get_depends(pkgs, rule):
logging.info("Getting dependencies for %s" % pkgs)
cmd = ["make", "-s", "--no-print-directory"]
for pkg in pkgs:
cmd.append("%s-%s" % (pkg, rule))
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
output = p.communicate()[0]
if p.returncode != 0:
logging.error("Error getting dependencies %s\n" % pkgs)
sys.exit(1)
output = output.split("\n")
if len(output) != len(pkgs) + 1:
logging.error("Error getting dependencies")
sys.exit(1)
deps = {}
for i in range(0, len(pkgs)):
pkg = pkgs[i]
pkg_deps = output[i].split(" ")
if pkg_deps == ['']:
deps[pkg] = []
else:
deps[pkg] = pkg_deps
return deps
rdeps = defaultdict(list)
types = {}
versions = {}
# Special case for the 'all' top-level fake package
deps['all'] = []
types['all'] = 'target'
versions['all'] = ''
# Execute the "make <pkg>-show-depends" command to get the list of
# dependencies of a given list of packages, and return the list of
# dependencies formatted as a Python dictionary.
def get_depends(pkgs):
return _get_depends(pkgs, 'show-depends')
cmd = ["make", "-s", "--no-print-directory", "show-info"]
with open(os.devnull, 'wb') as devnull:
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=devnull,
universal_newlines=True)
pkg_list = json.loads(p.communicate()[0])
for pkg in pkg_list:
deps['all'].append(pkg)
types[pkg] = pkg_list[pkg]["type"]
deps[pkg] = pkg_list[pkg].get("dependencies", [])
for p in deps[pkg]:
rdeps[p].append(pkg)
versions[pkg] = \
None if pkg_list[pkg]["type"] == "rootfs" \
else "virtual" if pkg_list[pkg]["virtual"] \
else pkg_list[pkg]["version"]
# Execute the "make <pkg>-show-rdepends" command to get the list of
# reverse dependencies of a given list of packages, and return the
# list of dependencies formatted as a Python dictionary.
def get_rdepends(pkgs):
return _get_depends(pkgs, 'show-rdepends')
return (deps, rdeps, types, versions)

View File

@@ -11,6 +11,7 @@ export LC_ALL=C
main() {
local pkg="${1}"
local hostdir="${2}"
local perpackagedir="${3}"
local file ret
# Remove duplicate and trailing '/' for proper match
@@ -20,7 +21,7 @@ main() {
while read file; do
is_elf "${file}" || continue
elf_needs_rpath "${file}" "${hostdir}" || continue
check_elf_has_rpath "${file}" "${hostdir}" && continue
check_elf_has_rpath "${file}" "${hostdir}" "${perpackagedir}" && continue
if [ ${ret} -eq 0 ]; then
ret=1
printf "***\n"
@@ -44,6 +45,15 @@ is_elf() {
# needs such an RPATH if at least one of the libraries used by the ELF
# executable is available in the host library directory. This function
# returns 0 when a RPATH is needed, 1 otherwise.
#
# With per-package directory support, ${hostdir} will point to the
# current package per-package host directory, and this is where this
# function will check if the libraries needed by the executable are
# located (or not). In practice, the ELF executable RPATH may point to
# another package per-package host directory, but that is fine because
# if such an executable is within the current package per-package host
# directory, its libraries will also have been copied into the current
# package per-package host directory.
elf_needs_rpath() {
local file="${1}"
local hostdir="${2}"
@@ -62,13 +72,19 @@ elf_needs_rpath() {
# This function checks whether at least one of the RPATH of the given
# ELF executable (first argument) properly points to the host library
# directory (second argument), either through an absolute RPATH or a
# relative RPATH. Having such a RPATH will make sure the ELF
# executable will find at runtime the shared libraries it depends
# on. This function returns 0 when a proper RPATH was found, or 1
# otherwise.
# relative RPATH. In the context of per-package directory support,
# ${hostdir} (second argument) points to the current package host
# directory. However, it is perfectly valid for an ELF binary to have
# a RPATH pointing to another package per-package host directory,
# which is why such RPATH is also accepted (the per-package directory
# gets passed as third argument). Having a RPATH pointing to the host
# directory will make sure the ELF executable will find at runtime the
# shared libraries it depends on. This function returns 0 when a
# proper RPATH was found, or 1 otherwise.
check_elf_has_rpath() {
local file="${1}"
local hostdir="${2}"
local perpackagedir="${3}"
local rpath dir
while read rpath; do
@@ -77,6 +93,12 @@ check_elf_has_rpath() {
dir="$( sed -r -e 's:/+:/:g; s:/$::;' <<<"${dir}" )"
[ "${dir}" = "${hostdir}/lib" ] && return 0
[ "${dir}" = "\$ORIGIN/../lib" ] && return 0
# This check is done even for builds where
# BR2_PER_PACKAGE_DIRECTORIES is disabled. In this case,
# PER_PACKAGE_DIR and therefore ${perpackagedir} points to
# a non-existent directory, and this check will always be
# false.
[[ ${dir} =~ ${perpackagedir}/[^/]+/host/lib ]] && return 0
done
done < <( readelf -d "${file}" \
|sed -r -e '/.* \(R(UN)?PATH\) +Library r(un)?path: \[(.+)\]$/!d' \

View File

@@ -1,14 +1,39 @@
#!/bin/sh
# This script (and the embedded C code) will check that the actual
# headers version match the user told us they were:
#
# - if both versions are the same, all is well.
#
# - if the actual headers are older than the user told us, this is
# an error.
#
# - if the actual headers are more recent than the user told us, and
# we are doing a strict check, then this is an error.
#
# - if the actual headers are more recent than the user told us, and
# we are doing a loose check, then a warning is printed, but this is
# not an error.
BUILDDIR="${1}"
SYSROOT="${2}"
# Make sure we have enough version components
HDR_VER="${3}.0.0"
CHECK="${4}" # 'strict' or 'loose'
HDR_M="${HDR_VER%%.*}"
HDR_V="${HDR_VER#*.}"
HDR_m="${HDR_V%%.*}"
# Exit on any error, so we don't try to run a nonexistent program if the
# compilation fails.
set -e
# Set the clean-up trap in advance to prevent a race condition in which we
# create the file but get a SIGTERM before setting it. Notice that we don't
# need to care about EXEC being empty, since 'rm -f ""' does nothing.
trap 'rm -f "${EXEC}"' EXIT
EXEC="$(mktemp -p "${BUILDDIR}" -t .check-headers.XXXXXX)"
# We do not want to account for the patch-level, since headers are
@@ -19,13 +44,18 @@ ${HOSTCC} -imacros "${SYSROOT}/usr/include/linux/version.h" \
-x c -o "${EXEC}" - <<_EOF_
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
int main(int argc __attribute__((unused)),
char** argv __attribute__((unused)))
{
if((LINUX_VERSION_CODE & ~0xFF)
!= KERNEL_VERSION(${HDR_M},${HDR_m},0))
{
int l = LINUX_VERSION_CODE & ~0xFF;
int h = KERNEL_VERSION(${HDR_M},${HDR_m},0);
if ((l >= h) && !strcmp("${CHECK}", "loose"))
return 0;
if (l != h) {
printf("Incorrect selection of kernel headers: ");
printf("expected %d.%d.x, got %d.%d.x\n", ${HDR_M}, ${HDR_m},
((LINUX_VERSION_CODE>>16) & 0xFF),
@@ -37,6 +67,3 @@ int main(int argc __attribute__((unused)),
_EOF_
"${EXEC}"
ret=${?}
rm -f "${EXEC}"
exit ${ret}

View File

@@ -1,48 +0,0 @@
#!/usr/bin/env python
import sys
import argparse
from collections import defaultdict
warn = 'Warning: {0} file "{1}" is touched by more than one package: {2}\n'
def main():
parser = argparse.ArgumentParser()
parser.add_argument('packages_file_list', nargs='*',
help='The packages-file-list to check from')
parser.add_argument('-t', '--type', metavar="TYPE",
help='Report as a TYPE file (TYPE is either target, staging, or host)')
args = parser.parse_args()
if not len(args.packages_file_list) == 1:
sys.stderr.write('No packages-file-list was provided.\n')
return False
if args.type is None:
sys.stderr.write('No type was provided\n')
return False
file_to_pkg = defaultdict(list)
with open(args.packages_file_list[0], 'rb') as pkg_file_list:
for line in pkg_file_list.readlines():
pkg, _, file = line.rstrip(b'\n').partition(b',')
file_to_pkg[file].append(pkg)
for file in file_to_pkg:
if len(file_to_pkg[file]) > 1:
# If possible, try to decode the binary strings with
# the default user's locale
try:
sys.stderr.write(warn.format(args.type, file.decode(),
[p.decode() for p in file_to_pkg[file]]))
except UnicodeDecodeError:
# ... but fallback to just dumping them raw if they
# contain non-representable chars
sys.stderr.write(warn.format(args.type, file,
file_to_pkg[file]))
if __name__ == "__main__":
sys.exit(main())

View File

@@ -127,14 +127,29 @@ main() {
while read file ; do
# check if it's an ELF file
if ${PATCHELF} --print-rpath "${file}" > /dev/null 2>&1; then
# make files writable if necessary
changed=$(chmod -c u+w "${file}")
# call patchelf to sanitize the rpath
${PATCHELF} --make-rpath-relative "${rootdir}" ${sanitize_extra_args[@]} "${file}"
# restore the original permission
test "${changed}" != "" && chmod u-w "${file}"
rpath=$(${PATCHELF} --print-rpath "${file}" 2>&1)
if test $? -ne 0 ; then
continue
fi
# make files writable if necessary
changed=$(chmod -c u+w "${file}")
# With per-package directory support, most RPATH of host
# binaries will point to per-package directories. This won't
# work with the --make-rpath-relative ${rootdir} invocation as
# the per-package host directory is not within ${rootdir}. So,
# we rewrite all RPATHs pointing to per-package directories so
# that they point to the global host directory.
changed_rpath=$(echo ${rpath} | sed "s@${PER_PACKAGE_DIR}/[^/]\+/host@${HOST_DIR}@")
if test "${rpath}" != "${changed_rpath}" ; then
${PATCHELF} --set-rpath ${changed_rpath} "${file}"
fi
# call patchelf to sanitize the rpath
${PATCHELF} --make-rpath-relative "${rootdir}" ${sanitize_extra_args[@]} "${file}"
# restore the original permission
test "${changed}" != "" && chmod u-w "${file}"
done < <(find "${rootdir}" ${find_args[@]})
# Restore patched patchelf utility

View File

@@ -30,10 +30,18 @@ done
[ -n "${GENIMAGE_CFG}" ] || die "Missing argument"
# Pass an empty rootpath. genimage makes a full copy of the given rootpath to
# ${GENIMAGE_TMP}/root so passing TARGET_DIR would be a waste of time and disk
# space. We don't rely on genimage to build the rootfs image, just to insert a
# pre-built one in the disk image.
trap 'rm -rf "${ROOTPATH_TMP}"' EXIT
ROOTPATH_TMP="$(mktemp -d)"
rm -rf "${GENIMAGE_TMP}"
genimage \
--rootpath "${TARGET_DIR}" \
--rootpath "${ROOTPATH_TMP}" \
--tmppath "${GENIMAGE_TMP}" \
--inputpath "${BINARIES_DIR}" \
--outputpath "${BINARIES_DIR}" \

View File

@@ -20,10 +20,10 @@
# configuration.
#
# Copyright (C) 2010-2013 Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
# Copyright (C) 2019 Yann E. MORIN <yann.morin.1998@free.fr>
import logging
import sys
import subprocess
import argparse
from fnmatch import fnmatch
@@ -36,63 +36,6 @@ MODE_PKG = 2 # draw dependency graph for a given package
allpkgs = []
# Execute the "make show-targets" command to get the list of the main
# Buildroot PACKAGES and return it formatted as a Python list. This
# list is used as the starting point for full dependency graphs
def get_targets():
logging.info("Getting targets")
cmd = ["make", "-s", "--no-print-directory", "show-targets"]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, universal_newlines=True)
output = p.communicate()[0].strip()
if p.returncode != 0:
return None
if output == '':
return []
return output.split(' ')
# Recursive function that builds the tree of dependencies for a given
# list of packages. The dependencies are built in a list called
# 'dependencies', which contains tuples of the form (pkg1 ->
# pkg2_on_which_pkg1_depends, pkg3 -> pkg4_on_which_pkg3_depends) and
# the function finally returns this list.
def get_all_depends(pkgs, get_depends_func):
dependencies = []
# Filter the packages for which we already have the dependencies
filtered_pkgs = []
for pkg in pkgs:
if pkg in allpkgs:
continue
filtered_pkgs.append(pkg)
allpkgs.append(pkg)
if len(filtered_pkgs) == 0:
return []
depends = get_depends_func(filtered_pkgs)
deps = set()
for pkg in filtered_pkgs:
pkg_deps = depends[pkg]
# This package has no dependency.
if pkg_deps == []:
continue
# Add dependencies to the list of dependencies
for dep in pkg_deps:
dependencies.append((pkg, dep))
deps.add(dep)
if len(deps) != 0:
newdeps = get_all_depends(deps, get_depends_func)
if newdeps is not None:
dependencies += newdeps
return dependencies
# The Graphviz "dot" utility doesn't like dashes in node names. So for
# node names, we strip all dashes. Also, nodes can't start with a number,
# so we prepend an underscore.
@@ -172,7 +115,7 @@ def remove_transitive_deps(pkg, deps):
# List of dependencies that all/many packages have, and that we want
# to trim when generating the dependency graph.
MANDATORY_DEPS = ['toolchain', 'skeleton']
MANDATORY_DEPS = ['toolchain', 'skeleton', 'host-skeleton', 'host-tar', 'host-gzip', 'host-ccache']
# This function removes the dependency on some 'mandatory' package, like the
@@ -181,6 +124,12 @@ def remove_mandatory_deps(pkg, deps):
return [p for p in deps[pkg] if p not in MANDATORY_DEPS]
# This function returns all dependencies of pkg that are part of the
# mandatory dependencies:
def get_mandatory_deps(pkg, deps):
return [p for p in deps[pkg] if p in MANDATORY_DEPS]
# This function will check that there is no loop in the dependency chain
# As a side effect, it builds up the dependency cache.
def check_circular_deps(deps):
@@ -210,10 +159,17 @@ def check_circular_deps(deps):
# This functions trims down the dependency list of all packages.
# It applies in sequence all the dependency-elimination methods.
def remove_extra_deps(deps, rootpkg, transitive):
for pkg in list(deps.keys()):
if not pkg == rootpkg:
deps[pkg] = remove_mandatory_deps(pkg, deps)
def remove_extra_deps(deps, rootpkg, transitive, arrow_dir):
# For the direct dependencies, find and eliminate mandatory
# deps, and add them to the root package. Don't do it for a
# reverse graph, because mandatory deps are only direct deps.
if arrow_dir == "forward":
for pkg in list(deps.keys()):
if not pkg == rootpkg:
for d in get_mandatory_deps(pkg, deps):
if d not in deps[rootpkg]:
deps[rootpkg].append(d)
deps[pkg] = remove_mandatory_deps(pkg, deps)
for pkg in list(deps.keys()):
if not transitive or pkg == rootpkg:
deps[pkg] = remove_transitive_deps(pkg, deps)
@@ -221,7 +177,7 @@ def remove_extra_deps(deps, rootpkg, transitive):
# Print the attributes of a node: label and fill-color
def print_attrs(outfile, pkg, version, depth, colors):
def print_attrs(outfile, pkg, pkg_type, pkg_version, depth, colors):
name = pkg_node_name(pkg)
if pkg == 'all':
label = 'ALL'
@@ -230,13 +186,11 @@ def print_attrs(outfile, pkg, version, depth, colors):
if depth == 0:
color = colors[0]
else:
if pkg.startswith('host') \
or pkg.startswith('toolchain') \
or pkg.startswith('rootfs'):
if pkg_type == "host":
color = colors[2]
else:
color = colors[1]
if version == "virtual":
if pkg_version == "virtual":
outfile.write("%s [label = <<I>%s</I>>]\n" % (name, label))
else:
outfile.write("%s [label = \"%s\"]\n" % (name, label))
@@ -247,13 +201,13 @@ done_deps = []
# Print the dependency graph of a package
def print_pkg_deps(outfile, dict_deps, dict_version, stop_list, exclude_list,
def print_pkg_deps(outfile, dict_deps, dict_types, dict_versions, stop_list, exclude_list,
arrow_dir, draw_graph, depth, max_depth, pkg, colors):
if pkg in done_deps:
return
done_deps.append(pkg)
if draw_graph:
print_attrs(outfile, pkg, dict_version.get(pkg), depth, colors)
print_attrs(outfile, pkg, dict_types[pkg], dict_versions[pkg], depth, colors)
elif depth != 0:
outfile.write("%s " % pkg)
if pkg not in dict_deps:
@@ -261,17 +215,15 @@ def print_pkg_deps(outfile, dict_deps, dict_version, stop_list, exclude_list,
for p in stop_list:
if fnmatch(pkg, p):
return
if dict_version.get(pkg) == "virtual" and "virtual" in stop_list:
if dict_versions[pkg] == "virtual" and "virtual" in stop_list:
return
if pkg.startswith("host-") and "host" in stop_list:
if dict_types[pkg] == "host" and "host" in stop_list:
return
if max_depth == 0 or depth < max_depth:
for d in dict_deps[pkg]:
if dict_version.get(d) == "virtual" \
and "virtual" in exclude_list:
if dict_versions[d] == "virtual" and "virtual" in exclude_list:
continue
if d.startswith("host-") \
and "host" in exclude_list:
if dict_types[d] == "host" and "host" in exclude_list:
continue
add = True
for p in exclude_list:
@@ -281,7 +233,7 @@ def print_pkg_deps(outfile, dict_deps, dict_version, stop_list, exclude_list,
if add:
if draw_graph:
outfile.write("%s -> %s [dir=%s]\n" % (pkg_node_name(pkg), pkg_node_name(d), arrow_dir))
print_pkg_deps(outfile, dict_deps, dict_version, stop_list, exclude_list,
print_pkg_deps(outfile, dict_deps, dict_types, dict_versions, stop_list, exclude_list,
arrow_dir, draw_graph, depth + 1, max_depth, d, colors)
@@ -302,6 +254,8 @@ def parse_args():
"'host' to stop on host packages.")
parser.add_argument("--exclude", "-x", metavar="PACKAGE", dest="exclude_list", action="append",
help="Like --stop-on, but do not add PACKAGE to the graph.")
parser.add_argument("--exclude-mandatory", "-X", action="store_true",
help="Like if -x was passed for all mandatory dependencies.")
parser.add_argument("--colors", "-c", metavar="COLOR_LIST", dest="colors",
default="lightblue,grey,gainsboro",
help="Comma-separated list of the three colors to use" +
@@ -341,6 +295,7 @@ def main():
if args.package is None:
mode = MODE_FULL
rootpkg = 'all'
else:
mode = MODE_PKG
rootpkg = args.package
@@ -355,14 +310,15 @@ def main():
else:
exclude_list = args.exclude_list
if args.exclude_mandatory:
exclude_list += MANDATORY_DEPS
if args.direct:
get_depends_func = brpkgutil.get_depends
arrow_dir = "forward"
else:
if mode == MODE_FULL:
logging.error("--reverse needs a package")
sys.exit(1)
get_depends_func = brpkgutil.get_rdepends
arrow_dir = "back"
draw_graph = not args.flat_list
@@ -375,46 +331,20 @@ def main():
logging.error("Error: incorrect color list '%s'" % args.colors)
sys.exit(1)
# In full mode, start with the result of get_targets() to get the main
# targets and then use get_all_depends() for all targets
if mode == MODE_FULL:
targets = get_targets()
dependencies = []
allpkgs.append('all')
filtered_targets = []
for tg in targets:
dependencies.append(('all', tg))
filtered_targets.append(tg)
deps = get_all_depends(filtered_targets, get_depends_func)
if deps is not None:
dependencies += deps
rootpkg = 'all'
# In pkg mode, start directly with get_all_depends() on the requested
# package
elif mode == MODE_PKG:
dependencies = get_all_depends([rootpkg], get_depends_func)
# Make the dependencies a dictionary { 'pkg':[dep1, dep2, ...] }
dict_deps = {}
for dep in dependencies:
if dep[0] not in dict_deps:
dict_deps[dep[0]] = []
dict_deps[dep[0]].append(dep[1])
deps, rdeps, dict_types, dict_versions = brpkgutil.get_dependency_tree()
dict_deps = deps if args.direct else rdeps
check_circular_deps(dict_deps)
if check_only:
sys.exit(0)
dict_deps = remove_extra_deps(dict_deps, rootpkg, args.transitive)
dict_version = brpkgutil.get_version([pkg for pkg in allpkgs
if pkg != "all" and not pkg.startswith("root")])
dict_deps = remove_extra_deps(dict_deps, rootpkg, args.transitive, arrow_dir)
# Start printing the graph data
if draw_graph:
outfile.write("digraph G {\n")
print_pkg_deps(outfile, dict_deps, dict_version, stop_list, exclude_list,
print_pkg_deps(outfile, dict_deps, dict_types, dict_versions, stop_list, exclude_list,
arrow_dir, draw_graph, 0, args.depth, rootpkg, colors)
if draw_graph:

View File

@@ -23,14 +23,21 @@ import os
from collections import defaultdict
import re
import subprocess
import sys
import requests # URL checking
import json
import ijson
import certifi
import distutils.version
import time
import gzip
from urllib3 import HTTPSConnectionPool
from urllib3.exceptions import HTTPError
from multiprocessing import Pool
NVD_START_YEAR = 2002
NVD_JSON_VERSION = "1.0"
NVD_BASE_URL = "https://nvd.nist.gov/feeds/json/cve/" + NVD_JSON_VERSION
INFRA_RE = re.compile(r"\$\(eval \$\(([a-z-]*)-package\)\)")
URL_RE = re.compile(r"\s*https?://\S*\s*$")
@@ -39,11 +46,16 @@ RM_API_STATUS_FOUND_BY_DISTRO = 2
RM_API_STATUS_FOUND_BY_PATTERN = 3
RM_API_STATUS_NOT_FOUND = 4
# Used to make multiple requests to the same host. It is global
# because it's used by sub-processes.
http_pool = None
class Package:
all_licenses = list()
all_license_files = list()
all_versions = dict()
all_ignored_cves = dict()
def __init__(self, name, path):
self.name = name
@@ -58,6 +70,7 @@ class Package:
self.url = None
self.url_status = None
self.url_worker = None
self.cves = list()
self.latest_version = (RM_API_STATUS_ERROR, None, None)
def pkgvar(self):
@@ -144,11 +157,17 @@ class Package:
o = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[1]
lines = o.splitlines()
for line in lines:
m = re.match("^([0-9]*) warnings generated", line)
m = re.match("^([0-9]*) warnings generated", line.decode())
if m:
self.warnings = int(m.group(1))
return
def is_cve_ignored(self, cve):
"""
Tells if the CVE is ignored by the package
"""
return cve in self.all_ignored_cves.get(self.pkgvar(), [])
def __eq__(self, other):
return self.path == other.path
@@ -160,6 +179,113 @@ class Package:
(self.name, self.path, self.has_license, self.has_license_files, self.has_hash, self.patch_count)
class CVE:
"""An accessor class for CVE Items in NVD files"""
def __init__(self, nvd_cve):
"""Initialize a CVE from its NVD JSON representation"""
self.nvd_cve = nvd_cve
@staticmethod
def download_nvd_year(nvd_path, year):
metaf = "nvdcve-%s-%s.meta" % (NVD_JSON_VERSION, year)
path_metaf = os.path.join(nvd_path, metaf)
jsonf_gz = "nvdcve-%s-%s.json.gz" % (NVD_JSON_VERSION, year)
path_jsonf_gz = os.path.join(nvd_path, jsonf_gz)
# If the database file is less than a day old, we assume the NVD data
# locally available is recent enough.
if os.path.exists(path_jsonf_gz) and os.stat(path_jsonf_gz).st_mtime >= time.time() - 86400:
return path_jsonf_gz
# If not, we download the meta file
url = "%s/%s" % (NVD_BASE_URL, metaf)
print("Getting %s" % url)
page_meta = requests.get(url)
page_meta.raise_for_status()
# If the meta file already existed, we compare the existing
# one with the data newly downloaded. If they are different,
# we need to re-download the database.
# If the database does not exist locally, we need to redownload it in
# any case.
if os.path.exists(path_metaf) and os.path.exists(path_jsonf_gz):
meta_known = open(path_metaf, "r").read()
if page_meta.text == meta_known:
return path_jsonf_gz
# Grab the compressed JSON NVD, and write files to disk
url = "%s/%s" % (NVD_BASE_URL, jsonf_gz)
print("Getting %s" % url)
page_json = requests.get(url)
page_json.raise_for_status()
open(path_jsonf_gz, "wb").write(page_json.content)
open(path_metaf, "w").write(page_meta.text)
return path_jsonf_gz
@classmethod
def read_nvd_dir(cls, nvd_dir):
"""
Iterate over all the CVEs contained in NIST Vulnerability Database
feeds since NVD_START_YEAR. If the files are missing or outdated in
nvd_dir, a fresh copy will be downloaded, and kept in .json.gz
"""
for year in range(NVD_START_YEAR, datetime.datetime.now().year + 1):
filename = CVE.download_nvd_year(nvd_dir, year)
try:
content = ijson.items(gzip.GzipFile(filename), 'CVE_Items.item')
except:
print("ERROR: cannot read %s. Please remove the file then rerun this script" % filename)
raise
for cve in content:
yield cls(cve['cve'])
def each_product(self):
    """Yield every product entry found in this CVE's "affects" section."""
    vendor_entries = self.nvd_cve['affects']['vendor']['vendor_data']
    for vendor_entry in vendor_entries:
        for product_entry in vendor_entry['product']['product_data']:
            yield product_entry
@property
def identifier(self):
    """The unique CVE identifier, as stored in the NVD metadata."""
    meta = self.nvd_cve['CVE_data_meta']
    return meta['ID']
@property
def pkg_names(self):
    """The set of product (package) names referred to by this CVE."""
    return {product['product_name'] for product in self.each_product()}
def affects(self, br_pkg):
    """
    True if the Buildroot Package object passed as argument is affected
    by this CVE.
    """
    if br_pkg.is_cve_ignored(self.identifier):
        return False
    for product in self.each_product():
        if product['product_name'] != br_pkg.name:
            continue
        for v in product['version']['version_data']:
            if v["version_affected"] == "=":
                # Exact-version match
                if br_pkg.current_version == v["version_value"]:
                    return True
            elif v["version_affected"] == "<=":
                pkg_version = distutils.version.LooseVersion(br_pkg.current_version)
                # LooseVersion does not raise on unparseable input; it
                # simply lacks a .version attribute, which we detect here.
                if not hasattr(pkg_version, "version"):
                    print("Cannot parse package '%s' version '%s'" % (br_pkg.name, br_pkg.current_version))
                    continue
                cve_affected_version = distutils.version.LooseVersion(v["version_value"])
                if not hasattr(cve_affected_version, "version"):
                    print("Cannot parse CVE affected version '%s'" % v["version_value"])
                    continue
                # NOTE(review): this returns on the first parseable "<="
                # entry instead of examining any remaining version_data
                # entries — confirm this early return is intended.
                return pkg_version <= cve_affected_version
            else:
                # Unknown comparison operator: report it and keep scanning.
                print("version_affected: %s" % v['version_affected'])
    return False
def get_pkglist(npackages, package_list):
"""
Builds the list of Buildroot packages, returning a list of Package
@@ -222,70 +348,45 @@ def get_pkglist(npackages, package_list):
def package_init_make_info():
    """Collect license, license-files, version and ignored-CVE info for
    all packages in a single "make printvars" invocation, and store the
    results in the Package class-level dictionaries/lists.
    """
    # Fetch all variables at once
    variables = subprocess.check_output(["make", "BR2_HAVE_DOT_CONFIG=y", "-s", "printvars",
                                         "VARS=%_LICENSE %_LICENSE_FILES %_VERSION %_IGNORE_CVES"])
    variable_list = variables.decode().splitlines()

    # We process first the host package variables, and then the target
    # package variables. This means that if a package exists in both
    # target and host variants, with different values (eg. version
    # numbers (unlikely)), we'll report the target one.
    variable_list = [x[5:] for x in variable_list if x.startswith("HOST_")] + \
        [x for x in variable_list if not x.startswith("HOST_")]

    for l in variable_list:
        # Get variable name and value; maxsplit=1 so values containing
        # '=' (possible in license strings) are not truncated.
        pkgvar, value = l.split("=", 1)

        # Strip the suffix according to the variable
        if pkgvar.endswith("_LICENSE"):
            # If value is "unknown", no license details available
            if value == "unknown":
                continue
            pkgvar = pkgvar[:-8]
            Package.all_licenses.append(pkgvar)

        elif pkgvar.endswith("_LICENSE_FILES"):
            # Skip manifest-related variables, which are not per-package
            if pkgvar.endswith("_MANIFEST_LICENSE_FILES"):
                continue
            pkgvar = pkgvar[:-14]
            Package.all_license_files.append(pkgvar)

        elif pkgvar.endswith("_VERSION"):
            # _DL_VERSION also ends with _VERSION but is internal
            if pkgvar.endswith("_DL_VERSION"):
                continue
            pkgvar = pkgvar[:-8]
            Package.all_versions[pkgvar] = value

        elif pkgvar.endswith("_IGNORE_CVES"):
            pkgvar = pkgvar[:-12]
            Package.all_ignored_cves[pkgvar] = value.split()
def check_url_status_worker(url, url_status):
@@ -301,11 +402,13 @@ def check_url_status_worker(url, url_status):
def check_package_urls(packages):
    """Check the upstream URL of every package, in parallel.

    Fills in pkg.url_status for each package; the transient .url_worker
    attribute is deleted once its result has been collected so it does
    not leak into later serialization of the package objects.
    """
    pool = Pool(processes=64)
    for pkg in packages:
        pkg.url_worker = pool.apply_async(check_url_status_worker, (pkg.url, pkg.url_status))
    for pkg in packages:
        pkg.url_status = pkg.url_worker.get(timeout=3600)
        del pkg.url_worker
    pool.terminate()
def release_monitoring_get_latest_version_by_distro(pool, name):
@@ -346,6 +449,15 @@ def release_monitoring_get_latest_version_by_guess(pool, name):
return (RM_API_STATUS_NOT_FOUND, None, None)
def check_package_latest_version_worker(name):
    """Wrapper to try both by name then by guess"""
    # Progress indication: one line per package being queried
    print(name)
    res = release_monitoring_get_latest_version_by_distro(http_pool, name)
    if res[0] == RM_API_STATUS_NOT_FOUND:
        res = release_monitoring_get_latest_version_by_guess(http_pool, name)
    return res
def check_package_latest_version(packages):
    """
    Fills in the .latest_version field of all Package objects.

    This field is a tuple (status, version, id), where:
    - status is one of the RM_API_STATUS_* constants
    - version is the latest version known by release-monitoring.org
    - id: string containing the id of the project corresponding to this
      package, as known by release-monitoring.org
    """
    # The HTTPS connection pool is a module-level global so the worker
    # function (run via multiprocessing) can reach it.
    global http_pool
    http_pool = HTTPSConnectionPool('release-monitoring.org', port=443,
                                    cert_reqs='CERT_REQUIRED', ca_certs=certifi.where(),
                                    timeout=30)
    worker_pool = Pool(processes=64)
    results = worker_pool.map(check_package_latest_version_worker, (pkg.name for pkg in packages))
    # map() preserves input order, so results line up with packages.
    for pkg, r in zip(packages, results):
        pkg.latest_version = r
    worker_pool.terminate()
    del http_pool
def check_package_cves(nvd_path, packages):
    """Append to each package the identifiers of the CVEs affecting it.

    packages is a dict mapping package name to Package object.
    """
    # exist_ok avoids the race between the isdir() check and makedirs()
    os.makedirs(nvd_path, exist_ok=True)

    for cve in CVE.read_nvd_dir(nvd_path):
        for pkg_name in cve.pkg_names:
            if pkg_name in packages and cve.affects(packages[pkg_name]):
                packages[pkg_name].cves.append(cve.identifier)
def calculate_stats(packages):
@@ -410,6 +530,9 @@ def calculate_stats(packages):
else:
stats["version-not-uptodate"] += 1
stats["patches"] += pkg.patch_count
stats["total-cves"] += len(pkg.cves)
if len(pkg.cves) != 0:
stats["pkg-cves"] += 1
return stats
@@ -621,6 +744,17 @@ def dump_html_pkg(f, pkg):
f.write(" <td class=\"%s\">%s</td>\n" %
(" ".join(td_class), url_str))
# CVEs
td_class = ["centered"]
if len(pkg.cves) == 0:
td_class.append("correct")
else:
td_class.append("wrong")
f.write(" <td class=\"%s\">\n" % " ".join(td_class))
for cve in pkg.cves:
f.write(" <a href=\"https://security-tracker.debian.org/tracker/%s\">%s<br/>\n" % (cve, cve))
f.write(" </td>\n")
f.write(" </tr>\n")
@@ -638,6 +772,7 @@ def dump_html_all_pkgs(f, packages):
<td class=\"centered\">Latest version</td>
<td class=\"centered\">Warnings</td>
<td class=\"centered\">Upstream URL</td>
<td class=\"centered\">CVEs</td>
</tr>
""")
for pkg in sorted(packages):
@@ -676,46 +811,85 @@ def dump_html_stats(f, stats):
stats["version-not-uptodate"])
f.write("<tr><td>Packages with no known upstream version</td><td>%s</td></tr>\n" %
stats["version-unknown"])
f.write("<tr><td>Packages affected by CVEs</td><td>%s</td></tr>\n" %
stats["pkg-cves"])
f.write("<tr><td>Total number of CVEs affecting all packages</td><td>%s</td></tr>\n" %
stats["total-cves"])
f.write("</table>\n")
def dump_html_gen_info(f, date, commit):
    """Write the report footer with generation date and git commit, e.g.:
    Updated on Mon Feb 19 08:12:08 CET 2018, Git commit aa77030b8f5e...
    """
    f.write("<p><i>Updated on %s, git commit %s</i></p>\n" % (str(date), commit))
def dump_html(packages, stats, date, commit, output):
    """Generate the complete HTML report at path `output`."""
    with open(output, 'w') as f:
        f.write(html_header)
        dump_html_all_pkgs(f, packages)
        dump_html_stats(f, stats)
        dump_html_gen_info(f, date, commit)
        f.write(html_footer)
def dump_json(packages, stats, date, commit, output):
    """Write the packages/stats report as JSON to path `output`."""
    # Format packages as a dictionary instead of a list.
    # Exclude local fields that do not contain real data (the name is
    # already the key, and url_worker is a transient worker handle).
    excluded_fields = ['url_worker', 'name']
    pkgs = {
        pkg.name: {
            k: v
            for k, v in pkg.__dict__.items()
            if k not in excluded_fields
        } for pkg in packages
    }
    # Aggregate "infra-*" stats entries into a single 'infra' dict entry
    statistics = {
        k: v
        for k, v in stats.items()
        if not k.startswith('infra-')
    }
    statistics['infra'] = {k[6:]: v for k, v in stats.items() if k.startswith('infra-')}
    # The actual structure to dump, add commit and date to it
    final = {'packages': pkgs,
             'stats': statistics,
             'commit': commit,
             'date': str(date)}
    with open(output, 'w') as f:
        json.dump(final, f, indent=2, separators=(',', ': '))
        f.write('\n')
def parse_args():
    """Parse the command line; at least one output format is required."""
    parser = argparse.ArgumentParser()
    output = parser.add_argument_group('output', 'Output file(s)')
    output.add_argument('--html', dest='html', action='store',
                        help='HTML output file')
    output.add_argument('--json', dest='json', action='store',
                        help='JSON output file')
    # -n and -p are mutually exclusive ways of restricting the package set
    packages = parser.add_mutually_exclusive_group()
    packages.add_argument('-n', dest='npackages', type=int, action='store',
                          help='Number of packages')
    packages.add_argument('-p', dest='packages', action='store',
                          help='List of packages (comma separated)')
    parser.add_argument('--nvd-path', dest='nvd_path',
                        help='Path to the local NVD database')
    args = parser.parse_args()
    if not args.html and not args.json:
        parser.error('at least one of --html or --json (or both) is required')
    return args
def __main__():
args = parse_args()
if args.npackages and args.packages:
print("ERROR: -n and -p are mutually exclusive")
sys.exit(1)
if args.packages:
package_list = args.packages.split(",")
else:
package_list = None
date = datetime.datetime.utcnow()
commit = subprocess.check_output(['git', 'rev-parse',
'HEAD']).splitlines()[0].decode()
print("Build package list ...")
packages = get_pkglist(args.npackages, package_list)
print("Getting package make info ...")
@@ -733,10 +907,17 @@ def __main__():
check_package_urls(packages)
print("Getting latest versions ...")
check_package_latest_version(packages)
if args.nvd_path:
print("Checking packages CVEs")
check_package_cves(args.nvd_path, {p.name: p for p in packages})
print("Calculate stats")
stats = calculate_stats(packages)
print("Write HTML")
dump_html(packages, stats, args.output)
if args.html:
print("Write HTML")
dump_html(packages, stats, date, commit, args.html)
if args.json:
print("Write JSON")
dump_json(packages, stats, date, commit, args.json)
__main__()

View File

@@ -22,6 +22,7 @@ import os.path
import argparse
import csv
import collections
import math
try:
import matplotlib
@@ -32,8 +33,13 @@ except ImportError:
sys.stderr.write("You need python-matplotlib to generate the size graph\n")
exit(1)
class Config:
    """Runtime options for the size-graph generation, set from the CLI."""
    # Sort pie slices biggest-first instead of the default smallest-first
    biggest_first = False
    # Use IEC/binary prefixes (powers of 1024) instead of SI (powers of 1000)
    iec = False
    # Packages below this fraction of the total are folded into "Other"
    size_limit = 0.01
    colors = ['#e60004', '#f28e00', '#ffed00', '#940084',
              '#2e1d86', '#0068b5', '#009836', '#97c000']
#
@@ -66,8 +72,8 @@ def add_file(filesdict, relpath, abspath, pkg):
#
def build_package_dict(builddir):
filesdict = {}
with open(os.path.join(builddir, "build", "packages-file-list.txt")) as filelistf:
for l in filelistf.readlines():
with open(os.path.join(builddir, "build", "packages-file-list.txt")) as f:
for l in f.readlines():
pkg, fpath = l.split(",", 1)
# remove the initial './' in each file path
fpath = fpath.strip()[2:]
@@ -127,23 +133,46 @@ def build_package_size(filesdict, builddir):
# outputf: output file for the graph
#
def draw_graph(pkgsize, outputf):
    """Draw a pie chart of per-package filesystem size into outputf.

    pkgsize maps package name -> size in bytes. Packages smaller than
    Config.size_limit of the total are grouped into "Other"; the special
    "unknown" package is shown as its own "Unknown" slice.
    """
    def size2string(sz):
        # Human-readable size with SI or IEC prefixes per Config.iec
        if Config.iec:
            divider = 1024.0
            prefixes = ['', 'Ki', 'Mi', 'Gi', 'Ti']
        else:
            divider = 1000.0
            prefixes = ['', 'k', 'M', 'G', 'T']
        while sz > divider and len(prefixes) > 1:
            prefixes = prefixes[1:]
            sz = sz/divider
        # precision is made so that there are always at least three meaningful
        # digits displayed (e.g. '3.14' and '10.4', not just '3' and '10')
        precision = int(2-math.floor(math.log10(sz))) if sz < 1000 else 0
        return '{:.{prec}f} {}B'.format(sz, prefixes[0], prec=precision)

    total = sum(pkgsize.values())
    labels = []
    values = []
    other_value = 0
    unknown_value = 0
    for (p, sz) in sorted(pkgsize.items(), key=lambda x: x[1],
                          reverse=Config.biggest_first):
        if sz < (total * Config.size_limit):
            other_value += sz
        elif p == "unknown":
            unknown_value = sz
        else:
            labels.append("%s (%s)" % (p, size2string(sz)))
            values.append(sz)
    if unknown_value != 0:
        labels.append("Unknown (%s)" % (size2string(unknown_value)))
        values.append(unknown_value)
    if other_value != 0:
        labels.append("Other (%s)" % (size2string(other_value)))
        values.append(other_value)

    plt.figure()
    patches, texts, autotexts = plt.pie(values, labels=labels,
                                        autopct='%1.1f%%', shadow=True,
                                        colors=Config.colors)
    # Reduce text size
    proptease = fm.FontProperties()
    proptease.set_size('xx-small')
    plt.setp(autotexts, fontproperties=proptease)
    plt.setp(texts, fontproperties=proptease)

    plt.suptitle("Filesystem size per package", fontsize=18, y=.97)
    plt.title("Total filesystem size: %s" % (size2string(total)), fontsize=10,
              y=.96)
    plt.savefig(outputf)
@@ -209,32 +239,70 @@ def gen_packages_csv(pkgsizes, outputf):
total = sum(pkgsizes.values())
with open(outputf, 'w') as csvfile:
wr = csv.writer(csvfile, delimiter=',', quoting=csv.QUOTE_MINIMAL)
wr.writerow(["Package name", "Package size", "Package size in system (%)"])
wr.writerow(["Package name", "Package size",
"Package size in system (%)"])
for (pkg, size) in pkgsizes.items():
wr.writerow([pkg, size, "%.1f" % (float(size) / total * 100)])
#
# Our special action for --iec, --binary, --si, --decimal
#
class PrefixAction(argparse.Action):
    """argparse action storing True for --iec/--binary and False for
    --si/--decimal into the shared destination (derived from --iec)."""

    def __init__(self, option_strings, dest, **kwargs):
        # This flag takes no value on the command line and fixes its own
        # type, so reject conflicting keyword arguments up front.
        for key in ["type", "nargs"]:
            if key in kwargs:
                raise ValueError('"{}" not allowed'.format(key))
        super(PrefixAction, self).__init__(option_strings, dest, nargs=0,
                                           type=bool, **kwargs)

    def __call__(self, parser, namespace, values, option_string=None):
        # True exactly when one of the binary-prefix spellings was used
        setattr(namespace, self.dest, option_string in ["--iec", "--binary"])
def main():
    """Parse arguments, compute per-package sizes, emit graph/CSV outputs."""
    parser = argparse.ArgumentParser(description='Draw size statistics graphs')

    parser.add_argument("--builddir", '-i', metavar="BUILDDIR", required=True,
                        help="Buildroot output directory")
    parser.add_argument("--graph", '-g', metavar="GRAPH",
                        help="Graph output file (.pdf or .png extension)")
    parser.add_argument("--file-size-csv", '-f', metavar="FILE_SIZE_CSV",
                        help="CSV output file with file size statistics")
    parser.add_argument("--package-size-csv", '-p', metavar="PKG_SIZE_CSV",
                        help="CSV output file with package size statistics")
    parser.add_argument("--biggest-first", action='store_true',
                        help="Sort packages in decreasing size order, " +
                             "rather than in increasing size order")
    parser.add_argument("--iec", "--binary", "--si", "--decimal",
                        action=PrefixAction,
                        help="Use IEC (binary, powers of 1024) or SI (decimal, "
                             "powers of 1000, the default) prefixes")
    parser.add_argument("--size-limit", "-l", type=float,
                        help='Under this size ratio, files are accounted to ' +
                             'the generic "Other" package. Default: 0.01 (1%%)')
    args = parser.parse_args()

    # Propagate CLI choices into the module-wide Config
    Config.biggest_first = args.biggest_first
    Config.iec = args.iec
    if args.size_limit is not None:
        if args.size_limit < 0.0 or args.size_limit > 1.0:
            parser.error("--size-limit must be in [0.0..1.0]")
        Config.size_limit = args.size_limit

    # Find out which package installed what files
    pkgdict = build_package_dict(args.builddir)

    # Collect the size installed by each package
    pkgsize = build_package_size(pkgdict, args.builddir)

    if args.graph:
        draw_graph(pkgsize, args.graph)
    if args.file_size_csv:
        gen_files_csv(pkgdict, pkgsize, args.file_size_csv)
    if args.package_size_csv:
        gen_packages_csv(pkgsize, args.package_size_csv)


if __name__ == "__main__":
    main()