Update buildroot to 2020.02.7 (#923)

Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>
This commit is contained in:
Pascal Vizeli
2020-10-22 17:05:36 +02:00
committed by GitHub
parent fdcb94f0d8
commit dcfb296dcf
332 changed files with 10767 additions and 1806 deletions

View File

@@ -119,7 +119,7 @@ function apply_patch {
exit 1
fi
echo "${path}/${patch}" >> ${builddir}/.applied_patches_list
${uncomp} "${path}/$patch" | patch -g0 -p1 -E -d "${builddir}" -t -N $silent
${uncomp} "${path}/$patch" | patch -g0 -p1 -E --no-backup-if-mismatch -d "${builddir}" -t -N $silent
if [ $? != 0 ] ; then
echo "Patch failed! Please fix ${patch}!"
exit 1
@@ -168,6 +168,3 @@ if [ "`find $builddir/ '(' -name '*.rej' -o -name '.*.rej' ')' -print`" ] ; then
echo "Aborting. Reject files found."
exit 1
fi
# Remove backup files
find $builddir/ '(' -name '*.orig' -o -name '.*.orig' ')' -exec rm -f {} \;

View File

@@ -1,4 +1,4 @@
#!/usr/bin/env python
#!/usr/bin/env python3
# Copyright (C) 2009 by Thomas Petazzoni <thomas.petazzoni@free-electrons.com>
#
@@ -16,23 +16,25 @@
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import aiohttp
import argparse
import asyncio
import datetime
import fnmatch
import os
from collections import defaultdict
import re
import subprocess
import requests # URL checking
import requests # NVD database download
import json
import ijson
import certifi
import distutils.version
import time
import gzip
from urllib3 import HTTPSConnectionPool
from urllib3.exceptions import HTTPError
from multiprocessing import Pool
import sys
sys.path.append('utils/')
from getdeveloperlib import parse_developers # noqa: E402
NVD_START_YEAR = 2002
NVD_JSON_VERSION = "1.0"
@@ -46,32 +48,68 @@ RM_API_STATUS_FOUND_BY_DISTRO = 2
RM_API_STATUS_FOUND_BY_PATTERN = 3
RM_API_STATUS_NOT_FOUND = 4
# Used to make multiple requests to the same host. It is global
# because it's used by sub-processes.
http_pool = None
CVE_AFFECTS = 1
CVE_DOESNT_AFFECT = 2
CVE_UNKNOWN = 3
class Defconfig:
    """A Buildroot defconfig, identified by its name and config file path."""

    def __init__(self, name, path):
        self.name = name
        self.path = path
        # Filled in later by set_developers(); None until then.
        self.developers = None

    def set_developers(self, developers):
        """
        Fills in the .developers field with the names of the developers
        whose entries cover this defconfig's file.
        """
        names = []
        for developer in developers:
            if developer.hasfile(self.path):
                names.append(developer.name)
        self.developers = names
def get_defconfig_list():
    """
    Builds the list of Buildroot defconfigs, returning a list of Defconfig
    objects.
    """
    suffix = '_defconfig'
    defconfigs = []
    for fname in os.listdir('configs'):
        if not fname.endswith(suffix):
            continue
        shortname = fname[:-len(suffix)]
        defconfigs.append(Defconfig(shortname, os.path.join('configs', fname)))
    return defconfigs
class Package:
all_licenses = list()
all_licenses = dict()
all_license_files = list()
all_versions = dict()
all_ignored_cves = dict()
# This is the list of all possible checks. Add new checks to this list so
# a tool that post-processeds the json output knows the checks before
# iterating over the packages.
status_checks = ['cve', 'developers', 'hash', 'license',
'license-files', 'patches', 'pkg-check', 'url', 'version']
def __init__(self, name, path):
self.name = name
self.path = path
self.pkg_path = os.path.dirname(path)
self.infras = None
self.license = None
self.has_license = False
self.has_license_files = False
self.has_hash = False
self.patch_count = 0
self.patch_files = []
self.warnings = 0
self.current_version = None
self.url = None
self.url_status = None
self.url_worker = None
self.cves = list()
self.latest_version = (RM_API_STATUS_ERROR, None, None)
self.latest_version = {'status': RM_API_STATUS_ERROR, 'version': None, 'id': None}
self.status = {}
def pkgvar(self):
    # Derive the package's make-variable prefix from its name,
    # e.g. "foo-bar" -> "FOO_BAR".
    return self.name.upper().replace("-", "_")
@@ -80,19 +118,32 @@ class Package:
"""
Fills in the .url field
"""
self.url_status = "No Config.in"
self.status['url'] = ("warning", "no Config.in")
for filename in os.listdir(os.path.dirname(self.path)):
if fnmatch.fnmatch(filename, 'Config.*'):
fp = open(os.path.join(os.path.dirname(self.path), filename), "r")
for config_line in fp:
if URL_RE.match(config_line):
self.url = config_line.strip()
self.url_status = "Found"
self.status['url'] = ("ok", "found")
fp.close()
return
self.url_status = "Missing"
self.status['url'] = ("error", "missing")
fp.close()
@property
def patch_count(self):
    # Number of patches carried by this package, derived from the
    # .patch_files list (filled in by set_patch_count).
    return len(self.patch_files)
@property
def has_valid_infra(self):
    # A package is considered to have a valid infrastructure unless its
    # first infra is 'virtual' or the .infras list is empty.
    try:
        if self.infras[0][1] == 'virtual':
            return False
    except IndexError:
        # no infra detected at all
        return False
    return True
def set_infra(self):
"""
Fills in the .infras field
@@ -112,29 +163,55 @@ class Package:
def set_license(self):
"""
Fills in the .has_license and .has_license_files fields
Fills in the .status['license'] and .status['license-files'] fields
"""
if not self.has_valid_infra:
self.status['license'] = ("na", "no valid package infra")
self.status['license-files'] = ("na", "no valid package infra")
return
var = self.pkgvar()
self.status['license'] = ("error", "missing")
self.status['license-files'] = ("error", "missing")
if var in self.all_licenses:
self.has_license = True
self.license = self.all_licenses[var]
self.status['license'] = ("ok", "found")
if var in self.all_license_files:
self.has_license_files = True
self.status['license-files'] = ("ok", "found")
def set_hash_info(self):
"""
Fills in the .has_hash field
Fills in the .status['hash'] field
"""
if not self.has_valid_infra:
self.status['hash'] = ("na", "no valid package infra")
self.status['hash-license'] = ("na", "no valid package infra")
return
hashpath = self.path.replace(".mk", ".hash")
self.has_hash = os.path.exists(hashpath)
if os.path.exists(hashpath):
self.status['hash'] = ("ok", "found")
else:
self.status['hash'] = ("error", "missing")
def set_patch_count(self):
"""
Fills in the .patch_count field
Fills in the .patch_count, .patch_files and .status['patches'] fields
"""
self.patch_count = 0
if not self.has_valid_infra:
self.status['patches'] = ("na", "no valid package infra")
return
pkgdir = os.path.dirname(self.path)
for subdir, _, _ in os.walk(pkgdir):
self.patch_count += len(fnmatch.filter(os.listdir(subdir), '*.patch'))
self.patch_files = fnmatch.filter(os.listdir(subdir), '*.patch')
if self.patch_count == 0:
self.status['patches'] = ("ok", "no patches")
elif self.patch_count < 5:
self.status['patches'] = ("warning", "some patches")
else:
self.status['patches'] = ("error", "lots of patches")
def set_current_version(self):
"""
@@ -146,10 +223,11 @@ class Package:
def set_check_package_warnings(self):
"""
Fills in the .warnings field
Fills in the .warnings and .status['pkg-check'] fields
"""
cmd = ["./utils/check-package"]
pkgdir = os.path.dirname(self.path)
self.status['pkg-check'] = ("error", "Missing")
for root, dirs, files in os.walk(pkgdir):
for f in files:
if f.endswith(".mk") or f.endswith(".hash") or f == "Config.in" or f == "Config.in.host":
@@ -160,6 +238,10 @@ class Package:
m = re.match("^([0-9]*) warnings generated", line.decode())
if m:
self.warnings = int(m.group(1))
if self.warnings == 0:
self.status['pkg-check'] = ("ok", "no warnings")
else:
self.status['pkg-check'] = ("error", "{} warnings".format(self.warnings))
return
def is_cve_ignored(self, cve):
@@ -168,6 +250,24 @@ class Package:
"""
return cve in self.all_ignored_cves.get(self.pkgvar(), [])
def set_developers(self, developers):
    """
    Fills in the .developers and .status['developers'] fields.

    developers: iterable of developer objects providing .name and
    .hasfile(path); a developer is retained when they own this
    package's .mk file.
    """
    self.developers = [
        dev.name
        for dev in developers
        if dev.hasfile(self.path)
    ]
    if self.developers:
        self.status['developers'] = ("ok", "{} developers".format(len(self.developers)))
    else:
        self.status['developers'] = ("warning", "no developers")
def is_status_ok(self, name):
    # True when the named status check (e.g. 'license', 'hash') passed,
    # i.e. its (state, message) tuple starts with 'ok'.
    return self.status[name][0] == 'ok'
def __eq__(self, other):
return self.path == other.path
@@ -176,7 +276,8 @@ class Package:
def __str__(self):
return "%s (path='%s', license='%s', license_files='%s', hash='%s', patches=%d)" % \
(self.name, self.path, self.has_license, self.has_license_files, self.has_hash, self.patch_count)
(self.name, self.path, self.is_status_ok('license'),
self.is_status_ok('license-files'), self.status['hash'], self.patch_count)
class CVE:
@@ -233,7 +334,7 @@ class CVE:
filename = CVE.download_nvd_year(nvd_dir, year)
try:
content = ijson.items(gzip.GzipFile(filename), 'CVE_Items.item')
except:
except: # noqa: E722
print("ERROR: cannot read %s. Please remove the file then rerun this script" % filename)
raise
for cve in content:
@@ -261,7 +362,7 @@ class CVE:
by this CVE.
"""
if br_pkg.is_cve_ignored(self.identifier):
return False
return CVE_DOESNT_AFFECT
for product in self.each_product():
if product['product_name'] != br_pkg.name:
@@ -270,7 +371,7 @@ class CVE:
for v in product['version']['version_data']:
if v["version_affected"] == "=":
if br_pkg.current_version == v["version_value"]:
return True
return CVE_AFFECTS
elif v["version_affected"] == "<=":
pkg_version = distutils.version.LooseVersion(br_pkg.current_version)
if not hasattr(pkg_version, "version"):
@@ -280,10 +381,17 @@ class CVE:
if not hasattr(cve_affected_version, "version"):
print("Cannot parse CVE affected version '%s'" % v["version_value"])
continue
return pkg_version <= cve_affected_version
try:
affected = pkg_version <= cve_affected_version
except TypeError:
return CVE_UNKNOWN
if affected:
return CVE_AFFECTS
else:
return CVE_DOESNT_AFFECT
else:
print("version_affected: %s" % v['version_affected'])
return False
return CVE_DOESNT_AFFECT
def get_pkglist(npackages, package_list):
@@ -370,7 +478,7 @@ def package_init_make_info():
if value == "unknown":
continue
pkgvar = pkgvar[:-8]
Package.all_licenses.append(pkgvar)
Package.all_licenses[pkgvar] = value
elif pkgvar.endswith("_LICENSE_FILES"):
if pkgvar.endswith("_MANIFEST_LICENSE_FILES"):
@@ -389,82 +497,140 @@ def package_init_make_info():
Package.all_ignored_cves[pkgvar] = value.split()
def check_url_status_worker(url, url_status):
    """
    Probe a package URL with a HEAD request and classify the result.

    Returns "Ok" on success, "Invalid(<code>)" for an HTTP error status,
    "Invalid(Err)" on a network failure, or the incoming url_status
    unchanged when there is no URL to check.
    """
    if url_status == "Missing" or url_status == "No Config.in":
        # Nothing to probe: propagate the existing status as-is.
        return url_status
    try:
        code = requests.head(url, timeout=30).status_code
    except requests.exceptions.RequestException:
        return "Invalid(Err)"
    if code >= 400:
        return "Invalid(%s)" % str(code)
    return "Ok"
check_url_count = 0
def check_package_urls(packages):
    """
    Verify the upstream URL of every package, in parallel.

    Each URL is probed by check_url_status_worker() through a process
    pool; the resulting status string is stored back into pkg.url_status
    and the transient worker handle is dropped afterwards.
    """
    worker_pool = Pool(processes=64)
    # Fan out all requests first so they run concurrently ...
    for pkg in packages:
        pkg.url_worker = worker_pool.apply_async(
            check_url_status_worker, (pkg.url, pkg.url_status))
    # ... then collect the results.
    for pkg in packages:
        pkg.url_status = pkg.url_worker.get(timeout=3600)
        del pkg.url_worker
    worker_pool.terminate()
async def check_url_status(session, pkg, npkgs, retry=True):
global check_url_count
def release_monitoring_get_latest_version_by_distro(pool, name):
try:
req = pool.request('GET', "/api/project/Buildroot/%s" % name)
except HTTPError:
return (RM_API_STATUS_ERROR, None, None)
async with session.get(pkg.url) as resp:
if resp.status >= 400:
pkg.status['url'] = ("error", "invalid {}".format(resp.status))
check_url_count += 1
print("[%04d/%04d] %s" % (check_url_count, npkgs, pkg.name))
return
except (aiohttp.ClientError, asyncio.TimeoutError):
if retry:
return await check_url_status(session, pkg, npkgs, retry=False)
else:
pkg.status['url'] = ("error", "invalid (err)")
check_url_count += 1
print("[%04d/%04d] %s" % (check_url_count, npkgs, pkg.name))
return
if req.status != 200:
return (RM_API_STATUS_NOT_FOUND, None, None)
pkg.status['url'] = ("ok", "valid")
check_url_count += 1
print("[%04d/%04d] %s" % (check_url_count, npkgs, pkg.name))
data = json.loads(req.data)
if 'version' in data:
return (RM_API_STATUS_FOUND_BY_DISTRO, data['version'], data['id'])
async def check_package_urls(packages):
    """
    Check the upstream URL of all relevant packages, concurrently.

    Only packages whose URL lookup succeeded (status 'ok') are probed;
    check_url_status() records the outcome in pkg.status['url'].
    """
    connector = aiohttp.TCPConnector(limit_per_host=5)
    async with aiohttp.ClientSession(connector=connector, trust_env=True) as sess:
        to_check = [p for p in packages if p.status['url'][0] == 'ok']
        tasks = [check_url_status(sess, p, len(to_check)) for p in to_check]
        await asyncio.wait(tasks)
def check_package_latest_version_set_status(pkg, status, version, identifier):
pkg.latest_version = {
"status": status,
"version": version,
"id": identifier,
}
if pkg.latest_version['status'] == RM_API_STATUS_ERROR:
pkg.status['version'] = ('warning', "Release Monitoring API error")
elif pkg.latest_version['status'] == RM_API_STATUS_NOT_FOUND:
pkg.status['version'] = ('warning', "Package not found on Release Monitoring")
if pkg.latest_version['version'] is None:
pkg.status['version'] = ('warning', "No upstream version available on Release Monitoring")
elif pkg.latest_version['version'] != pkg.current_version:
pkg.status['version'] = ('error', "The newer version {} is available upstream".format(pkg.latest_version['version']))
else:
return (RM_API_STATUS_FOUND_BY_DISTRO, None, data['id'])
pkg.status['version'] = ('ok', 'up-to-date')
def release_monitoring_get_latest_version_by_guess(pool, name):
async def check_package_get_latest_version_by_distro(session, pkg, retry=True):
url = "https://release-monitoring.org//api/project/Buildroot/%s" % pkg.name
try:
req = pool.request('GET', "/api/projects/?pattern=%s" % name)
except HTTPError:
return (RM_API_STATUS_ERROR, None, None)
async with session.get(url) as resp:
if resp.status != 200:
return False
if req.status != 200:
return (RM_API_STATUS_NOT_FOUND, None, None)
data = await resp.json()
version = data['version'] if 'version' in data else None
check_package_latest_version_set_status(pkg,
RM_API_STATUS_FOUND_BY_DISTRO,
version,
data['id'])
return True
data = json.loads(req.data)
projects = data['projects']
projects.sort(key=lambda x: x['id'])
for p in projects:
if p['name'] == name and 'version' in p:
return (RM_API_STATUS_FOUND_BY_PATTERN, p['version'], p['id'])
return (RM_API_STATUS_NOT_FOUND, None, None)
except (aiohttp.ClientError, asyncio.TimeoutError):
if retry:
return await check_package_get_latest_version_by_distro(session, pkg, retry=False)
else:
return False
def check_package_latest_version_worker(name):
    """
    Look up a package on release-monitoring.org: first via the Buildroot
    distro mapping, then by a name-pattern guess when not found.

    Returns the (status, version, id) tuple from the lookup helpers.
    """
    print(name)
    result = release_monitoring_get_latest_version_by_distro(http_pool, name)
    if result[0] != RM_API_STATUS_NOT_FOUND:
        return result
    # Fall back to guessing by project name pattern.
    return release_monitoring_get_latest_version_by_guess(http_pool, name)
async def check_package_get_latest_version_by_guess(session, pkg, retry=True):
    """
    Query release-monitoring.org for projects matching the package name
    and, when one matches exactly, record its latest version on pkg.

    Returns True on success, False on HTTP failure or a second network
    failure; one retry is attempted on network errors.
    """
    url = "https://release-monitoring.org/api/projects/?pattern=%s" % pkg.name
    try:
        async with session.get(url) as resp:
            if resp.status != 200:
                return False
            data = await resp.json()
            # keep only the projects with the exact package name and a
            # known version, preferring the lowest project id
            candidates = sorted(
                (p for p in data['projects']
                 if p['name'] == pkg.name and 'version' in p),
                key=lambda p: p['id'])
            if candidates:
                best = candidates[0]
                check_package_latest_version_set_status(
                    pkg, RM_API_STATUS_FOUND_BY_DISTRO,
                    best['version'], best['id'])
                return True
    except (aiohttp.ClientError, asyncio.TimeoutError):
        if retry:
            return await check_package_get_latest_version_by_guess(session, pkg, retry=False)
        return False
def check_package_latest_version(packages):
check_latest_count = 0
async def check_package_latest_version_get(session, pkg, npkgs):
    """
    Find the latest upstream version of one package: first through the
    Buildroot distro mapping on release-monitoring.org, then by guessing
    from the package name; mark the package not-found when both fail.
    A progress line is printed once per processed package.
    """
    global check_latest_count
    found = await check_package_get_latest_version_by_distro(session, pkg)
    if not found:
        found = await check_package_get_latest_version_by_guess(session, pkg)
    if not found:
        check_package_latest_version_set_status(pkg,
                                                RM_API_STATUS_NOT_FOUND,
                                                None, None)
    # shared counter updated by every concurrent task
    check_latest_count += 1
    print("[%04d/%04d] %s" % (check_latest_count, npkgs, pkg.name))
async def check_package_latest_version(packages):
"""
Fills in the .latest_version field of all Package objects
This field has a special format:
(status, version, id)
with:
This field is a dict and has the following keys:
- status: one of RM_API_STATUS_ERROR,
RM_API_STATUS_FOUND_BY_DISTRO, RM_API_STATUS_FOUND_BY_PATTERN,
RM_API_STATUS_NOT_FOUND
@@ -473,16 +639,17 @@ def check_package_latest_version(packages):
- id: string containing the id of the project corresponding to this
package, as known by release-monitoring.org
"""
global http_pool
http_pool = HTTPSConnectionPool('release-monitoring.org', port=443,
cert_reqs='CERT_REQUIRED', ca_certs=certifi.where(),
timeout=30)
worker_pool = Pool(processes=64)
results = worker_pool.map(check_package_latest_version_worker, (pkg.name for pkg in packages))
for pkg, r in zip(packages, results):
pkg.latest_version = r
worker_pool.terminate()
del http_pool
for pkg in [p for p in packages if not p.has_valid_infra]:
pkg.status['version'] = ("na", "no valid package infra")
tasks = []
connector = aiohttp.TCPConnector(limit_per_host=5)
async with aiohttp.ClientSession(connector=connector, trust_env=True) as sess:
packages = [p for p in packages if p.has_valid_infra]
for pkg in packages:
tasks.append(check_package_latest_version_get(sess, pkg, len(packages)))
await asyncio.wait(tasks)
def check_package_cves(nvd_path, packages):
@@ -491,12 +658,13 @@ def check_package_cves(nvd_path, packages):
for cve in CVE.read_nvd_dir(nvd_path):
for pkg_name in cve.pkg_names:
if pkg_name in packages and cve.affects(packages[pkg_name]):
if pkg_name in packages and cve.affects(packages[pkg_name]) == CVE_AFFECTS:
packages[pkg_name].cves.append(cve.identifier)
def calculate_stats(packages):
stats = defaultdict(int)
stats['packages'] = len(packages)
for pkg in packages:
# If packages have multiple infra, take the first one. For the
# vast majority of packages, the target and host infra are the
@@ -507,25 +675,25 @@ def calculate_stats(packages):
stats["infra-%s" % infra] += 1
else:
stats["infra-unknown"] += 1
if pkg.has_license:
if pkg.is_status_ok('license'):
stats["license"] += 1
else:
stats["no-license"] += 1
if pkg.has_license_files:
if pkg.is_status_ok('license-files'):
stats["license-files"] += 1
else:
stats["no-license-files"] += 1
if pkg.has_hash:
if pkg.is_status_ok('hash'):
stats["hash"] += 1
else:
stats["no-hash"] += 1
if pkg.latest_version[0] == RM_API_STATUS_FOUND_BY_DISTRO:
if pkg.latest_version['status'] == RM_API_STATUS_FOUND_BY_DISTRO:
stats["rmo-mapping"] += 1
else:
stats["rmo-no-mapping"] += 1
if not pkg.latest_version[1]:
if not pkg.latest_version['version']:
stats["version-unknown"] += 1
elif pkg.latest_version[1] == pkg.current_version:
elif pkg.latest_version['version'] == pkg.current_version:
stats["version-uptodate"] += 1
else:
stats["version-not-uptodate"] += 1
@@ -658,30 +826,30 @@ def dump_html_pkg(f, pkg):
# License
td_class = ["centered"]
if pkg.has_license:
if pkg.is_status_ok('license'):
td_class.append("correct")
else:
td_class.append("wrong")
f.write(" <td class=\"%s\">%s</td>\n" %
(" ".join(td_class), boolean_str(pkg.has_license)))
(" ".join(td_class), boolean_str(pkg.is_status_ok('license'))))
# License files
td_class = ["centered"]
if pkg.has_license_files:
if pkg.is_status_ok('license-files'):
td_class.append("correct")
else:
td_class.append("wrong")
f.write(" <td class=\"%s\">%s</td>\n" %
(" ".join(td_class), boolean_str(pkg.has_license_files)))
(" ".join(td_class), boolean_str(pkg.is_status_ok('license-files'))))
# Hash
td_class = ["centered"]
if pkg.has_hash:
if pkg.is_status_ok('hash'):
td_class.append("correct")
else:
td_class.append("wrong")
f.write(" <td class=\"%s\">%s</td>\n" %
(" ".join(td_class), boolean_str(pkg.has_hash)))
(" ".join(td_class), boolean_str(pkg.is_status_ok('hash'))))
# Current version
if len(pkg.current_version) > 20:
@@ -691,29 +859,29 @@ def dump_html_pkg(f, pkg):
f.write(" <td class=\"centered\">%s</td>\n" % current_version)
# Latest version
if pkg.latest_version[0] == RM_API_STATUS_ERROR:
if pkg.latest_version['status'] == RM_API_STATUS_ERROR:
td_class.append("version-error")
if pkg.latest_version[1] is None:
if pkg.latest_version['version'] is None:
td_class.append("version-unknown")
elif pkg.latest_version[1] != pkg.current_version:
elif pkg.latest_version['version'] != pkg.current_version:
td_class.append("version-needs-update")
else:
td_class.append("version-good")
if pkg.latest_version[0] == RM_API_STATUS_ERROR:
if pkg.latest_version['status'] == RM_API_STATUS_ERROR:
latest_version_text = "<b>Error</b>"
elif pkg.latest_version[0] == RM_API_STATUS_NOT_FOUND:
elif pkg.latest_version['status'] == RM_API_STATUS_NOT_FOUND:
latest_version_text = "<b>Not found</b>"
else:
if pkg.latest_version[1] is None:
if pkg.latest_version['version'] is None:
latest_version_text = "<b>Found, but no version</b>"
else:
latest_version_text = "<a href=\"https://release-monitoring.org/project/%s\"><b>%s</b></a>" % \
(pkg.latest_version[2], str(pkg.latest_version[1]))
(pkg.latest_version['id'], str(pkg.latest_version['version']))
latest_version_text += "<br/>"
if pkg.latest_version[0] == RM_API_STATUS_FOUND_BY_DISTRO:
if pkg.latest_version['status'] == RM_API_STATUS_FOUND_BY_DISTRO:
latest_version_text += "found by <a href=\"https://release-monitoring.org/distro/Buildroot/\">distro</a>"
else:
latest_version_text += "found by guess"
@@ -732,12 +900,12 @@ def dump_html_pkg(f, pkg):
# URL status
td_class = ["centered"]
url_str = pkg.url_status
if pkg.url_status == "Missing" or pkg.url_status == "No Config.in":
url_str = pkg.status['url'][1]
if pkg.status['url'][0] in ("error", "warning"):
td_class.append("missing_url")
elif pkg.url_status.startswith("Invalid"):
if pkg.status['url'][0] == "error":
td_class.append("invalid_url")
url_str = "<a href=%s>%s</a>" % (pkg.url, pkg.url_status)
url_str = "<a href=%s>%s</a>" % (pkg.url, pkg.status['url'][1])
else:
td_class.append("good_url")
url_str = "<a href=%s>Link</a>" % pkg.url
@@ -832,7 +1000,7 @@ def dump_html(packages, stats, date, commit, output):
f.write(html_footer)
def dump_json(packages, stats, date, commit, output):
def dump_json(packages, defconfigs, stats, date, commit, output):
# Format packages as a dictionnary instead of a list
# Exclude local field that does not contains real date
excluded_fields = ['url_worker', 'name']
@@ -843,6 +1011,12 @@ def dump_json(packages, stats, date, commit, output):
if k not in excluded_fields
} for pkg in packages
}
defconfigs = {
d.name: {
k: v
for k, v in d.__dict__.items()
} for d in defconfigs
}
# Aggregate infrastructures into a single dict entry
statistics = {
k: v
@@ -853,6 +1027,8 @@ def dump_json(packages, stats, date, commit, output):
# The actual structure to dump, add commit and date to it
final = {'packages': pkgs,
'stats': statistics,
'defconfigs': defconfigs,
'package_status_checks': Package.status_checks,
'commit': commit,
'date': str(date)}
@@ -861,12 +1037,16 @@ def dump_json(packages, stats, date, commit, output):
f.write('\n')
def resolvepath(path):
    """Expand a leading '~' in *path* and return its absolute form."""
    expanded = os.path.expanduser(path)
    return os.path.abspath(expanded)
def parse_args():
parser = argparse.ArgumentParser()
output = parser.add_argument_group('output', 'Output file(s)')
output.add_argument('--html', dest='html', action='store',
output.add_argument('--html', dest='html', type=resolvepath,
help='HTML output file')
output.add_argument('--json', dest='json', action='store',
output.add_argument('--json', dest='json', type=resolvepath,
help='JSON output file')
packages = parser.add_mutually_exclusive_group()
packages.add_argument('-n', dest='npackages', type=int, action='store',
@@ -874,7 +1054,7 @@ def parse_args():
packages.add_argument('-p', dest='packages', action='store',
help='List of packages (comma separated)')
parser.add_argument('--nvd-path', dest='nvd_path',
help='Path to the local NVD database')
help='Path to the local NVD database', type=resolvepath)
args = parser.parse_args()
if not args.html and not args.json:
parser.error('at least one of --html or --json (or both) is required')
@@ -892,6 +1072,12 @@ def __main__():
'HEAD']).splitlines()[0].decode()
print("Build package list ...")
packages = get_pkglist(args.npackages, package_list)
print("Getting developers ...")
developers = parse_developers()
print("Build defconfig list ...")
defconfigs = get_defconfig_list()
for d in defconfigs:
d.set_developers(developers)
print("Getting package make info ...")
package_init_make_info()
print("Getting package details ...")
@@ -903,10 +1089,13 @@ def __main__():
pkg.set_check_package_warnings()
pkg.set_current_version()
pkg.set_url()
pkg.set_developers(developers)
print("Checking URL status")
check_package_urls(packages)
loop = asyncio.get_event_loop()
loop.run_until_complete(check_package_urls(packages))
print("Getting latest versions ...")
check_package_latest_version(packages)
loop = asyncio.get_event_loop()
loop.run_until_complete(check_package_latest_version(packages))
if args.nvd_path:
print("Checking packages CVEs")
check_package_cves(args.nvd_path, {p.name: p for p in packages})
@@ -917,7 +1106,7 @@ def __main__():
dump_html(packages, stats, date, commit, args.html)
if args.json:
print("Write JSON")
dump_json(packages, stats, date, commit, args.json)
dump_json(packages, defconfigs, stats, date, commit, args.json)
__main__()

View File

@@ -1,69 +1,85 @@
#!/usr/bin/env python
'''Wrapper for python2 and python3 around compileall to raise exception
when a python byte code generation failed.
"""
Byte compile all .py files from provided directories. This script is an
alternative implementation of compileall.compile_dir written with
cross-compilation in mind.
"""
Inspired from:
http://stackoverflow.com/questions/615632/how-to-detect-errors-from-compileall-compile-dir
'''
from __future__ import print_function
import sys
import py_compile
import compileall
import argparse
import os
import py_compile
import re
import sys
def check_for_errors(comparison):
'''Wrap comparison operator with code checking for PyCompileError.
If PyCompileError was raised, re-raise it again to abort execution,
otherwise perform comparison as expected.
'''
def operator(self, other):
exc_type, value, traceback = sys.exc_info()
if exc_type is not None and issubclass(exc_type,
py_compile.PyCompileError):
print("Cannot compile %s" % value.file)
raise value
def compile_one(host_path, strip_root=None):
"""
Compile a .py file into a .pyc file located next to it.
return comparison(self, other)
:arg host_path:
Absolute path to the file to compile on the host running the build.
:arg strip_root:
Prefix to remove from the original source paths encoded in compiled
files.
"""
if os.path.islink(host_path) or not os.path.isfile(host_path):
return # only compile real files
return operator
if not re.match(r"^[_A-Za-z][_A-Za-z0-9]+\.py$",
os.path.basename(host_path)):
return # only compile "importable" python modules
if strip_root is not None:
# determine the runtime path of the file (i.e.: relative path to root
# dir prepended with "/").
runtime_path = os.path.join("/", os.path.relpath(host_path, strip_root))
else:
runtime_path = host_path
# will raise an error if the file cannot be compiled
py_compile.compile(host_path, cfile=host_path + "c",
dfile=runtime_path, doraise=True)
class ReportProblem(int):
'''Class that pretends to be an int() object but implements all of its
comparison operators such that it'd detect being called in
PyCompileError handling context and abort execution
'''
VALUE = 1
def __new__(cls, *args, **kwargs):
return int.__new__(cls, ReportProblem.VALUE, **kwargs)
@check_for_errors
def __lt__(self, other):
return ReportProblem.VALUE < other
@check_for_errors
def __eq__(self, other):
return ReportProblem.VALUE == other
def __ge__(self, other):
return not self < other
def __gt__(self, other):
return not self < other and not self == other
def __ne__(self, other):
return not self == other
def existing_dir_abs(arg):
    """
    argparse type callback: reject *arg* unless it names an existing
    directory, otherwise return the directory's absolute path.
    """
    if os.path.isdir(arg):
        return os.path.abspath(arg)
    raise argparse.ArgumentTypeError('no such directory: {!r}'.format(arg))
parser = argparse.ArgumentParser(description='Compile Python source files in a directory tree.')
parser.add_argument("target", metavar='DIRECTORY',
help='Directory to scan')
parser.add_argument("--force", action='store_true',
help="Force compilation even if alread compiled")
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("dirs", metavar="DIR", nargs="+", type=existing_dir_abs,
help="Directory to recursively scan and compile")
parser.add_argument("--strip-root", metavar="ROOT", type=existing_dir_abs,
help="""
Prefix to remove from the original source paths encoded
in compiled files
""")
args = parser.parse_args()
args = parser.parse_args()
compileall.compile_dir(args.target, force=args.force, quiet=ReportProblem())
try:
for d in args.dirs:
if args.strip_root and ".." in os.path.relpath(d, args.strip_root):
parser.error("DIR: not inside ROOT dir: {!r}".format(d))
for parent, _, files in os.walk(d):
for f in files:
compile_one(os.path.join(parent, f), args.strip_root)
except Exception as e:
print("error: {}".format(e))
return 1
return 0
if __name__ == "__main__":
sys.exit(main())

View File

@@ -19,19 +19,14 @@ cd "${1:-.}" || usage
# Check for git and a git repo.
if head=`git rev-parse --verify --short HEAD 2>/dev/null`; then
# If we are at a tagged commit (like "v2.6.30-rc6"), we ignore it,
# because this version is defined in the top level Makefile.
if [ -z "`git describe --exact-match 2>/dev/null`" ]; then
atag="`git describe 2>/dev/null`"
# If we are past a tagged commit (like "v2.6.30-rc5-302-g72357d5"),
# we pretty print it.
if atag="`git describe 2>/dev/null`"; then
echo "$atag" | awk -F- '{printf("-%05d-%s", $(NF-1),$(NF))}'
# If we don't have a tag at all we print -g{commitish}.
else
printf '%s%s' -g $head
fi
# Show -g<commit> if we have no tag, or just the tag
# otherwise.
if [ -z "${atag}" ] ; then
printf "%s%s" -g ${head}
else
printf ${atag}
fi
# Is this git on svn?
@@ -53,13 +48,29 @@ if head=`git rev-parse --verify --short HEAD 2>/dev/null`; then
fi
# Check for mercurial and a mercurial repo.
# In the git case, 'git describe' will show the latest tag, and unless we are
# exactly on that tag, the number of commits since then, and last commit id.
# Mimic something similar in the Mercurial case.
if hgid=`HGRCPATH= hg id --id --tags 2>/dev/null`; then
tag=`printf '%s' "$hgid" | cut -d' ' -f2 --only-delimited`
# Do we have an untagged version?
if [ -z "$tag" -o "$tag" = tip ]; then
# current revision is not tagged, determine latest tag
latesttag=`HGRCPATH= hg log -r. -T '{latesttag}' 2>/dev/null`
# In case there is more than one tag on the latest tagged commit,
# 'latesttag' will separate them by colon (:). We'll retain this.
# In case there is no tag at all, 'null' will be returned.
if [ "$latesttag" = "null" ]; then
latesttag=''
fi
# add the commit id
id=`printf '%s' "$hgid" | sed 's/[+ ].*//'`
printf '%s%s' -hg "$id"
printf '%s%s%s' "${latesttag}" -hg "$id"
else
# current revision is tagged, just print the tag
printf ${tag}
fi
# Are there uncommitted changes?