Update buildroot 2020.02.01 (#622)
* Update buildroot 2020.02.01 Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch> * Fix LN * Fix wpa Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch> * Fix lint Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch> * fix-network Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch> * Fix script Signed-off-by: Pascal Vizeli <pvizeli@syshack.ch>
This commit is contained in:
@@ -23,14 +23,21 @@ import os
|
||||
from collections import defaultdict
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
import requests # URL checking
|
||||
import json
|
||||
import ijson
|
||||
import certifi
|
||||
import distutils.version
|
||||
import time
|
||||
import gzip
|
||||
from urllib3 import HTTPSConnectionPool
|
||||
from urllib3.exceptions import HTTPError
|
||||
from multiprocessing import Pool
|
||||
|
||||
NVD_START_YEAR = 2002
|
||||
NVD_JSON_VERSION = "1.0"
|
||||
NVD_BASE_URL = "https://nvd.nist.gov/feeds/json/cve/" + NVD_JSON_VERSION
|
||||
|
||||
INFRA_RE = re.compile(r"\$\(eval \$\(([a-z-]*)-package\)\)")
|
||||
URL_RE = re.compile(r"\s*https?://\S*\s*$")
|
||||
|
||||
@@ -39,11 +46,16 @@ RM_API_STATUS_FOUND_BY_DISTRO = 2
|
||||
RM_API_STATUS_FOUND_BY_PATTERN = 3
|
||||
RM_API_STATUS_NOT_FOUND = 4
|
||||
|
||||
# Used to make multiple requests to the same host. It is global
|
||||
# because it's used by sub-processes.
|
||||
http_pool = None
|
||||
|
||||
|
||||
class Package:
|
||||
all_licenses = list()
|
||||
all_license_files = list()
|
||||
all_versions = dict()
|
||||
all_ignored_cves = dict()
|
||||
|
||||
def __init__(self, name, path):
|
||||
self.name = name
|
||||
@@ -58,6 +70,7 @@ class Package:
|
||||
self.url = None
|
||||
self.url_status = None
|
||||
self.url_worker = None
|
||||
self.cves = list()
|
||||
self.latest_version = (RM_API_STATUS_ERROR, None, None)
|
||||
|
||||
def pkgvar(self):
|
||||
@@ -144,11 +157,17 @@ class Package:
|
||||
o = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[1]
|
||||
lines = o.splitlines()
|
||||
for line in lines:
|
||||
m = re.match("^([0-9]*) warnings generated", line)
|
||||
m = re.match("^([0-9]*) warnings generated", line.decode())
|
||||
if m:
|
||||
self.warnings = int(m.group(1))
|
||||
return
|
||||
|
||||
def is_cve_ignored(self, cve):
|
||||
"""
|
||||
Tells if the CVE is ignored by the package
|
||||
"""
|
||||
return cve in self.all_ignored_cves.get(self.pkgvar(), [])
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.path == other.path
|
||||
|
||||
@@ -160,6 +179,113 @@ class Package:
|
||||
(self.name, self.path, self.has_license, self.has_license_files, self.has_hash, self.patch_count)
|
||||
|
||||
|
||||
class CVE:
|
||||
"""An accessor class for CVE Items in NVD files"""
|
||||
def __init__(self, nvd_cve):
|
||||
"""Initialize a CVE from its NVD JSON representation"""
|
||||
self.nvd_cve = nvd_cve
|
||||
|
||||
@staticmethod
|
||||
def download_nvd_year(nvd_path, year):
|
||||
metaf = "nvdcve-%s-%s.meta" % (NVD_JSON_VERSION, year)
|
||||
path_metaf = os.path.join(nvd_path, metaf)
|
||||
jsonf_gz = "nvdcve-%s-%s.json.gz" % (NVD_JSON_VERSION, year)
|
||||
path_jsonf_gz = os.path.join(nvd_path, jsonf_gz)
|
||||
|
||||
# If the database file is less than a day old, we assume the NVD data
|
||||
# locally available is recent enough.
|
||||
if os.path.exists(path_jsonf_gz) and os.stat(path_jsonf_gz).st_mtime >= time.time() - 86400:
|
||||
return path_jsonf_gz
|
||||
|
||||
# If not, we download the meta file
|
||||
url = "%s/%s" % (NVD_BASE_URL, metaf)
|
||||
print("Getting %s" % url)
|
||||
page_meta = requests.get(url)
|
||||
page_meta.raise_for_status()
|
||||
|
||||
# If the meta file already existed, we compare the existing
|
||||
# one with the data newly downloaded. If they are different,
|
||||
# we need to re-download the database.
|
||||
# If the database does not exist locally, we need to redownload it in
|
||||
# any case.
|
||||
if os.path.exists(path_metaf) and os.path.exists(path_jsonf_gz):
|
||||
meta_known = open(path_metaf, "r").read()
|
||||
if page_meta.text == meta_known:
|
||||
return path_jsonf_gz
|
||||
|
||||
# Grab the compressed JSON NVD, and write files to disk
|
||||
url = "%s/%s" % (NVD_BASE_URL, jsonf_gz)
|
||||
print("Getting %s" % url)
|
||||
page_json = requests.get(url)
|
||||
page_json.raise_for_status()
|
||||
open(path_jsonf_gz, "wb").write(page_json.content)
|
||||
open(path_metaf, "w").write(page_meta.text)
|
||||
return path_jsonf_gz
|
||||
|
||||
@classmethod
|
||||
def read_nvd_dir(cls, nvd_dir):
|
||||
"""
|
||||
Iterate over all the CVEs contained in NIST Vulnerability Database
|
||||
feeds since NVD_START_YEAR. If the files are missing or outdated in
|
||||
nvd_dir, a fresh copy will be downloaded, and kept in .json.gz
|
||||
"""
|
||||
for year in range(NVD_START_YEAR, datetime.datetime.now().year + 1):
|
||||
filename = CVE.download_nvd_year(nvd_dir, year)
|
||||
try:
|
||||
content = ijson.items(gzip.GzipFile(filename), 'CVE_Items.item')
|
||||
except:
|
||||
print("ERROR: cannot read %s. Please remove the file then rerun this script" % filename)
|
||||
raise
|
||||
for cve in content:
|
||||
yield cls(cve['cve'])
|
||||
|
||||
def each_product(self):
|
||||
"""Iterate over each product section of this cve"""
|
||||
for vendor in self.nvd_cve['affects']['vendor']['vendor_data']:
|
||||
for product in vendor['product']['product_data']:
|
||||
yield product
|
||||
|
||||
@property
|
||||
def identifier(self):
|
||||
"""The CVE unique identifier"""
|
||||
return self.nvd_cve['CVE_data_meta']['ID']
|
||||
|
||||
@property
|
||||
def pkg_names(self):
|
||||
"""The set of package names referred by this CVE definition"""
|
||||
return set(p['product_name'] for p in self.each_product())
|
||||
|
||||
def affects(self, br_pkg):
|
||||
"""
|
||||
True if the Buildroot Package object passed as argument is affected
|
||||
by this CVE.
|
||||
"""
|
||||
if br_pkg.is_cve_ignored(self.identifier):
|
||||
return False
|
||||
|
||||
for product in self.each_product():
|
||||
if product['product_name'] != br_pkg.name:
|
||||
continue
|
||||
|
||||
for v in product['version']['version_data']:
|
||||
if v["version_affected"] == "=":
|
||||
if br_pkg.current_version == v["version_value"]:
|
||||
return True
|
||||
elif v["version_affected"] == "<=":
|
||||
pkg_version = distutils.version.LooseVersion(br_pkg.current_version)
|
||||
if not hasattr(pkg_version, "version"):
|
||||
print("Cannot parse package '%s' version '%s'" % (br_pkg.name, br_pkg.current_version))
|
||||
continue
|
||||
cve_affected_version = distutils.version.LooseVersion(v["version_value"])
|
||||
if not hasattr(cve_affected_version, "version"):
|
||||
print("Cannot parse CVE affected version '%s'" % v["version_value"])
|
||||
continue
|
||||
return pkg_version <= cve_affected_version
|
||||
else:
|
||||
print("version_affected: %s" % v['version_affected'])
|
||||
return False
|
||||
|
||||
|
||||
def get_pkglist(npackages, package_list):
|
||||
"""
|
||||
Builds the list of Buildroot packages, returning a list of Package
|
||||
@@ -222,70 +348,45 @@ def get_pkglist(npackages, package_list):
|
||||
|
||||
|
||||
def package_init_make_info():
    """Fill in the class-wide Package tables from "make printvars" output.

    Populates Package.all_licenses, Package.all_license_files,
    Package.all_versions and Package.all_ignored_cves using a single
    make invocation.
    """
    # Fetch all variables at once
    variables = subprocess.check_output(["make", "BR2_HAVE_DOT_CONFIG=y", "-s", "printvars",
                                         "VARS=%_LICENSE %_LICENSE_FILES %_VERSION %_IGNORE_CVES"])
    variable_list = variables.decode().splitlines()

    # We process first the host package variables, and then the target
    # package variables. This means that if a package exists in both
    # target and host variants, with different values (eg. version
    # numbers (unlikely)), we'll report the target one.
    variable_list = [x[5:] for x in variable_list if x.startswith("HOST_")] + \
                    [x for x in variable_list if not x.startswith("HOST_")]

    for l in variable_list:
        # Get variable name and value; split only on the first '=' so
        # that values containing an '=' are kept intact.
        pkgvar, value = l.split("=", 1)

        # Strip the suffix according to the variable
        if pkgvar.endswith("_LICENSE"):
            # If value is "unknown", no license details available
            if value == "unknown":
                continue
            pkgvar = pkgvar[:-8]
            Package.all_licenses.append(pkgvar)

        elif pkgvar.endswith("_LICENSE_FILES"):
            if pkgvar.endswith("_MANIFEST_LICENSE_FILES"):
                continue
            pkgvar = pkgvar[:-14]
            Package.all_license_files.append(pkgvar)

        elif pkgvar.endswith("_VERSION"):
            if pkgvar.endswith("_DL_VERSION"):
                continue
            pkgvar = pkgvar[:-8]
            Package.all_versions[pkgvar] = value

        elif pkgvar.endswith("_IGNORE_CVES"):
            pkgvar = pkgvar[:-12]
            Package.all_ignored_cves[pkgvar] = value.split()
|
||||
|
||||
|
||||
def check_url_status_worker(url, url_status):
|
||||
@@ -301,11 +402,13 @@ def check_url_status_worker(url, url_status):
|
||||
|
||||
|
||||
def check_package_urls(packages):
    """Check the upstream URL of each package, in parallel.

    Fills in the .url_status field of each Package using a pool of
    worker processes; the transient .url_worker handle is removed once
    its result has been collected.
    """
    pool = Pool(processes=64)
    for pkg in packages:
        pkg.url_worker = pool.apply_async(check_url_status_worker, (pkg.url, pkg.url_status))
    for pkg in packages:
        pkg.url_status = pkg.url_worker.get(timeout=3600)
        # Drop the worker handle: it is not picklable/serializable and
        # is no longer needed.
        del pkg.url_worker
    pool.terminate()
|
||||
|
||||
|
||||
def release_monitoring_get_latest_version_by_distro(pool, name):
|
||||
@@ -346,6 +449,15 @@ def release_monitoring_get_latest_version_by_guess(pool, name):
|
||||
return (RM_API_STATUS_NOT_FOUND, None, None)
|
||||
|
||||
|
||||
def check_package_latest_version_worker(name):
    """Try the distro-based lookup first, then fall back to guessing."""
    print(name)
    result = release_monitoring_get_latest_version_by_distro(http_pool, name)
    if result[0] != RM_API_STATUS_NOT_FOUND:
        return result
    return release_monitoring_get_latest_version_by_guess(http_pool, name)
|
||||
|
||||
|
||||
def check_package_latest_version(packages):
|
||||
"""
|
||||
Fills in the .latest_version field of all Package objects
|
||||
@@ -361,18 +473,26 @@ def check_package_latest_version(packages):
|
||||
- id: string containing the id of the project corresponding to this
|
||||
package, as known by release-monitoring.org
|
||||
"""
|
||||
pool = HTTPSConnectionPool('release-monitoring.org', port=443,
|
||||
cert_reqs='CERT_REQUIRED', ca_certs=certifi.where(),
|
||||
timeout=30)
|
||||
count = 0
|
||||
for pkg in packages:
|
||||
v = release_monitoring_get_latest_version_by_distro(pool, pkg.name)
|
||||
if v[0] == RM_API_STATUS_NOT_FOUND:
|
||||
v = release_monitoring_get_latest_version_by_guess(pool, pkg.name)
|
||||
global http_pool
|
||||
http_pool = HTTPSConnectionPool('release-monitoring.org', port=443,
|
||||
cert_reqs='CERT_REQUIRED', ca_certs=certifi.where(),
|
||||
timeout=30)
|
||||
worker_pool = Pool(processes=64)
|
||||
results = worker_pool.map(check_package_latest_version_worker, (pkg.name for pkg in packages))
|
||||
for pkg, r in zip(packages, results):
|
||||
pkg.latest_version = r
|
||||
worker_pool.terminate()
|
||||
del http_pool
|
||||
|
||||
pkg.latest_version = v
|
||||
print("[%d/%d] Package %s" % (count, len(packages), pkg.name))
|
||||
count += 1
|
||||
|
||||
def check_package_cves(nvd_path, packages):
    """Fill in the .cves list of each affected package.

    packages is a dict mapping package names to Package objects. The
    NVD database is downloaded/refreshed under nvd_path as needed.
    """
    # Create the local NVD cache directory on first run.
    os.makedirs(nvd_path, exist_ok=True)

    for cve in CVE.read_nvd_dir(nvd_path):
        for pkg_name in cve.pkg_names:
            if pkg_name in packages and cve.affects(packages[pkg_name]):
                packages[pkg_name].cves.append(cve.identifier)
|
||||
|
||||
|
||||
def calculate_stats(packages):
|
||||
@@ -410,6 +530,9 @@ def calculate_stats(packages):
|
||||
else:
|
||||
stats["version-not-uptodate"] += 1
|
||||
stats["patches"] += pkg.patch_count
|
||||
stats["total-cves"] += len(pkg.cves)
|
||||
if len(pkg.cves) != 0:
|
||||
stats["pkg-cves"] += 1
|
||||
return stats
|
||||
|
||||
|
||||
@@ -621,6 +744,17 @@ def dump_html_pkg(f, pkg):
|
||||
f.write(" <td class=\"%s\">%s</td>\n" %
|
||||
(" ".join(td_class), url_str))
|
||||
|
||||
# CVEs
|
||||
td_class = ["centered"]
|
||||
if len(pkg.cves) == 0:
|
||||
td_class.append("correct")
|
||||
else:
|
||||
td_class.append("wrong")
|
||||
f.write(" <td class=\"%s\">\n" % " ".join(td_class))
|
||||
for cve in pkg.cves:
|
||||
f.write(" <a href=\"https://security-tracker.debian.org/tracker/%s\">%s<br/>\n" % (cve, cve))
|
||||
f.write(" </td>\n")
|
||||
|
||||
f.write(" </tr>\n")
|
||||
|
||||
|
||||
@@ -638,6 +772,7 @@ def dump_html_all_pkgs(f, packages):
|
||||
<td class=\"centered\">Latest version</td>
|
||||
<td class=\"centered\">Warnings</td>
|
||||
<td class=\"centered\">Upstream URL</td>
|
||||
<td class=\"centered\">CVEs</td>
|
||||
</tr>
|
||||
""")
|
||||
for pkg in sorted(packages):
|
||||
@@ -676,46 +811,85 @@ def dump_html_stats(f, stats):
|
||||
stats["version-not-uptodate"])
|
||||
f.write("<tr><td>Packages with no known upstream version</td><td>%s</td></tr>\n" %
|
||||
stats["version-unknown"])
|
||||
f.write("<tr><td>Packages affected by CVEs</td><td>%s</td></tr>\n" %
|
||||
stats["pkg-cves"])
|
||||
f.write("<tr><td>Total number of CVEs affecting all packages</td><td>%s</td></tr>\n" %
|
||||
stats["total-cves"])
|
||||
f.write("</table>\n")
|
||||
|
||||
|
||||
def dump_html_gen_info(f, date, commit):
    """Write the generation date and git commit to the HTML report.

    Renders e.g. "Updated on 2018-02-19 08:12:08, git commit aa77030b8f5e...".
    """
    f.write("<p><i>Updated on %s, git commit %s</i></p>\n" % (str(date), commit))
|
||||
|
||||
|
||||
def dump_html(packages, stats, date, commit, output):
    """Write the full HTML report to the file named output.

    Combines the per-package table, the statistics table and the
    generation info between the shared header and footer.
    """
    with open(output, 'w') as f:
        f.write(html_header)
        dump_html_all_pkgs(f, packages)
        dump_html_stats(f, stats)
        dump_html_gen_info(f, date, commit)
        f.write(html_footer)
||||
|
||||
|
||||
def dump_json(packages, stats, date, commit, output):
    """Write the JSON report to the file named output.

    packages is the list of Package objects, stats the statistics dict;
    date and commit identify the tree the report was generated from.
    """
    # Format packages as a dictionary keyed by name instead of a list.
    # Exclude local fields that do not contain real data ('name' becomes
    # the key, 'url_worker' is a transient process-pool handle).
    excluded_fields = ['url_worker', 'name']
    pkgs = {
        pkg.name: {
            k: v
            for k, v in pkg.__dict__.items()
            if k not in excluded_fields
        } for pkg in packages
    }
    # Aggregate infrastructures into a single dict entry
    statistics = {
        k: v
        for k, v in stats.items()
        if not k.startswith('infra-')
    }
    statistics['infra'] = {k[6:]: v for k, v in stats.items() if k.startswith('infra-')}
    # The actual structure to dump, add commit and date to it
    final = {'packages': pkgs,
             'stats': statistics,
             'commit': commit,
             'date': str(date)}

    with open(output, 'w') as f:
        json.dump(final, f, indent=2, separators=(',', ': '))
        f.write('\n')
|
||||
|
||||
|
||||
def parse_args():
    """Parse command line options.

    At least one of --html or --json must be given; -n (number of
    packages) and -p (explicit package list) are mutually exclusive.
    """
    parser = argparse.ArgumentParser()
    output = parser.add_argument_group('output', 'Output file(s)')
    output.add_argument('--html', dest='html', action='store',
                        help='HTML output file')
    output.add_argument('--json', dest='json', action='store',
                        help='JSON output file')
    packages = parser.add_mutually_exclusive_group()
    packages.add_argument('-n', dest='npackages', type=int, action='store',
                          help='Number of packages')
    packages.add_argument('-p', dest='packages', action='store',
                          help='List of packages (comma separated)')
    parser.add_argument('--nvd-path', dest='nvd_path',
                        help='Path to the local NVD database')
    args = parser.parse_args()
    if not args.html and not args.json:
        parser.error('at least one of --html or --json (or both) is required')
    return args
|
||||
|
||||
|
||||
def __main__():
|
||||
args = parse_args()
|
||||
if args.npackages and args.packages:
|
||||
print("ERROR: -n and -p are mutually exclusive")
|
||||
sys.exit(1)
|
||||
if args.packages:
|
||||
package_list = args.packages.split(",")
|
||||
else:
|
||||
package_list = None
|
||||
date = datetime.datetime.utcnow()
|
||||
commit = subprocess.check_output(['git', 'rev-parse',
|
||||
'HEAD']).splitlines()[0].decode()
|
||||
print("Build package list ...")
|
||||
packages = get_pkglist(args.npackages, package_list)
|
||||
print("Getting package make info ...")
|
||||
@@ -733,10 +907,17 @@ def __main__():
|
||||
check_package_urls(packages)
|
||||
print("Getting latest versions ...")
|
||||
check_package_latest_version(packages)
|
||||
if args.nvd_path:
|
||||
print("Checking packages CVEs")
|
||||
check_package_cves(args.nvd_path, {p.name: p for p in packages})
|
||||
print("Calculate stats")
|
||||
stats = calculate_stats(packages)
|
||||
print("Write HTML")
|
||||
dump_html(packages, stats, args.output)
|
||||
if args.html:
|
||||
print("Write HTML")
|
||||
dump_html(packages, stats, date, commit, args.html)
|
||||
if args.json:
|
||||
print("Write JSON")
|
||||
dump_json(packages, stats, date, commit, args.json)
|
||||
|
||||
|
||||
__main__()
|
||||
|
||||
Reference in New Issue
Block a user