Bump buildroot to 2020.11-rc1 (#985)
* Update buildroot-patches for 2020.11-rc1 buildroot * Update buildroot to 2020.11-rc1 Signed-off-by: Stefan Agner <stefan@agner.ch> * Don't rely on sfdisk --list-free output The --list-free (-F) argument does not allow machine readable mode. And it seems that the output format changes over time (different spacing, using size postfixes instead of raw blocks). Use sfdisk json output and calculate free partition space ourselves. This works for 2.35 and 2.36 and is more robust since we rely on output which is meant for scripts to parse. * Migrate defconfigs for Buildroot 2020.11-rc1 In particular, rename BR2_TARGET_UBOOT_BOOT_SCRIPT(_SOURCE) to BR2_PACKAGE_HOST_UBOOT_TOOLS_BOOT_SCRIPT(_SOURCE). * Rebase/remove systemd patches for systemd 246 * Drop apparmor/libapparmor from buildroot-external * hassos-persists: use /run as directory for lockfiles The U-Boot tools use /var/lock by default which is not created any more by systemd by default (it is under tmpfiles legacy.conf, which we no longer install). * Disable systemd-update-done.service The service is not suited for pure read-only systems. In particular the service needs to be able to write a file in /etc and /var. Remove the service. Note: This is a static service and cannot be removed using systemd-preset. * Disable apparmor.service for now The service loads all default profiles. Some might actually cause problems. E.g. the profile for ping seems not to match our setup for /etc/resolv.conf: [85503.634653] audit: type=1400 audit(1605286002.684:236): apparmor="DENIED" operation="open" profile="ping" name="/run/resolv.conf" pid=27585 comm="ping" requested_mask="r" denied_mask="r" fsuid=0 ouid=0
This commit is contained in:
@@ -25,20 +25,13 @@ import os
|
||||
from collections import defaultdict
|
||||
import re
|
||||
import subprocess
|
||||
import requests # NVD database download
|
||||
import json
|
||||
import ijson
|
||||
import distutils.version
|
||||
import time
|
||||
import gzip
|
||||
import sys
|
||||
|
||||
sys.path.append('utils/')
|
||||
from getdeveloperlib import parse_developers # noqa: E402
|
||||
import cve as cvecheck # noqa: E402
|
||||
|
||||
NVD_START_YEAR = 2002
|
||||
NVD_JSON_VERSION = "1.0"
|
||||
NVD_BASE_URL = "https://nvd.nist.gov/feeds/json/cve/" + NVD_JSON_VERSION
|
||||
|
||||
INFRA_RE = re.compile(r"\$\(eval \$\(([a-z-]*)-package\)\)")
|
||||
URL_RE = re.compile(r"\s*https?://\S*\s*$")
|
||||
@@ -48,10 +41,6 @@ RM_API_STATUS_FOUND_BY_DISTRO = 2
|
||||
RM_API_STATUS_FOUND_BY_PATTERN = 3
|
||||
RM_API_STATUS_NOT_FOUND = 4
|
||||
|
||||
CVE_AFFECTS = 1
|
||||
CVE_DOESNT_AFFECT = 2
|
||||
CVE_UNKNOWN = 3
|
||||
|
||||
|
||||
class Defconfig:
|
||||
def __init__(self, name, path):
|
||||
@@ -244,11 +233,12 @@ class Package:
|
||||
self.status['pkg-check'] = ("error", "{} warnings".format(self.warnings))
|
||||
return
|
||||
|
||||
def is_cve_ignored(self, cve):
|
||||
@property
|
||||
def ignored_cves(self):
|
||||
"""
|
||||
Tells if the CVE is ignored by the package
|
||||
Give the list of CVEs ignored by the package
|
||||
"""
|
||||
return cve in self.all_ignored_cves.get(self.pkgvar(), [])
|
||||
return list(self.all_ignored_cves.get(self.pkgvar(), []))
|
||||
|
||||
def set_developers(self, developers):
|
||||
"""
|
||||
@@ -280,120 +270,6 @@ class Package:
|
||||
self.is_status_ok('license-files'), self.status['hash'], self.patch_count)
|
||||
|
||||
|
||||
class CVE:
    """An accessor class for CVE Items in NVD JSON 1.0 feed files.

    Wraps one entry of the ``CVE_Items`` array (its inner ``cve`` dict) and
    exposes convenient accessors plus the download/parse machinery for the
    yearly NVD feed files.
    """

    def __init__(self, nvd_cve):
        """Initialize a CVE from its NVD JSON representation (the 'cve' dict)."""
        self.nvd_cve = nvd_cve

    @staticmethod
    def download_nvd_year(nvd_path, year):
        """Return the local path of the gzipped NVD feed for *year*.

        Downloads (or refreshes) the feed under *nvd_path* when the local
        copy is missing or stale; otherwise returns the cached file.
        """
        metaf = "nvdcve-%s-%s.meta" % (NVD_JSON_VERSION, year)
        path_metaf = os.path.join(nvd_path, metaf)
        jsonf_gz = "nvdcve-%s-%s.json.gz" % (NVD_JSON_VERSION, year)
        path_jsonf_gz = os.path.join(nvd_path, jsonf_gz)

        # If the database file is less than a day old, we assume the NVD data
        # locally available is recent enough.
        if os.path.exists(path_jsonf_gz) and os.stat(path_jsonf_gz).st_mtime >= time.time() - 86400:
            return path_jsonf_gz

        # If not, we download the meta file
        url = "%s/%s" % (NVD_BASE_URL, metaf)
        print("Getting %s" % url)
        page_meta = requests.get(url)
        page_meta.raise_for_status()

        # If the meta file already existed, we compare the existing
        # one with the data newly downloaded. If they are different,
        # we need to re-download the database.
        # If the database does not exist locally, we need to redownload it in
        # any case.
        if os.path.exists(path_metaf) and os.path.exists(path_jsonf_gz):
            # Context manager so the descriptor is closed (the previous
            # open(...).read() form leaked the file handle).
            with open(path_metaf, "r") as f:
                meta_known = f.read()
            if page_meta.text == meta_known:
                return path_jsonf_gz

        # Grab the compressed JSON NVD, and write files to disk
        url = "%s/%s" % (NVD_BASE_URL, jsonf_gz)
        print("Getting %s" % url)
        page_json = requests.get(url)
        page_json.raise_for_status()
        # Write the JSON before the meta file: if the JSON download is
        # interrupted, a stale meta file would otherwise make us believe
        # the local database is up to date.
        with open(path_jsonf_gz, "wb") as f:
            f.write(page_json.content)
        with open(path_metaf, "w") as f:
            f.write(page_meta.text)
        return path_jsonf_gz

    @classmethod
    def read_nvd_dir(cls, nvd_dir):
        """
        Iterate over all the CVEs contained in NIST Vulnerability Database
        feeds since NVD_START_YEAR. If the files are missing or outdated in
        nvd_dir, a fresh copy will be downloaded, and kept in .json.gz
        """
        for year in range(NVD_START_YEAR, datetime.datetime.now().year + 1):
            filename = CVE.download_nvd_year(nvd_dir, year)
            try:
                # Stream-parse the feed: ijson avoids loading the whole
                # (large) JSON document into memory at once.
                content = ijson.items(gzip.GzipFile(filename), 'CVE_Items.item')
            except:  # noqa: E722
                print("ERROR: cannot read %s. Please remove the file then rerun this script" % filename)
                raise
            for cve in content:
                yield cls(cve['cve'])

    def each_product(self):
        """Iterate over each product section of this cve"""
        for vendor in self.nvd_cve['affects']['vendor']['vendor_data']:
            for product in vendor['product']['product_data']:
                yield product

    @property
    def identifier(self):
        """The CVE unique identifier"""
        return self.nvd_cve['CVE_data_meta']['ID']

    @property
    def pkg_names(self):
        """The set of package names referred by this CVE definition"""
        return set(p['product_name'] for p in self.each_product())

    def affects(self, br_pkg):
        """
        Tell whether the Buildroot Package object passed as argument is
        affected by this CVE.

        Returns one of CVE_AFFECTS, CVE_DOESNT_AFFECT or CVE_UNKNOWN.
        """
        if br_pkg.is_cve_ignored(self.identifier):
            return CVE_DOESNT_AFFECT

        for product in self.each_product():
            if product['product_name'] != br_pkg.name:
                continue

            for v in product['version']['version_data']:
                if v["version_affected"] == "=":
                    # Exact match on the package's current version string.
                    if br_pkg.current_version == v["version_value"]:
                        return CVE_AFFECTS
                elif v["version_affected"] == "<=":
                    # NOTE(review): LooseVersion is part of the deprecated
                    # distutils module; packaging.version would be the modern
                    # replacement, but that is a new dependency.
                    pkg_version = distutils.version.LooseVersion(br_pkg.current_version)
                    if not hasattr(pkg_version, "version"):
                        print("Cannot parse package '%s' version '%s'" % (br_pkg.name, br_pkg.current_version))
                        continue
                    cve_affected_version = distutils.version.LooseVersion(v["version_value"])
                    if not hasattr(cve_affected_version, "version"):
                        print("Cannot parse CVE affected version '%s'" % v["version_value"])
                        continue
                    try:
                        affected = pkg_version <= cve_affected_version
                    except TypeError:
                        # LooseVersion comparison can fail on mixed
                        # numeric/alphanumeric components.
                        return CVE_UNKNOWN
                    if affected:
                        return CVE_AFFECTS
                    else:
                        return CVE_DOESNT_AFFECT
                else:
                    print("version_affected: %s" % v['version_affected'])
        return CVE_DOESNT_AFFECT
|
||||
|
||||
|
||||
def get_pkglist(npackages, package_list):
|
||||
"""
|
||||
Builds the list of Buildroot packages, returning a list of Package
|
||||
@@ -417,7 +293,6 @@ def get_pkglist(npackages, package_list):
|
||||
"package/x11r7/x11r7.mk",
|
||||
"package/doc-asciidoc.mk",
|
||||
"package/pkg-.*.mk",
|
||||
"package/nvidia-tegra23/nvidia-tegra23.mk",
|
||||
"toolchain/toolchain-external/pkg-toolchain-external.mk",
|
||||
"toolchain/toolchain-external/toolchain-external.mk",
|
||||
"toolchain/toolchain.mk",
|
||||
@@ -656,10 +531,12 @@ def check_package_cves(nvd_path, packages):
|
||||
if not os.path.isdir(nvd_path):
|
||||
os.makedirs(nvd_path)
|
||||
|
||||
for cve in CVE.read_nvd_dir(nvd_path):
|
||||
for cve in cvecheck.CVE.read_nvd_dir(nvd_path):
|
||||
for pkg_name in cve.pkg_names:
|
||||
if pkg_name in packages and cve.affects(packages[pkg_name]) == CVE_AFFECTS:
|
||||
packages[pkg_name].cves.append(cve.identifier)
|
||||
if pkg_name in packages:
|
||||
pkg = packages[pkg_name]
|
||||
if cve.affects(pkg.name, pkg.current_version, pkg.ignored_cves) == cve.CVE_AFFECTS:
|
||||
pkg.cves.append(cve.identifier)
|
||||
|
||||
|
||||
def calculate_stats(packages):
|
||||
|
||||
Reference in New Issue
Block a user