diff options
Diffstat (limited to 'support/scripts')
-rwxr-xr-x | support/scripts/apply-patches.sh | 9
-rwxr-xr-x | support/scripts/boot-qemu-image.py | 10
-rwxr-xr-x | support/scripts/br2-external | 18
-rw-r--r-- | support/scripts/cpedb.py | 174
-rwxr-xr-x | support/scripts/cve-checker | 196
-rwxr-xr-x | support/scripts/cve.py | 50
-rwxr-xr-x | support/scripts/gen-bootlin-toolchains | 34
-rwxr-xr-x | support/scripts/generate-gitlab-ci-yml | 141
-rwxr-xr-x | support/scripts/pkg-stats | 219
-rwxr-xr-x | support/scripts/setlocalversion | 16
10 files changed, 527 insertions, 340 deletions
diff --git a/support/scripts/apply-patches.sh b/support/scripts/apply-patches.sh index 66fef262ee..9fb488c570 100755 --- a/support/scripts/apply-patches.sh +++ b/support/scripts/apply-patches.sh @@ -113,13 +113,13 @@ function apply_patch { echo " to be applied : ${path}/${patch}" exit 1 fi - if grep -q "^rename from" ${path}/${patch} && \ - grep -q "^rename to" ${path}/${patch} ; then + if ${uncomp} "${path}/$patch" | grep -q "^rename from" && \ + ${uncomp} "${path}/$patch" | grep -q "^rename to" ; then echo "Error: patch contains some renames, not supported by old patch versions" exit 1 fi echo "${path}/${patch}" >> ${builddir}/.applied_patches_list - ${uncomp} "${path}/$patch" | patch -g0 -p1 -E -d "${builddir}" -t -N $silent + ${uncomp} "${path}/$patch" | patch -g0 -p1 -E --no-backup-if-mismatch -d "${builddir}" -t -N $silent if [ $? != 0 ] ; then echo "Patch failed! Please fix ${patch}!" exit 1 @@ -168,6 +168,3 @@ if [ "`find $builddir/ '(' -name '*.rej' -o -name '.*.rej' ')' -print`" ] ; then echo "Aborting. Reject files found." 
exit 1 fi - -# Remove backup files -find $builddir/ '(' -name '*.orig' -o -name '.*.orig' ')' -exec rm -f {} \; diff --git a/support/scripts/boot-qemu-image.py b/support/scripts/boot-qemu-image.py index dbbba552ad..0d4ad825fc 100755 --- a/support/scripts/boot-qemu-image.py +++ b/support/scripts/boot-qemu-image.py @@ -18,6 +18,10 @@ def main(): if not sys.argv[1].startswith('qemu_'): sys.exit(0) + if not os.path.exists('output/images/start-qemu.sh'): + print('qemu-start.sh is missing, cannot test.') + sys.exit(0) + qemu_start = os.path.join(os.getcwd(), 'output/images/start-qemu.sh') child = pexpect.spawn(qemu_start, ['serial-only'], @@ -32,7 +36,7 @@ def main(): time.sleep(1) try: - child.expect(["buildroot login:", pexpect.TIMEOUT], timeout=60) + child.expect(["buildroot login:"], timeout=60) except pexpect.EOF as e: # Some emulations require a fork of qemu-system, which may be # missing on the system, and is not provided by Buildroot. @@ -54,7 +58,7 @@ def main(): child.sendline("root\r") try: - child.expect(["# ", pexpect.TIMEOUT], timeout=60) + child.expect(["# "], timeout=60) except pexpect.EOF: print("Cannot connect to shell") sys.exit(1) @@ -65,7 +69,7 @@ def main(): child.sendline("poweroff\r") try: - child.expect(["System halted", pexpect.TIMEOUT], timeout=60) + child.expect(["System halted"], timeout=60) child.expect(pexpect.EOF) except pexpect.EOF: pass diff --git a/support/scripts/br2-external b/support/scripts/br2-external index ededd2d900..56dc6f53ca 100755 --- a/support/scripts/br2-external +++ b/support/scripts/br2-external @@ -161,6 +161,8 @@ do_kconfig() { toolchains jpeg openssl + skeleton + init ) for br2 in "${items[@]}"; do @@ -224,6 +226,22 @@ do_kconfig() { else printf '# No openssl from: %s\n\n' "${br2_desc}" fi >>"${outputdir}/.br2-external.in.openssl" + + if [ -f "${br2_ext}/provides/skeleton.in" ]; then + printf 'comment "skeleton from: %s"\n' "${br2_desc}" + printf 'source "%s/provides/skeleton.in"\n' "${br2_ext}" + printf '\n' + else + 
printf '# No skeleton from: %s\n\n' "${br2_desc}" + fi >>"${outputdir}/.br2-external.in.skeleton" + + if [ -f "${br2_ext}/provides/init.in" ]; then + printf 'comment "init from: %s"\n' "${br2_desc}" + printf 'source "%s/provides/init.in"\n' "${br2_ext}" + printf '\n' + else + printf '# No init from: %s\n\n' "${br2_desc}" + fi >>"${outputdir}/.br2-external.in.init" done printf 'endmenu\n' >>"${outputdir}/.br2-external.in.menus" diff --git a/support/scripts/cpedb.py b/support/scripts/cpedb.py new file mode 100644 index 0000000000..f4daf56124 --- /dev/null +++ b/support/scripts/cpedb.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python3 + +import xml.etree.ElementTree as ET +from xml.etree.ElementTree import Element, SubElement +import gzip +import os +import requests +import time +from xml.dom import minidom + +VALID_REFS = ['VENDOR', 'VERSION', 'CHANGE_LOG', 'PRODUCT', 'PROJECT', 'ADVISORY'] + +CPEDB_URL = "https://static.nvd.nist.gov/feeds/xml/cpe/dictionary/official-cpe-dictionary_v2.3.xml.gz" + +ns = { + '': 'http://cpe.mitre.org/dictionary/2.0', + 'cpe-23': 'http://scap.nist.gov/schema/cpe-extension/2.3', + 'xml': 'http://www.w3.org/XML/1998/namespace' +} + + +class CPE: + def __init__(self, cpe_str, titles, refs): + self.cpe_str = cpe_str + self.titles = titles + self.references = refs + self.cpe_cur_ver = "".join(self.cpe_str.split(":")[5:6]) + + def update_xml_dict(self): + ET.register_namespace('', 'http://cpe.mitre.org/dictionary/2.0') + cpes = Element('cpe-list') + cpes.set('xmlns:cpe-23', "http://scap.nist.gov/schema/cpe-extension/2.3") + cpes.set('xmlns:ns6', "http://scap.nist.gov/schema/scap-core/0.1") + cpes.set('xmlns:scap-core', "http://scap.nist.gov/schema/scap-core/0.3") + cpes.set('xmlns:config', "http://scap.nist.gov/schema/configuration/0.1") + cpes.set('xmlns:xsi', "http://www.w3.org/2001/XMLSchema-instance") + cpes.set('xmlns:meta', "http://scap.nist.gov/schema/cpe-dictionary-metadata/0.2") + cpes.set('xsi:schemaLocation', " 
".join(["http://scap.nist.gov/schema/cpe-extension/2.3", + "https://scap.nist.gov/schema/cpe/2.3/cpe-dictionary-extension_2.3.xsd", + "http://cpe.mitre.org/dictionary/2.0", + "https://scap.nist.gov/schema/cpe/2.3/cpe-dictionary_2.3.xsd", + "http://scap.nist.gov/schema/cpe-dictionary-metadata/0.2", + "https://scap.nist.gov/schema/cpe/2.1/cpe-dictionary-metadata_0.2.xsd", + "http://scap.nist.gov/schema/scap-core/0.3", + "https://scap.nist.gov/schema/nvd/scap-core_0.3.xsd", + "http://scap.nist.gov/schema/configuration/0.1", + "https://scap.nist.gov/schema/nvd/configuration_0.1.xsd", + "http://scap.nist.gov/schema/scap-core/0.1", + "https://scap.nist.gov/schema/nvd/scap-core_0.1.xsd"])) + item = SubElement(cpes, 'cpe-item') + cpe_short_name = CPE.short_name(self.cpe_str) + cpe_new_ver = CPE.version_update(self.cpe_str) + + item.set('name', 'cpe:/' + cpe_short_name) + self.titles[0].text.replace(self.cpe_cur_ver, cpe_new_ver) + for title in self.titles: + item.append(title) + if self.references: + item.append(self.references) + cpe23item = SubElement(item, 'cpe-23:cpe23-item') + cpe23item.set('name', self.cpe_str) + + # Generate the XML as a string + xmlstr = ET.tostring(cpes) + + # And use minidom to pretty print the XML + return minidom.parseString(xmlstr).toprettyxml(encoding="utf-8").decode("utf-8") + + @staticmethod + def version(cpe): + return cpe.split(":")[5] + + @staticmethod + def product(cpe): + return cpe.split(":")[4] + + @staticmethod + def short_name(cpe): + return ":".join(cpe.split(":")[2:6]) + + @staticmethod + def version_update(cpe): + return ":".join(cpe.split(":")[5:6]) + + @staticmethod + def no_version(cpe): + return ":".join(cpe.split(":")[:5]) + + +class CPEDB: + def __init__(self, nvd_path): + self.all_cpes = dict() + self.all_cpes_no_version = dict() + self.nvd_path = nvd_path + + def get_xml_dict(self): + print("CPE: Setting up NIST dictionary") + if not os.path.exists(os.path.join(self.nvd_path, "cpe")): + 
os.makedirs(os.path.join(self.nvd_path, "cpe")) + + cpe_dict_local = os.path.join(self.nvd_path, "cpe", os.path.basename(CPEDB_URL)) + if not os.path.exists(cpe_dict_local) or os.stat(cpe_dict_local).st_mtime < time.time() - 86400: + print("CPE: Fetching xml manifest from [" + CPEDB_URL + "]") + cpe_dict = requests.get(CPEDB_URL) + open(cpe_dict_local, "wb").write(cpe_dict.content) + + print("CPE: Unzipping xml manifest...") + nist_cpe_file = gzip.GzipFile(fileobj=open(cpe_dict_local, 'rb')) + print("CPE: Converting xml manifest to dict...") + tree = ET.parse(nist_cpe_file) + all_cpedb = tree.getroot() + self.parse_dict(all_cpedb) + + def parse_dict(self, all_cpedb): + # Cycle through the dict and build two dict to be used for custom + # lookups of partial and complete CPE objects + # The objects are then used to create new proposed XML updates if + # if is determined one is required + # Out of the different language titles, select English + for cpe in all_cpedb.findall(".//{http://cpe.mitre.org/dictionary/2.0}cpe-item"): + cpe_titles = [] + for title in cpe.findall('.//{http://cpe.mitre.org/dictionary/2.0}title[@xml:lang="en-US"]', ns): + title.tail = None + cpe_titles.append(title) + + # Some older CPE don't include references, if they do, make + # sure we handle the case of one ref needing to be packed + # in a list + cpe_ref = cpe.find(".//{http://cpe.mitre.org/dictionary/2.0}references") + if cpe_ref: + for ref in cpe_ref.findall(".//{http://cpe.mitre.org/dictionary/2.0}reference"): + ref.tail = None + ref.text = ref.text.upper() + if ref.text not in VALID_REFS: + ref.text = ref.text + "-- UPDATE this entry, here are some examples and just one word should be used -- " + ' '.join(VALID_REFS) # noqa E501 + cpe_ref.tail = None + cpe_ref.text = None + + cpe_str = cpe.find(".//{http://scap.nist.gov/schema/cpe-extension/2.3}cpe23-item").get('name') + item = CPE(cpe_str, cpe_titles, cpe_ref) + cpe_str_no_version = CPE.no_version(cpe_str) + # This dict must have a 
unique key for every CPE version + # which allows matching to the specific obj data of that + # NIST dict entry + self.all_cpes.update({cpe_str: item}) + # This dict has one entry for every CPE (w/o version) to allow + # partial match (no valid version) check (the obj is saved and + # used as seed for suggested xml updates. By updating the same + # non-version'd entry, it assumes the last update here is the + # latest version in the NIST dict) + self.all_cpes_no_version.update({cpe_str_no_version: item}) + + def find_partial(self, cpe_str): + cpe_str_no_version = CPE.no_version(cpe_str) + if cpe_str_no_version in self.all_cpes_no_version: + return cpe_str_no_version + + def find_partial_obj(self, cpe_str): + cpe_str_no_version = CPE.no_version(cpe_str) + if cpe_str_no_version in self.all_cpes_no_version: + return self.all_cpes_no_version[cpe_str_no_version] + + def find_partial_latest_version(self, cpe_str_partial): + cpe_obj = self.find_partial_obj(cpe_str_partial) + return cpe_obj.cpe_cur_ver + + def find(self, cpe_str): + if self.find_partial(cpe_str): + if cpe_str in self.all_cpes: + return cpe_str + + def gen_update_xml(self, cpe_str): + cpe = self.find_partial_obj(cpe_str) + return cpe.update_xml_dict() diff --git a/support/scripts/cve-checker b/support/scripts/cve-checker deleted file mode 100755 index 998ea5b8af..0000000000 --- a/support/scripts/cve-checker +++ /dev/null @@ -1,196 +0,0 @@ -#!/usr/bin/env python - -# Copyright (C) 2009 by Thomas Petazzoni <thomas.petazzoni@free-electrons.com> -# Copyright (C) 2020 by Gregory CLEMENT <gregory.clement@bootlin.com> -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or -# (at your option) any later version. 
-# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -import argparse -import datetime -import os -import json -import sys -import cve as cvecheck - - -class Package: - def __init__(self, name, version, ignored_cves): - self.name = name - self.version = version - self.cves = list() - self.ignored_cves = ignored_cves - - -def check_package_cves(nvd_path, packages): - if not os.path.isdir(nvd_path): - os.makedirs(nvd_path) - - for cve in cvecheck.CVE.read_nvd_dir(nvd_path): - for pkg_name in cve.pkg_names: - pkg = packages.get(pkg_name, '') - if pkg and cve.affects(pkg.name, pkg.version, pkg.ignored_cves) == cve.CVE_AFFECTS: - pkg.cves.append(cve.identifier) - - -html_header = """ -<head> -<script src=\"https://www.kryogenix.org/code/browser/sorttable/sorttable.js\"></script> -<style type=\"text/css\"> -table { - width: 100%; -} -td { - border: 1px solid black; -} -td.centered { - text-align: center; -} -td.wrong { - background: #ff9a69; -} -td.correct { - background: #d2ffc4; -} - -</style> -<title>CVE status for Buildroot configuration</title> -</head> - -<p id=\"sortable_hint\"></p> -""" - - -html_footer = """ -</body> -<script> -if (typeof sorttable === \"object\") { - document.getElementById(\"sortable_hint\").innerHTML = - \"hint: the table can be sorted by clicking the column headers\" -} -</script> -</html> -""" - - -def dump_html_pkg(f, pkg): - f.write(" <tr>\n") - f.write(" <td>%s</td>\n" % pkg.name) - - # Current version - if len(pkg.version) > 20: - version = pkg.version[:20] + "..." 
- else: - version = pkg.version - f.write(" <td class=\"centered\">%s</td>\n" % version) - - # CVEs - td_class = ["centered"] - if len(pkg.cves) == 0: - td_class.append("correct") - else: - td_class.append("wrong") - f.write(" <td class=\"%s\">\n" % " ".join(td_class)) - for cve in pkg.cves: - f.write(" <a href=\"https://security-tracker.debian.org/tracker/%s\">%s<br/>\n" % (cve, cve)) - f.write(" </td>\n") - - f.write(" </tr>\n") - - -def dump_html_all_pkgs(f, packages): - f.write(""" -<table class=\"sortable\"> -<tr> -<td>Package</td> -<td class=\"centered\">Version</td> -<td class=\"centered\">CVEs</td> -</tr> -""") - for pkg in packages: - dump_html_pkg(f, pkg) - f.write("</table>") - - -def dump_html_gen_info(f, date): - f.write("<p><i>Generated on %s</i></p>\n" % (str(date))) - - -def dump_html(packages, date, output): - with open(output, 'w') as f: - f.write(html_header) - dump_html_all_pkgs(f, packages) - dump_html_gen_info(f, date) - f.write(html_footer) - - -def dump_json(packages, date, output): - # Format packages as a dictionnary instead of a list - pkgs = { - pkg.name: { - "version": pkg.version, - "cves": pkg.cves, - } for pkg in packages - } - # The actual structure to dump, add date to it - final = {'packages': pkgs, - 'date': str(date)} - with open(output, 'w') as f: - json.dump(final, f, indent=2, separators=(',', ': ')) - f.write('\n') - - -def resolvepath(path): - return os.path.abspath(os.path.expanduser(path)) - - -def parse_args(): - parser = argparse.ArgumentParser() - output = parser.add_argument_group('output', 'Output file(s)') - output.add_argument('--html', dest='html', type=resolvepath, - help='HTML output file') - output.add_argument('--json', dest='json', type=resolvepath, - help='JSON output file') - parser.add_argument('--nvd-path', dest='nvd_path', - help='Path to the local NVD database', type=resolvepath, - required=True) - args = parser.parse_args() - if not args.html and not args.json: - parser.error('at least one of --html or 
--json (or both) is required') - return args - - -def __main__(): - packages = list() - content = json.load(sys.stdin) - for item in content: - pkg = content[item] - p = Package(item, pkg.get('version', ''), pkg.get('ignore_cves', '')) - packages.append(p) - - args = parse_args() - date = datetime.datetime.utcnow() - - print("Checking packages CVEs") - check_package_cves(args.nvd_path, {p.name: p for p in packages}) - - if args.html: - print("Write HTML") - dump_html(packages, date, args.html) - if args.json: - print("Write JSON") - dump_json(packages, date, args.json) - - -__main__() diff --git a/support/scripts/cve.py b/support/scripts/cve.py index 6396019e0e..6e97ea193f 100755 --- a/support/scripts/cve.py +++ b/support/scripts/cve.py @@ -47,6 +47,24 @@ ops = { } +# Check if two CPE IDs match each other +def cpe_matches(cpe1, cpe2): + cpe1_elems = cpe1.split(":") + cpe2_elems = cpe2.split(":") + + remains = filter(lambda x: x[0] not in ["*", "-"] and x[1] not in ["*", "-"] and x[0] != x[1], + zip(cpe1_elems, cpe2_elems)) + return len(list(remains)) == 0 + + +def cpe_product(cpe): + return cpe.split(':')[4] + + +def cpe_version(cpe): + return cpe.split(':')[5] + + class CVE: """An accessor class for CVE Items in NVD files""" CVE_AFFECTS = 1 @@ -134,7 +152,11 @@ class CVE: for cpe in node.get('cpe_match', ()): if not cpe['vulnerable']: return - vendor, product, version = cpe['cpe23Uri'].split(':')[3:6] + product = cpe_product(cpe['cpe23Uri']) + version = cpe_version(cpe['cpe23Uri']) + # ignore when product is '-', which means N/A + if product == '-': + return op_start = '' op_end = '' v_start = '' @@ -144,10 +166,6 @@ class CVE: # Version is defined, this is a '=' match op_start = '=' v_start = version - elif version == '-': - # no version information is available - op_start = '=' - v_start = version else: # Parse start version, end version and operators if 'versionStartIncluding' in cpe: @@ -167,8 +185,7 @@ class CVE: v_end = cpe['versionEndExcluding'] yield { - 
'vendor': vendor, - 'product': product, + 'id': cpe['cpe23Uri'], 'v_start': v_start, 'op_start': op_start, 'v_end': v_end, @@ -186,11 +203,11 @@ class CVE: return self.nvd_cve['cve']['CVE_data_meta']['ID'] @property - def pkg_names(self): - """The set of package names referred by this CVE definition""" - return set(p['product'] for p in self.each_cpe()) + def affected_products(self): + """The set of CPE products referred by this CVE definition""" + return set(cpe_product(p['id']) for p in self.each_cpe()) - def affects(self, name, version, cve_ignore_list): + def affects(self, name, version, cve_ignore_list, cpeid=None): """ True if the Buildroot Package object passed as argument is affected by this CVE. @@ -203,14 +220,15 @@ class CVE: print("Cannot parse package '%s' version '%s'" % (name, version)) pkg_version = None + # if we don't have a cpeid, build one based on name and version + if not cpeid: + cpeid = "cpe:2.3:*:*:%s:%s:*:*:*:*:*:*:*" % (name, version) + for cpe in self.each_cpe(): - if cpe['product'] != name: + if not cpe_matches(cpe['id'], cpeid): continue - if cpe['v_start'] == '-': - return self.CVE_AFFECTS if not cpe['v_start'] and not cpe['v_end']: - print("No CVE affected version") - continue + return self.CVE_AFFECTS if not pkg_version: continue diff --git a/support/scripts/gen-bootlin-toolchains b/support/scripts/gen-bootlin-toolchains index ac1a19b3c0..902b7795ee 100755 --- a/support/scripts/gen-bootlin-toolchains +++ b/support/scripts/gen-bootlin-toolchains @@ -123,6 +123,16 @@ arches = { 'conditions': ['BR2_or1k'], 'prefix': 'or1k', }, + 'powerpc-440fp': { + # Not sure it could be used by other powerpc variants? + 'conditions': ['BR2_powerpc', 'BR2_powerpc_440fp'], + 'prefix': 'powerpc', + }, + 'powerpc-e300c3': { + # Not sure it could be used by other powerpc variants? + 'conditions': ['BR2_powerpc', 'BR2_powerpc_e300c3'], + 'prefix': 'powerpc', + }, 'powerpc-e500mc': { # Not sure it could be used by other powerpc variants? 
'conditions': ['BR2_powerpc', 'BR2_powerpc_e500mc'], @@ -132,6 +142,10 @@ arches = { 'conditions': ['BR2_powerpc64', 'BR2_powerpc_e5500'], 'prefix': 'powerpc64', }, + 'powerpc64-e6500': { + 'conditions': ['BR2_powerpc64', 'BR2_powerpc_e6500'], + 'prefix': 'powerpc64', + }, 'powerpc64-power8': { 'conditions': ['BR2_powerpc64', 'BR2_powerpc_power8'], 'prefix': 'powerpc64', @@ -196,7 +210,7 @@ arches = { 'prefix': 'i686', }, 'xtensa-lx60': { - 'conditions': ['BR2_xtensa', 'BR2_xtensa_fsf'], + 'conditions': ['BR2_xtensa', 'BR2_XTENSA_CUSTOM', 'BR2_XTENSA_LITTLE_ENDIAN'], 'prefix': 'xtensa', }, } @@ -233,18 +247,25 @@ class Toolchain: f.write("config %s\n" % self.option_name) f.write("\tbool \"%s %s %s %s\"\n" % (self.arch, self.libc, self.variant, self.version)) - for c in arches[self.arch]['conditions']: - f.write("\tdepends on %s\n" % c) + depends = [] selects = [] + + for c in arches[self.arch]['conditions']: + depends.append(c) + for frag in self.fragment: # libc type if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_CUSTOM_UCLIBC"): selects.append("BR2_TOOLCHAIN_EXTERNAL_UCLIBC") elif frag.startswith("BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC"): + # glibc needs mmu support + depends.append("BR2_USE_MMU") + # glibc doesn't support static only configuration + depends.append("!BR2_STATIC_LIBS") selects.append("BR2_TOOLCHAIN_EXTERNAL_GLIBC") - # all glibc toolchains have RPC support - selects.append("BR2_TOOLCHAIN_HAS_NATIVE_RPC") elif frag.startswith("BR2_TOOLCHAIN_EXTERNAL_CUSTOM_MUSL"): + # musl needs mmu support + depends.append("BR2_USE_MMU") selects.append("BR2_TOOLCHAIN_EXTERNAL_MUSL") # gcc version @@ -303,6 +324,9 @@ class Toolchain: if frag.startswith("BR2_TOOLCHAIN_EXTERNAL_OPENMP"): selects.append("BR2_TOOLCHAIN_HAS_OPENMP") + for depend in depends: + f.write("\tdepends on %s\n" % depend) + for select in selects: f.write("\tselect %s\n" % select) diff --git a/support/scripts/generate-gitlab-ci-yml b/support/scripts/generate-gitlab-ci-yml index e42943953c..3f498e08fd 
100755 --- a/support/scripts/generate-gitlab-ci-yml +++ b/support/scripts/generate-gitlab-ci-yml @@ -6,9 +6,6 @@ main() { local template="${1}" preamble "${template}" - - gen_basics - gen_defconfigs gen_tests } @@ -24,91 +21,85 @@ preamble() { _EOF_ } -gen_basics() { - local tst - - # Skip basic tests when explicitly building defconfigs or runtime tests - case "${CI_COMMIT_REF_NAME}" in - (*-defconfigs) return;; - (*-*_defconfig) return;; - (*-runtime-tests) return;; - (*-tests.*) return;; - esac - - for tst in DEVELOPERS flake8 package; do - printf 'check-%s: { extends: .check-%s_base }\n' "${tst}" "${tst}" - done -} +gen_tests() { + local -a basics defconfigs runtimes + local do_basics do_defconfigs do_runtime + local defconfigs_ext cfg tst -gen_defconfigs() { - local -a defconfigs - local template cfg ext + basics=( DEVELOPERS flake8 package ) defconfigs=( $(cd configs; LC_ALL=C ls -1 *_defconfig) ) + runtimes=( $(./support/testing/run-tests -l 2>&1 \ + | sed -r -e '/^test_run \((.*)\).*/!d; s//\1/' \ + | LC_ALL=C sort) + ) + if [ -n "${CI_COMMIT_TAG}" ]; then - # For tags, create a pipeline. - template=base + do_basics=true + do_defconfigs=base + do_runtime=true + elif [ "${CI_PIPELINE_SOURCE}" = "trigger" ]; then + case "${BR_SCHEDULE_JOBS}" in + (basic) + do_basics=true + do_defconfigs=check + defconfigs_ext=_check + ;; + (defconfig) + do_defconfigs=base + ;; + (runtime) + do_runtime=true + ;; + esac + else + case "${CI_COMMIT_REF_NAME}" in + (*-basics) + do_basics=true + do_defconfigs=check + defconfigs_ext=_check + ;; + (*-defconfigs) + do_defconfigs=base + ;; + (*-*_defconfig) + defconfigs=( "${CI_COMMIT_REF_NAME##*-}" ) + do_defconfigs=base + ;; + (*-runtime-tests) + do_runtime=true + ;; + (*-tests.*) + runtimes=( "${CI_COMMIT_REF_NAME##*-}" ) + do_runtime=true + ;; + esac fi - if [ -n "${CI_PIPELINE_TRIGGERED}" ]; then - # For pipeline created by using a trigger token. 
- template=base + + # If nothing else, at least do the basics to generate a valid pipeline + if [ -z "${do_defconfigs}" \ + -a -z "${do_runtime}" \ + ] + then + do_basics=true fi - case "${CI_COMMIT_REF_NAME}" in - # For master, next, and maintenance branches, only check the defconfigs - (master|next|????.??.x) - template=check - ext=_check - ;; - # For the branch or tag name named *-defconfigs, create a pipeline. - (*-defconfigs) - template=base - ;; - (*-*_defconfig) - defconfigs=( "${CI_COMMIT_REF_NAME##*-}" ) - template=base - ;; - esac - if [ -n "${template}" ]; then + if ${do_basics:-false}; then + for tst in "${basics[@]}"; do + printf 'check-%s: { extends: .check-%s_base }\n' "${tst}" "${tst}" + done + fi + + if [ -n "${do_defconfigs}" ]; then for cfg in "${defconfigs[@]}"; do printf '%s%s: { extends: .defconfig_%s }\n' \ - "${cfg}" "${ext}" "${template}" + "${cfg}" "${defconfigs_ext}" "${do_defconfigs}" done fi -} - -gen_tests() { - local -a tests - local run_tests tst - tests=( $(./support/testing/run-tests -l 2>&1 \ - | sed -r -e '/^test_run \((.*)\).*/!d; s//\1/'\ - | LC_ALL=C sort) - ) - - run_tests=false - if [ -n "${CI_COMMIT_TAG}" ]; then - # For tags, create a pipeline. - run_tests=true - fi - if [ -n "${CI_PIPELINE_TRIGGERED}" ]; then - # For pipeline created by using a trigger token. - run_tests=true - fi - case "${CI_COMMIT_REF_NAME}" in - # For the branch or tag name named *-runtime-tests, create a pipeline. 
- (*-runtime-tests) - run_tests=true - ;; - (*-tests.*) - tests=( "${CI_COMMIT_REF_NAME##*-}" ) - run_tests=true - ;; - esac - - if ${run_tests}; then - printf '%s: { extends: .runtime_test_base }\n' "${tests[@]}" + if ${do_runtime:-false}; then + printf '%s: { extends: .runtime_test_base }\n' "${runtimes[@]}" fi } diff --git a/support/scripts/pkg-stats b/support/scripts/pkg-stats index 503cc45c16..6f3ddc561f 100755 --- a/support/scripts/pkg-stats +++ b/support/scripts/pkg-stats @@ -28,10 +28,11 @@ import subprocess import json import sys -sys.path.append('utils/') -from getdeveloperlib import parse_developers # noqa: E402 -import cve as cvecheck # noqa: E402 +brpath = os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "..")) +sys.path.append(os.path.join(brpath, "utils")) +from getdeveloperlib import parse_developers # noqa: E402 +from cpedb import CPEDB # noqa: E402 INFRA_RE = re.compile(r"\$\(eval \$\(([a-z-]*)-package\)\)") URL_RE = re.compile(r"\s*https?://\S*\s*$") @@ -66,7 +67,7 @@ def get_defconfig_list(): """ return [ Defconfig(name[:-len('_defconfig')], os.path.join('configs', name)) - for name in os.listdir('configs') + for name in os.listdir(os.path.join(brpath, 'configs')) if name.endswith('_defconfig') ] @@ -76,6 +77,7 @@ class Package: all_license_files = list() all_versions = dict() all_ignored_cves = dict() + all_cpeids = dict() # This is the list of all possible checks. Add new checks to this list so # a tool that post-processeds the json output knows the checks before # iterating over the packages. 
@@ -96,7 +98,9 @@ class Package: self.current_version = None self.url = None self.url_worker = None + self.cpeid = None self.cves = list() + self.ignored_cves = list() self.latest_version = {'status': RM_API_STATUS_ERROR, 'version': None, 'id': None} self.status = {} @@ -108,9 +112,10 @@ class Package: Fills in the .url field """ self.status['url'] = ("warning", "no Config.in") - for filename in os.listdir(os.path.dirname(self.path)): + pkgdir = os.path.dirname(os.path.join(brpath, self.path)) + for filename in os.listdir(pkgdir): if fnmatch.fnmatch(filename, 'Config.*'): - fp = open(os.path.join(os.path.dirname(self.path), filename), "r") + fp = open(os.path.join(pkgdir, filename), "r") for config_line in fp: if URL_RE.match(config_line): self.url = config_line.strip() @@ -138,10 +143,10 @@ class Package: Fills in the .infras field """ self.infras = list() - with open(self.path, 'r') as f: + with open(os.path.join(brpath, self.path), 'r') as f: lines = f.readlines() - for l in lines: - match = INFRA_RE.match(l) + for line in lines: + match = INFRA_RE.match(line) if not match: continue infra = match.group(1) @@ -178,7 +183,7 @@ class Package: return hashpath = self.path.replace(".mk", ".hash") - if os.path.exists(hashpath): + if os.path.exists(os.path.join(brpath, hashpath)): self.status['hash'] = ("ok", "found") else: self.status['hash'] = ("error", "missing") @@ -191,7 +196,7 @@ class Package: self.status['patches'] = ("na", "no valid package infra") return - pkgdir = os.path.dirname(self.path) + pkgdir = os.path.dirname(os.path.join(brpath, self.path)) for subdir, _, _ in os.walk(pkgdir): self.patch_files = fnmatch.filter(os.listdir(subdir), '*.patch') @@ -210,12 +215,28 @@ class Package: if var in self.all_versions: self.current_version = self.all_versions[var] + def set_cpeid(self): + """ + Fills in the .cpeid field + """ + var = self.pkgvar() + if not self.has_valid_infra: + self.status['cpe'] = ("na", "no valid package infra") + return + + if var in 
self.all_cpeids: + self.cpeid = self.all_cpeids[var] + # Set a preliminary status, it might be overridden by check_package_cpes() + self.status['cpe'] = ("warning", "not checked against CPE dictionnary") + else: + self.status['cpe'] = ("error", "no verified CPE identifier") + def set_check_package_warnings(self): """ Fills in the .warnings and .status['pkg-check'] fields """ - cmd = ["./utils/check-package"] - pkgdir = os.path.dirname(self.path) + cmd = [os.path.join(brpath, "utils/check-package")] + pkgdir = os.path.dirname(os.path.join(brpath, self.path)) self.status['pkg-check'] = ("error", "Missing") for root, dirs, files in os.walk(pkgdir): for f in files: @@ -233,12 +254,11 @@ class Package: self.status['pkg-check'] = ("error", "{} warnings".format(self.warnings)) return - @property - def ignored_cves(self): + def set_ignored_cves(self): """ Give the list of CVEs ignored by the package """ - return list(self.all_ignored_cves.get(self.pkgvar(), [])) + self.ignored_cves = list(self.all_ignored_cves.get(self.pkgvar(), [])) def set_developers(self, developers): """ @@ -256,7 +276,13 @@ class Package: self.status['developers'] = ("warning", "no developers") def is_status_ok(self, name): - return self.status[name][0] == 'ok' + return name in self.status and self.status[name][0] == 'ok' + + def is_status_error(self, name): + return name in self.status and self.status[name][0] == 'error' + + def is_status_na(self, name): + return name in self.status and self.status[name][0] == 'na' def __eq__(self, other): return self.path == other.path @@ -300,11 +326,12 @@ def get_pkglist(npackages, package_list): "toolchain/toolchain-wrapper.mk"] packages = list() count = 0 - for root, dirs, files in os.walk("."): + for root, dirs, files in os.walk(brpath): + root = os.path.relpath(root, brpath) rootdir = root.split("/") - if len(rootdir) < 2: + if len(rootdir) < 1: continue - if rootdir[1] not in WALK_USEFUL_SUBDIRS: + if rootdir[0] not in WALK_USEFUL_SUBDIRS: continue for f in 
files: if not f.endswith(".mk"): @@ -316,8 +343,7 @@ def get_pkglist(npackages, package_list): pkgpath = os.path.join(root, f) skip = False for exclude in WALK_EXCLUDES: - # pkgpath[2:] strips the initial './' - if re.match(exclude, pkgpath[2:]): + if re.match(exclude, pkgpath): skip = True continue if skip: @@ -330,10 +356,16 @@ def get_pkglist(npackages, package_list): return packages +def get_config_packages(): + cmd = ["make", "--no-print-directory", "show-info"] + js = json.loads(subprocess.check_output(cmd)) + return set([v["name"] for v in js.values()]) + + def package_init_make_info(): # Fetch all variables at once variables = subprocess.check_output(["make", "BR2_HAVE_DOT_CONFIG=y", "-s", "printvars", - "VARS=%_LICENSE %_LICENSE_FILES %_VERSION %_IGNORE_CVES"]) + "VARS=%_LICENSE %_LICENSE_FILES %_VERSION %_IGNORE_CVES %_CPE_ID"]) variable_list = variables.decode().splitlines() # We process first the host package VERSION, and then the target @@ -371,6 +403,10 @@ def package_init_make_info(): pkgvar = pkgvar[:-12] Package.all_ignored_cves[pkgvar] = value.split() + elif pkgvar.endswith("_CPE_ID"): + pkgvar = pkgvar[:-7] + Package.all_cpeids[pkgvar] = value + check_url_count = 0 @@ -405,7 +441,7 @@ async def check_package_urls(packages): async with aiohttp.ClientSession(connector=connector, trust_env=True) as sess: packages = [p for p in packages if p.status['url'][0] == 'ok'] for pkg in packages: - tasks.append(check_url_status(sess, pkg, len(packages))) + tasks.append(asyncio.ensure_future(check_url_status(sess, pkg, len(packages)))) await asyncio.wait(tasks) @@ -523,20 +559,58 @@ async def check_package_latest_version(packages): async with aiohttp.ClientSession(connector=connector, trust_env=True) as sess: packages = [p for p in packages if p.has_valid_infra] for pkg in packages: - tasks.append(check_package_latest_version_get(sess, pkg, len(packages))) + tasks.append(asyncio.ensure_future(check_package_latest_version_get(sess, pkg, len(packages)))) await 
asyncio.wait(tasks) +def check_package_cve_affects(cve, cpe_product_pkgs): + for product in cve.affected_products: + if product not in cpe_product_pkgs: + continue + for pkg in cpe_product_pkgs[product]: + if cve.affects(pkg.name, pkg.current_version, pkg.ignored_cves, pkg.cpeid) == cve.CVE_AFFECTS: + pkg.cves.append(cve.identifier) + + def check_package_cves(nvd_path, packages): if not os.path.isdir(nvd_path): os.makedirs(nvd_path) + cpe_product_pkgs = defaultdict(list) + for pkg in packages: + if not pkg.has_valid_infra: + pkg.status['cve'] = ("na", "no valid package infra") + continue + if not pkg.current_version: + pkg.status['cve'] = ("na", "no version information available") + continue + if pkg.cpeid: + cpe_product = cvecheck.cpe_product(pkg.cpeid) + cpe_product_pkgs[cpe_product].append(pkg) + else: + cpe_product_pkgs[pkg.name].append(pkg) + for cve in cvecheck.CVE.read_nvd_dir(nvd_path): - for pkg_name in cve.pkg_names: - if pkg_name in packages: - pkg = packages[pkg_name] - if cve.affects(pkg.name, pkg.current_version, pkg.ignored_cves) == cve.CVE_AFFECTS: - pkg.cves.append(cve.identifier) + check_package_cve_affects(cve, cpe_product_pkgs) + + for pkg in packages: + if 'cve' not in pkg.status: + if pkg.cves: + pkg.status['cve'] = ("error", "affected by CVEs") + else: + pkg.status['cve'] = ("ok", "not affected by CVEs") + + +def check_package_cpes(nvd_path, packages): + cpedb = CPEDB(nvd_path) + cpedb.get_xml_dict() + for p in packages: + if not p.cpeid: + continue + if cpedb.find(p.cpeid): + p.status['cpe'] = ("ok", "verified CPE identifier") + else: + p.status['cpe'] = ("error", "CPE identifier unknown in CPE database") def calculate_stats(packages): @@ -578,6 +652,10 @@ def calculate_stats(packages): stats["total-cves"] += len(pkg.cves) if len(pkg.cves) != 0: stats["pkg-cves"] += 1 + if pkg.cpeid: + stats["cpe-id"] += 1 + else: + stats["no-cpe-id"] += 1 return stats @@ -633,6 +711,30 @@ td.version-error { background: #ccc; } +td.cpe-ok { + background: 
#d2ffc4; +} + +td.cpe-nok { + background: #ff9a69; +} + +td.cpe-unknown { + background: #ffd870; +} + +td.cve-ok { + background: #d2ffc4; +} + +td.cve-nok { + background: #ff9a69; +} + +td.cve-unknown { + background: #ffd870; +} + </style> <title>Statistics of Buildroot packages</title> </head> @@ -678,7 +780,7 @@ def boolean_str(b): def dump_html_pkg(f, pkg): f.write(" <tr>\n") - f.write(" <td>%s</td>\n" % pkg.path[2:]) + f.write(" <td>%s</td>\n" % pkg.path) # Patch count td_class = ["centered"] @@ -791,13 +893,35 @@ def dump_html_pkg(f, pkg): # CVEs td_class = ["centered"] - if len(pkg.cves) == 0: - td_class.append("correct") + if pkg.is_status_ok("cve"): + td_class.append("cve-ok") + elif pkg.is_status_error("cve"): + td_class.append("cve-nok") else: - td_class.append("wrong") + td_class.append("cve-unknown") + f.write(" <td class=\"%s\">\n" % " ".join(td_class)) + if pkg.is_status_error("cve"): + for cve in pkg.cves: + f.write(" <a href=\"https://security-tracker.debian.org/tracker/%s\">%s<br/>\n" % (cve, cve)) + elif pkg.is_status_na("cve"): + f.write(" %s" % pkg.status['cve'][1]) + else: + f.write(" N/A\n") + f.write(" </td>\n") + + # CPE ID + td_class = ["left"] + if pkg.is_status_ok("cpe"): + td_class.append("cpe-ok") + elif pkg.is_status_error("cpe"): + td_class.append("cpe-nok") + else: + td_class.append("cpe-unknown") f.write(" <td class=\"%s\">\n" % " ".join(td_class)) - for cve in pkg.cves: - f.write(" <a href=\"https://security-tracker.debian.org/tracker/%s\">%s<br/>\n" % (cve, cve)) + if pkg.cpeid: + f.write(" <code>%s</code>\n" % pkg.cpeid) + if not pkg.is_status_ok("cpe"): + f.write(" %s%s\n" % ("<br/>" if pkg.cpeid else "", pkg.status['cpe'][1])) f.write(" </td>\n") f.write(" </tr>\n") @@ -818,6 +942,7 @@ def dump_html_all_pkgs(f, packages): <td class=\"centered\">Warnings</td> <td class=\"centered\">Upstream URL</td> <td class=\"centered\">CVEs</td> +<td class=\"centered\">CPE ID</td> </tr> """) for pkg in sorted(packages): @@ -860,6 +985,10 @@ 
def dump_html_stats(f, stats): stats["pkg-cves"]) f.write("<tr><td>Total number of CVEs affecting all packages</td><td>%s</td></tr>\n" % stats["total-cves"]) + f.write("<tr><td>Packages with CPE ID</td><td>%s</td></tr>\n" % + stats["cpe-id"]) + f.write("<tr><td>Packages without CPE ID</td><td>%s</td></tr>\n" % + stats["no-cpe-id"]) f.write("</table>\n") @@ -926,6 +1055,8 @@ def parse_args(): output.add_argument('--json', dest='json', type=resolvepath, help='JSON output file') packages = parser.add_mutually_exclusive_group() + packages.add_argument('-c', dest='configpackages', action='store_true', + help='Apply to packages enabled in current configuration') packages.add_argument('-n', dest='npackages', type=int, action='store', help='Number of packages') packages.add_argument('-p', dest='packages', action='store', @@ -939,13 +1070,22 @@ def parse_args(): def __main__(): + global cvecheck + args = parse_args() + + if args.nvd_path: + import cve as cvecheck + if args.packages: package_list = args.packages.split(",") + elif args.configpackages: + package_list = get_config_packages() else: package_list = None date = datetime.datetime.utcnow() - commit = subprocess.check_output(['git', 'rev-parse', + commit = subprocess.check_output(['git', '-C', brpath, + 'rev-parse', 'HEAD']).splitlines()[0].decode() print("Build package list ...") packages = get_pkglist(args.npackages, package_list) @@ -965,7 +1105,9 @@ def __main__(): pkg.set_patch_count() pkg.set_check_package_warnings() pkg.set_current_version() + pkg.set_cpeid() pkg.set_url() + pkg.set_ignored_cves() pkg.set_developers(developers) print("Checking URL status") loop = asyncio.get_event_loop() @@ -975,7 +1117,8 @@ def __main__(): loop.run_until_complete(check_package_latest_version(packages)) if args.nvd_path: print("Checking packages CVEs") - check_package_cves(args.nvd_path, {p.name: p for p in packages}) + check_package_cves(args.nvd_path, packages) + check_package_cpes(args.nvd_path, packages) print("Calculate 
stats") stats = calculate_stats(packages) if args.html: diff --git a/support/scripts/setlocalversion b/support/scripts/setlocalversion index e04c955d9e..d492f2db2f 100755 --- a/support/scripts/setlocalversion +++ b/support/scripts/setlocalversion @@ -48,14 +48,28 @@ if head=`git rev-parse --verify --short HEAD 2>/dev/null`; then fi # Check for mercurial and a mercurial repo. +# In the git case, 'git describe' will show the latest tag, and unless we are +# exactly on that tag, the number of commits since then, and last commit id. +# Mimic something similar in the Mercurial case. if hgid=`HGRCPATH= hg id --id --tags 2>/dev/null`; then tag=`printf '%s' "$hgid" | cut -d' ' -f2 --only-delimited` # Do we have an untagged version? if [ -z "$tag" -o "$tag" = tip ]; then + # current revision is not tagged, determine latest tag + latesttag=`HGRCPATH= hg log -r. -T '{latesttag}' 2>/dev/null` + # In case there is more than one tag on the latest tagged commit, + # 'latesttag' will separate them by colon (:). We'll retain this. + # In case there is no tag at all, 'null' will be returned. + if [ "$latesttag" = "null" ]; then + latesttag='' + fi + + # add the commit id id=`printf '%s' "$hgid" | sed 's/[+ ].*//'` - printf '%s%s' -hg "$id" + printf '%s%s%s' "${latesttag}" -hg "$id" else + # current revision is tagged, just print the tag printf ${tag} fi |