author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 15:35:18 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-07 15:35:18 +0000
commit     b750101eb236130cf056c675997decbac904cc49 (patch)
tree       a5df1a06754bdd014cb975c051c83b01c9a97532 /tools
parent     Initial commit. (diff)
Adding upstream version 252.22. (tags: upstream/252.22, upstream)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/add-git-hook.sh | 13
-rwxr-xr-x  tools/analyze-dump-sort.py | 78
-rwxr-xr-x  tools/catalog-report.py | 84
-rwxr-xr-x  tools/check-api-docs.sh | 44
-rwxr-xr-x  tools/check-directives.sh | 72
-rwxr-xr-x  tools/check-help.sh | 47
-rwxr-xr-x  tools/check-includes.pl | 23
-rwxr-xr-x  tools/check-version.sh | 36
-rw-r--r--  tools/chromiumos/gen_autosuspend_rules.py | 339
-rwxr-xr-x  tools/coverity.sh | 62
-rwxr-xr-x  tools/dbus_exporter.py | 42
-rwxr-xr-x  tools/debug-sd-boot.sh | 85
-rwxr-xr-x  tools/find-build-dir.sh | 33
-rwxr-xr-x  tools/find-double-newline.sh | 43
-rwxr-xr-x  tools/find-tabs.sh | 43
-rw-r--r--  tools/gdb-sd_dump_hashmaps.py | 77
-rwxr-xr-x  tools/generate-gperfs.py | 24
-rwxr-xr-x  tools/git-contrib.sh | 10
-rw-r--r--  tools/list-discoverable-partitions.py | 192
-rwxr-xr-x  tools/make-autosuspend-rules.py | 24
-rwxr-xr-x  tools/make-directive-index.py | 174
-rwxr-xr-x  tools/make-man-index.py | 111
-rwxr-xr-x  tools/meson-build.sh | 22
-rwxr-xr-x  tools/meson-make-symlink.sh | 22
-rwxr-xr-x  tools/meson-render-jinja2.py | 37
-rwxr-xr-x  tools/meson-vcs-tag.sh | 19
-rwxr-xr-x  tools/oss-fuzz.sh | 144
-rwxr-xr-x  tools/update-dbus-docs.py | 347
-rwxr-xr-x  tools/update-hwdb-autosuspend.sh | 17
-rwxr-xr-x  tools/update-hwdb.sh | 33
-rwxr-xr-x  tools/update-man-rules.py | 97
-rwxr-xr-x  tools/update-syscall-tables.sh | 11
-rwxr-xr-x  tools/xml_helper.py | 20
33 files changed, 2425 insertions, 0 deletions
diff --git a/tools/add-git-hook.sh b/tools/add-git-hook.sh
new file mode 100755
index 0000000..8cff62e
--- /dev/null
+++ b/tools/add-git-hook.sh
@@ -0,0 +1,13 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+
+cd "${MESON_SOURCE_ROOT:?}"
+
+if [ ! -f .git/hooks/pre-commit.sample ] || [ -f .git/hooks/pre-commit ]; then
+ exit 2 # not needed
+fi
+
+cp -p .git/hooks/pre-commit.sample .git/hooks/pre-commit
+chmod +x .git/hooks/pre-commit
+echo 'Activated pre-commit hook'
diff --git a/tools/analyze-dump-sort.py b/tools/analyze-dump-sort.py
new file mode 100755
index 0000000..015027a
--- /dev/null
+++ b/tools/analyze-dump-sort.py
@@ -0,0 +1,78 @@
+#!/usr/bin/python
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+"""
+A helper to compare 'systemd-analyze dump' outputs.
+
+systemd-analyze dump >/var/tmp/dump1
+(reboot)
+tools/analyze-dump-sort.py /var/tmp/dump1 → this does a diff from dump1 to current
+
+systemd-analyze dump >/var/tmp/dump2
+tools/analyze-dump-sort.py /var/tmp/{dump1,dump2} → this does a diff from dump1 to dump2
+"""
+
+import argparse
+import tempfile
+import subprocess
+
+def sort_dump(sourcefile, destfile=None):
+ if destfile is None:
+ destfile = tempfile.NamedTemporaryFile('wt')
+
+ units = {}
+ unit = []
+
+ same = []
+
+ for line in sourcefile:
+ line = line.rstrip()
+
+ header = line.split(':')[0]
+ if 'Timestamp' in header or 'Invocation ID' in header or 'PID' in header:
+ line = header + ': …'
+
+ if line.startswith('->'):
+ if unit:
+ units[unit[0]] = unit
+ unit = [line]
+ elif line.startswith('\t'):
+ assert unit
+
+ if same and same[0].startswith(header):
+ same.append(line)
+ else:
+ unit.extend(sorted(same, key=str.lower))
+ same = [line]
+ else:
+ print(line, file=destfile)
+
+ if unit:
+ units[unit[0]] = unit
+
+ for unit in sorted(units.values()):
+ print('\n'.join(unit), file=destfile)
+
+ destfile.flush()
+ return destfile
+
+def parse_args():
+ p = argparse.ArgumentParser(description=__doc__)
+ p.add_argument('one')
+ p.add_argument('two', nargs='?')
+ p.add_argument('--user', action='store_true')
+ return p.parse_args()
+
+if __name__ == '__main__':
+ opts = parse_args()
+
+ one = sort_dump(open(opts.one))
+ if opts.two:
+ two = sort_dump(open(opts.two))
+ else:
+ user = ['--user'] if opts.user else []
+ two = subprocess.run(['systemd-analyze', 'dump', *user],
+ capture_output=True, text=True, check=True)
+ two = sort_dump(two.stdout.splitlines())
+ with subprocess.Popen(['diff', '-U10', one.name, two.name], stdout=subprocess.PIPE) as diff:
+ subprocess.Popen(['less'], stdin=diff.stdout)
diff --git a/tools/catalog-report.py b/tools/catalog-report.py
new file mode 100755
index 0000000..ca1e13d
--- /dev/null
+++ b/tools/catalog-report.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: MIT
+#
+# This file is distributed under the MIT license, see below.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+Prints out journal entries with no or bad catalog explanations.
+"""
+
+import re
+from systemd import journal, id128
+
+j = journal.Reader()
+
+logged = set()
+pattern = re.compile('@[A-Z0-9_]+@')
+
+mids = {v:k for k,v in id128.__dict__.items()
+ if k.startswith('SD_MESSAGE')}
+
+freq = 1000
+
+def log_entry(x):
+ if 'CODE_FILE' in x:
+ # some of our code was using 'CODE_FUNCTION' instead of 'CODE_FUNC'
+ print('{}:{} {}'.format(x.get('CODE_FILE', '???'),
+ x.get('CODE_LINE', '???'),
+ x.get('CODE_FUNC', None) or x.get('CODE_FUNCTION', '???')))
+ print(' {}'.format(x.get('MESSAGE', 'no message!')))
+ for k, v in x.items():
+ if k.startswith('CODE_') or k in {'MESSAGE_ID', 'MESSAGE'}:
+ continue
+ print(' {}={}'.format(k, v))
+ print()
+
+for i, x in enumerate(j):
+ if i % freq == 0:
+ print(i, end='\r')
+
+ try:
+ mid = x['MESSAGE_ID']
+ except KeyError:
+ continue
+ name = mids.get(mid, 'unknown')
+
+ try:
+ desc = journal.get_catalog(mid)
+ except FileNotFoundError:
+ if mid in logged:
+ continue
+
+ print('{} {.hex}: no catalog entry'.format(name, mid))
+ log_entry(x)
+ logged.add(mid)
+ continue
+
+ fields = [field[1:-1] for field in pattern.findall(desc)]
+ for field in fields:
+ index = (mid, field)
+ if field in x or index in logged:
+ continue
+ print('{} {.hex}: no field {}'.format(name, mid, field))
+ log_entry(x)
+ logged.add(index)
diff --git a/tools/check-api-docs.sh b/tools/check-api-docs.sh
new file mode 100755
index 0000000..2e973a0
--- /dev/null
+++ b/tools/check-api-docs.sh
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+set -o pipefail
+
+sd_good=0
+sd_total=0
+udev_good=0
+udev_total=0
+
+deprecated=(
+ -e sd_bus_try_close
+ -e sd_bus_process_priority
+ -e sd_bus_message_get_priority
+ -e sd_bus_message_set_priority
+ -e sd_seat_can_multi_session
+ -e sd_journal_open_container
+)
+
+for symbol in $(nm -g --defined-only "$@" | grep " T " | cut -d" " -f3 | grep -wv "${deprecated[@]}" | sort -u); do
+ if test -f "${MESON_BUILD_ROOT:?}/man/$symbol.3"; then
+ echo "✓ Symbol $symbol() is documented."
+ good=1
+ else
+ echo -e " \x1b[1;31mSymbol $symbol() lacks documentation.\x1b[0m"
+ good=0
+ fi
+
+ case "$symbol" in
+ sd_*)
+ ((sd_good+=good))
+ ((sd_total+=1))
+ ;;
+ udev_*)
+ ((udev_good+=good))
+ ((udev_total+=1))
+ ;;
+ *)
+ echo 'unknown symbol prefix'
+ exit 1
+ esac
+done
+
+echo "libsystemd: $sd_good/$sd_total libudev: $udev_good/$udev_total"
diff --git a/tools/check-directives.sh b/tools/check-directives.sh
new file mode 100755
index 0000000..7678332
--- /dev/null
+++ b/tools/check-directives.sh
@@ -0,0 +1,72 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+set -o pipefail
+
+SOURCE_ROOT="${1:?}"
+BUILD_ROOT="${2:?}"
+
+command -v gawk &>/dev/null || exit 77
+
+function generate_directives() {
+ gawk -v sec_rx="${2:-""}" -v unit_type="${3:-""}" '
+ match($0, /^([^ \t\.]+)\.([^ \t\.,]+)/, m) {
+ # res[section][directive] = 1
+ res[m[1]][m[2]] = 1;
+ }
+ END {
+ if (unit_type)
+ print unit_type
+
+ for (section in res) {
+ if (sec_rx && section !~ sec_rx)
+ continue
+
+ print "[" section "]";
+ for (directive in res[section]) {
+ print directive "=";
+ }
+ }
+ }
+ ' "$1"
+}
+
+ret=0
+if ! diff \
+ <(generate_directives "$SOURCE_ROOT"/src/network/networkd-network-gperf.gperf | sort) \
+ <(sort "$SOURCE_ROOT"/test/fuzz/fuzz-network-parser/directives); then
+ echo "Looks like test/fuzz/fuzz-network-parser/directives hasn't been updated"
+ ret=1
+fi
+
+if ! diff \
+ <(generate_directives "$SOURCE_ROOT"/src/network/netdev/netdev-gperf.gperf | sort) \
+ <(sort "$SOURCE_ROOT"/test/fuzz/fuzz-netdev-parser/directives.netdev); then
+ echo "Looks like test/fuzz/fuzz-netdev-parser/directives.netdev hasn't been updated"
+ ret=1
+fi
+
+if ! diff \
+ <(generate_directives "$SOURCE_ROOT"/src/udev/net/link-config-gperf.gperf | sort) \
+ <(sort "$SOURCE_ROOT"/test/fuzz/fuzz-link-parser/directives.link) ; then
+ echo "Looks like test/fuzz/fuzz-link-parser/directives.link hasn't been updated"
+ ret=1
+fi
+
+for section in Automount Mount Path Scope Slice Socket Swap Timer; do
+ if ! diff \
+ <(generate_directives "$BUILD_ROOT"/src/core/load-fragment-gperf.gperf "$section" "${section,,}" | sort) \
+ <(sort "$SOURCE_ROOT/test/fuzz/fuzz-unit-file/directives.${section,,}") ; then
+ echo "Looks like test/fuzz/fuzz-unit-file/directives.${section,,} hasn't been updated"
+ ret=1
+ fi
+done
+
+if ! diff \
+ <(generate_directives "$BUILD_ROOT"/src/core/load-fragment-gperf.gperf "(Service|Unit|Install)" "service" | sort) \
+ <(sort "$SOURCE_ROOT/test/fuzz/fuzz-unit-file/directives.service") ; then
+ echo "Looks like test/fuzz/fuzz-unit-file/directives.service hasn't been updated"
+ ret=1
+fi
+
+exit $ret
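As an illustration of the transformation generate_directives() performs: the gawk program turns gperf lookup lines such as "Service.ExecStart, config_parse_exec, ..." into ini-style sections with empty assignments. A minimal Python sketch of the same idea, using a hypothetical input line (not taken from the real gperf files):

    # Sketch only: mirrors the section/directive match used by the gawk program above.
    import re
    sample = 'Service.ExecStart, config_parse_exec, 0, 0'  # hypothetical gperf line
    m = re.match(r'^([^ \t\.]+)\.([^ \t\.,]+)', sample)
    print(f'[{m.group(1)}]')  # -> [Service]
    print(f'{m.group(2)}=')   # -> ExecStart=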
diff --git a/tools/check-help.sh b/tools/check-help.sh
new file mode 100755
index 0000000..f974293
--- /dev/null
+++ b/tools/check-help.sh
@@ -0,0 +1,47 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+set -o pipefail
+
+# Note: 'grep ... >/dev/null' instead of just 'grep -q' is used intentionally
+# here, since 'grep -q' exits on the first match, causing SIGPIPE to be
+# sent to the sender.
+
+BINARY="${1:?}"
+export SYSTEMD_LOG_LEVEL=info
+
+if [[ ! -x "$BINARY" ]]; then
+ echo "$BINARY is not an executable"
+ exit 1
+fi
+
+# output width
+if "$BINARY" --help | grep -v 'default:' | grep -E '.{80}.' >/dev/null; then
+ echo "$(basename "$BINARY") --help output is too wide:"
+ "$BINARY" --help | awk 'length > 80' | grep -E --color=yes '.{80}'
+ exit 1
+fi
+
+# --help prints something. Also catches case where args are ignored.
+if ! "$BINARY" --help | grep . >/dev/null; then
+ echo "$(basename "$BINARY") --help output is empty"
+ exit 2
+fi
+
+# no --help output to stderr
+if "$BINARY" --help 2>&1 1>/dev/null | grep .; then
+ echo "$(basename "$BINARY") --help prints to stderr"
+ exit 3
+fi
+
+# error output to stderr
+if ! ("$BINARY" --no-such-parameter 2>&1 1>/dev/null || :) | grep . >/dev/null; then
+ echo "$(basename "$BINARY") with an unknown parameter does not print to stderr"
+ exit 4
+fi
+
+# --help and -h are equivalent
+if ! diff <("$BINARY" -h) <("$BINARY" --help); then
+ echo "$(basename "$BINARY") --help and -h are not identical"
+ exit 5
+fi
diff --git a/tools/check-includes.pl b/tools/check-includes.pl
new file mode 100755
index 0000000..c8bfcba
--- /dev/null
+++ b/tools/check-includes.pl
@@ -0,0 +1,23 @@
+#!/usr/bin/env perl
+# SPDX-License-Identifier: CC0-1.0
+#
+# checkincludes: Find files included more than once in (other) files.
+
+foreach $file (@ARGV) {
+ open(FILE, $file) or die "Cannot open $file: $!.\n";
+
+ my %includedfiles = ();
+
+ while (<FILE>) {
+ if (m/^\s*#\s*include\s*[<"](\S*)[>"]/o) {
+ ++$includedfiles{$1};
+ }
+ }
+ foreach $filename (keys %includedfiles) {
+ if ($includedfiles{$filename} > 1) {
+ print "$file: $filename is included more than once.\n";
+ }
+ }
+
+ close(FILE);
+}
diff --git a/tools/check-version.sh b/tools/check-version.sh
new file mode 100755
index 0000000..faefb46
--- /dev/null
+++ b/tools/check-version.sh
@@ -0,0 +1,36 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+set -o pipefail
+
+# Note: 'grep ... >/dev/null' instead of just 'grep -q' is used intentionally
+# here, since 'grep -q' exits on the first match, causing SIGPIPE to be
+# sent to the sender.
+
+BINARY="${1:?}"
+VERSION="${2:?}"
+export SYSTEMD_LOG_LEVEL=info
+
+if [[ ! -x "$BINARY" ]]; then
+ echo "$BINARY is not an executable"
+ exit 1
+fi
+
+# --version prints something. Also catches case where args are ignored.
+if ! "$BINARY" --version | grep . >/dev/null; then
+ echo "$(basename "$BINARY") --version output is empty"
+ exit 2
+fi
+
+# no --version output to stderr
+if "$BINARY" --version 2>&1 1>/dev/null | grep .; then
+ echo "$(basename "$BINARY") --version prints to stderr"
+ exit 3
+fi
+
+# project version appears in version output
+out="$("$BINARY" --version)"
+if ! grep -F "$VERSION" >/dev/null <<<"$out"; then
+ echo "$(basename "$BINARY") --version output does not match '$VERSION': $out"
+ exit 4
+fi
diff --git a/tools/chromiumos/gen_autosuspend_rules.py b/tools/chromiumos/gen_autosuspend_rules.py
new file mode 100644
index 0000000..cbdd577
--- /dev/null
+++ b/tools/chromiumos/gen_autosuspend_rules.py
@@ -0,0 +1,339 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: BSD-3-Clause
+# -*- coding: utf-8 -*-
+
+# Copyright 2017 The Chromium OS Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSES/BSD-3-Clause.txt file.
+
+"""Autosuspend udev rule generator
+
+This script is executed at build time to generate udev rules. The
+resulting rules file is installed on the device, the script itself
+is not.
+"""
+
+# List of USB devices (vendorid:productid) for which it is safe to enable
+# autosuspend.
+USB_IDS = []
+
+# Host Controllers and internal hubs
+USB_IDS += [
+ # Linux Host Controller (UHCI) (most older x86 boards)
+ '1d6b:0001',
+ # Linux Host Controller (EHCI) (all boards)
+ '1d6b:0002',
+ # Linux Host Controller (XHCI) (most newer boards)
+ '1d6b:0003',
+ # SMSC (Internal HSIC Hub) (most Exynos boards)
+ '0424:3503',
+ # Intel (Rate Matching Hub) (all x86 boards)
+ '05e3:0610',
+ # Intel (Internal Hub?) (peppy, falco)
+ '8087:0024',
+ # Genesys Logic (Internal Hub) (rambi)
+ '8087:8000',
+ # Microchip (Composite HID + CDC) (kefka)
+ '04d8:0b28',
+]
+
+# Webcams
+USB_IDS += [
+ # Chicony (zgb)
+ '04f2:b1d8',
+ # Chicony (mario)
+ '04f2:b262',
+ # Chicony (stout)
+ '04f2:b2fe',
+ # Chicony (butterfly)
+ '04f2:b35f',
+ # Chicony (rambi)
+ '04f2:b443',
+ # Chicony (glados)
+ '04f2:b552',
+ # LiteOn (spring)
+ '058f:b001',
+ # Foxlink? (butterfly)
+ '05c8:0351',
+ # Foxlink? (butterfly)
+ '05c8:0355',
+ # Cheng Uei? (falco)
+ '05c8:036e',
+ # SuYin (parrot)
+ '064e:d251',
+ # Realtek (falco)
+ '0bda:571c',
+ # IMC Networks (squawks)
+ '13d3:5657',
+ # Sunplus (parrot)
+ '1bcf:2c17',
+ # (C-13HDO10B39N) (alex)
+ '2232:1013',
+ # (C-10HDP11538N) (lumpy)
+ '2232:1017',
+ # (Namuga) (link)
+ '2232:1033',
+ # (C-03FFM12339N) (daisy)
+ '2232:1037',
+ # (C-10HDO13531N) (peach)
+ '2232:1056',
+ # (NCM-G102) (samus)
+ '2232:6001',
+ # Acer (stout)
+ '5986:0299',
+]
+
+# Bluetooth Host Controller
+USB_IDS += [
+ # Hon-hai (parrot)
+ '0489:e04e',
+ # Hon-hai (peppy)
+ '0489:e056',
+ # Hon-hai (Kahlee)
+ '0489:e09f',
+ # QCA6174A (delan)
+ '0489:e0a2',
+ # LiteOn (parrot)
+ '04ca:3006',
+ # LiteOn (aleena)
+ '04ca:3016',
+ # LiteOn (scarlet)
+ '04ca:301a',
+ # Realtek (blooglet)
+ '0bda:b00c',
+ # Atheros (stumpy, stout)
+ '0cf3:3004',
+ # Atheros (AR3011) (mario, alex, zgb)
+ '0cf3:3005',
+    # Atheros (stumpy)
+ '0cf3:3007',
+ # Atheros (butterfly)
+ '0cf3:311e',
+ # Atheros (scarlet)
+ '0cf3:e300',
+ # Marvell (rambi)
+ '1286:2046',
+ # Marvell (gru)
+ '1286:204e',
+ # Intel (rambi, samus)
+ '8087:07dc',
+ # Intel (strago, glados)
+ '8087:0a2a',
+ # Intel (octopus)
+ '8087:0aaa',
+ # Intel (hatch)
+ '8087:0026',
+ # Intel (atlas)
+ '8087:0025',
+]
+
+# WWAN (LTE)
+USB_IDS += [
+ # Huawei (ME936) (kip)
+ '12d1:15bb',
+ # Fibocom (L850-GL) (coral, nautilus, sarien)
+ '2cb7:0007',
+ # Fibocom (NL668, NL652)
+ '2cb7:01a0',
+]
+
+# Mass Storage
+USB_IDS += [
+ # Genesys (SD card reader) (lumpy, link, peppy)
+ '05e3:0727',
+ # Realtek (SD card reader) (mario, alex)
+ '0bda:0138',
+ # Realtek (SD card reader) (helios)
+ '0bda:0136',
+ # Realtek (SD card reader) (falco)
+ '0bda:0177',
+]
+
+# Security Key
+USB_IDS += [
+ # Yubico.com
+ '1050:0211',
+ # Yubico.com (HID firmware)
+ '1050:0200',
+ # Google Titan key
+ '18d1:5026',
+]
+
+# USB Audio devices
+USB_IDS += [
+ # Google USB-C to 3.5mm Digital Headphone Jack Adapter 'Mir'
+ '18d1:5025',
+ # Google USB-C to 3.5mm Digital Headphone Jack Adapter 'Mir' (HID only)
+ '18d1:5029',
+ # Google USB-C to 3.5mm Digital Headphone Jack Adapter 2018 'Condor'
+ '18d1:5034',
+ # Google Pixel USB-C Earbuds 'Blackbird'
+ '18d1:5033',
+ # Libratone Q Adapt In-Ear USB-C Earphones, Made for Google
+ '03eb:2433',
+ # Moshi USB-C to 3.5 mm Adapter/Charger, Made for Google
+ '282b:48f0',
+ # Moshi USB-C to 3.5 mm Adapter/Charger, Made for Google (HID only)
+ '282b:0026',
+ # AiAiAi TMA-2 C60 Cable, Made for Google
+ '0572:1a08',
+ # Apple USB-C to 3.5mm Headphone Jack Adapter
+ '05ac:110a',
+]
+
+# List of PCI devices (vendorid:deviceid) for which it is safe to enable
+# autosuspend.
+PCI_IDS = []
+
+# Intel
+PCI_IDS += [
+ # Host bridge
+ '8086:590c',
+ # i915
+ '8086:591e',
+ # proc_thermal
+ '8086:1903',
+ # SPT PCH xHCI controller
+ '8086:9d2f',
+ # CNP PCH xHCI controller
+ '8086:9ded',
+ # intel_pmc_core
+ '8086:9d21',
+ # i801_smbus
+ '8086:9d23',
+ # iwlwifi
+ '8086:095a',
+ # GMM
+ '8086:1911',
+ # Thermal
+ '8086:9d31',
+ # MME
+ '8086:9d3a',
+ # CrOS EC
+ '8086:9d4b',
+ # PCH SPI
+ '8086:9d24',
+ # SATA
+ '8086:02d3',
+ # RAM memory
+ '8086:02ef',
+ # ISA bridge
+ '8086:0284',
+ # Communication controller
+ '8086:02e0',
+ # Network controller
+ '8086:02f0',
+ # Serial bus controller
+ '8086:02a4',
+ # USB controller
+ '8086:02ed',
+ # Volteer xHCI controller
+ '8086:a0ed',
+ # Graphics
+ '8086:9b41',
+ # DSP
+ '8086:02f9',
+ # Host bridge
+ '8086:9b61',
+ # Host bridge
+ '8086:9b71',
+ # PCI Bridge
+ '8086:02b0',
+ # i915 (atlas)
+ '8086:591c',
+ # iwlwifi (atlas)
+ '8086:2526',
+ # i915 (kefka)
+ '8086:22b1',
+ # proc_thermal (kefka)
+ '8086:22dc',
+ # xchi_hdc (kefka)
+ '8086:22b5',
+ # snd_hda (kefka)
+ '8086:2284',
+ # pcieport (kefka)
+ '8086:22c8',
+ '8086:22cc',
+ # lpc_ich (kefka)
+ '8086:229c',
+ # iosf_mbi_pci (kefka)
+ '8086:2280',
+]
+
+# Samsung
+PCI_IDS += [
+ # NVMe KUS030205M-B001
+ '144d:a806',
+ # NVMe MZVLB256HAHQ
+ '144d:a808',
+]
+
+# Lite-on
+PCI_IDS += [
+ # 3C07110288
+ '14a4:9100',
+]
+
+# Seagate
+PCI_IDS += [
+ # ZP256CM30011
+ '7089:5012',
+]
+
+# Kingston
+PCI_IDS += [
+ # RBUSNS8154P3128GJ3
+ '2646:5008',
+]
+
+# Do not edit below this line. #################################################
+
+UDEV_RULE = """\
+ACTION!="add", GOTO="autosuspend_end"
+SUBSYSTEM!="i2c|pci|usb", GOTO="autosuspend_end"
+
+SUBSYSTEM=="i2c", GOTO="autosuspend_i2c"
+SUBSYSTEM=="pci", GOTO="autosuspend_pci"
+SUBSYSTEM=="usb", GOTO="autosuspend_usb"
+
+# I2C rules
+LABEL="autosuspend_i2c"
+ATTR{name}=="cyapa", ATTR{power/control}="on", GOTO="autosuspend_end"
+GOTO="autosuspend_end"
+
+# PCI rules
+LABEL="autosuspend_pci"
+%(pci_rules)s\
+GOTO="autosuspend_end"
+
+# USB rules
+LABEL="autosuspend_usb"
+%(usb_rules)s\
+GOTO="autosuspend_end"
+
+# Enable autosuspend
+LABEL="autosuspend_enable"
+TEST=="power/control", ATTR{power/control}="auto", GOTO="autosuspend_end"
+
+LABEL="autosuspend_end"
+"""
+
+
+def main():
+ pci_rules = ''
+ for dev_ids in PCI_IDS:
+ vendor, device = dev_ids.split(':')
+ pci_rules += ('ATTR{vendor}=="0x%s", ATTR{device}=="0x%s", '
+ 'GOTO="autosuspend_enable"\n' % (vendor, device))
+
+ usb_rules = ''
+ for dev_ids in USB_IDS:
+ vid, pid = dev_ids.split(':')
+ usb_rules += ('ATTR{idVendor}=="%s", ATTR{idProduct}=="%s", '
+ 'GOTO="autosuspend_enable"\n' % (vid, pid))
+
+ print(UDEV_RULE % {'pci_rules': pci_rules, 'usb_rules': usb_rules})
+
+
+if __name__ == '__main__':
+ main()
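As an illustration of what main() emits, a minimal sketch of the rule generated for a single PCI entry, taking '8086:591e' from the PCI_IDS list above (the output shape follows the pci_rules formatting in main()):

    # Sketch only: mirrors the pci_rules string formatting in main() above.
    vendor, device = '8086:591e'.split(':')
    print('ATTR{vendor}=="0x%s", ATTR{device}=="0x%s", GOTO="autosuspend_enable"'
          % (vendor, device))
    # -> ATTR{vendor}=="0x8086", ATTR{device}=="0x591e", GOTO="autosuspend_enable"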
diff --git a/tools/coverity.sh b/tools/coverity.sh
new file mode 100755
index 0000000..361376f
--- /dev/null
+++ b/tools/coverity.sh
@@ -0,0 +1,62 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+set -eux
+
+COVERITY_SCAN_TOOL_BASE="/tmp/coverity-scan-analysis"
+COVERITY_SCAN_PROJECT_NAME="systemd/systemd"
+
+function coverity_install_script {
+ local platform tool_url tool_archive
+
+ platform=$(uname)
+ tool_url="https://scan.coverity.com/download/${platform}"
+ tool_archive="/tmp/cov-analysis-${platform}.tgz"
+
+ set +x # this is supposed to hide COVERITY_SCAN_TOKEN
+ echo -e "\033[33;1mDownloading Coverity Scan Analysis Tool...\033[0m"
+ wget -nv -O "$tool_archive" "$tool_url" --post-data "project=$COVERITY_SCAN_PROJECT_NAME&token=${COVERITY_SCAN_TOKEN:?}"
+ set -x
+
+ mkdir -p "$COVERITY_SCAN_TOOL_BASE"
+ pushd "$COVERITY_SCAN_TOOL_BASE"
+ tar xzf "$tool_archive"
+ popd
+}
+
+function run_coverity {
+ local results_dir tool_dir results_archive sha response status_code
+
+ results_dir="cov-int"
+ tool_dir=$(find "$COVERITY_SCAN_TOOL_BASE" -type d -name 'cov-analysis*')
+ results_archive="analysis-results.tgz"
+ sha=$(git rev-parse --short HEAD)
+
+ meson -Dman=false build
+ COVERITY_UNSUPPORTED=1 "$tool_dir/bin/cov-build" --dir "$results_dir" sh -c "ninja -C ./build -v"
+ "$tool_dir/bin/cov-import-scm" --dir "$results_dir" --scm git --log "$results_dir/scm_log.txt"
+
+ tar czf "$results_archive" "$results_dir"
+
+ set +x # this is supposed to hide COVERITY_SCAN_TOKEN
+ echo -e "\033[33;1mUploading Coverity Scan Analysis results...\033[0m"
+ response=$(curl \
+ --silent --write-out "\n%{http_code}\n" \
+ --form project="$COVERITY_SCAN_PROJECT_NAME" \
+ --form token="${COVERITY_SCAN_TOKEN:?}" \
+ --form email="${COVERITY_SCAN_NOTIFICATION_EMAIL:?}" \
+ --form file="@$results_archive" \
+ --form version="$sha" \
+ --form description="Daily build" \
+ https://scan.coverity.com/builds)
+ printf "\033[33;1mThe response is\033[0m\n%s\n" "$response"
+ status_code=$(echo "$response" | sed -n '$p')
+ if [ "$status_code" != "200" ]; then
+ echo -e "\033[33;1mCoverity Scan upload failed: $(echo "$response" | sed '$d').\033[0m"
+ return 1
+ fi
+ set -x
+}
+
+coverity_install_script
+run_coverity
diff --git a/tools/dbus_exporter.py b/tools/dbus_exporter.py
new file mode 100755
index 0000000..f94f261
--- /dev/null
+++ b/tools/dbus_exporter.py
@@ -0,0 +1,42 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+from argparse import ArgumentParser
+from pathlib import Path
+from subprocess import run, PIPE
+
+def extract_interfaces_xml(output_dir, executable):
+ proc = run(
+ args=[executable.absolute(), '--bus-introspect', 'list'],
+ stdout=PIPE,
+ check=True,
+ universal_newlines=True)
+
+ interface_names = (x.split()[1] for x in proc.stdout.splitlines())
+
+ for interface_name in interface_names:
+ proc = run(
+ args=[executable.absolute(), '--bus-introspect', interface_name],
+ stdout=PIPE,
+ check=True,
+ universal_newlines=True)
+
+ interface_file_name = output_dir / (interface_name + '.xml')
+ interface_file_name.write_text(proc.stdout)
+ interface_file_name.chmod(0o644)
+
+def main():
+ parser = ArgumentParser()
+ parser.add_argument('output',
+ type=Path)
+ parser.add_argument('executables',
+ nargs='+',
+ type=Path)
+
+ args = parser.parse_args()
+
+ args.output.mkdir(exist_ok=True)
+ for exe in args.executables:
+ extract_interfaces_xml(args.output, exe)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/debug-sd-boot.sh b/tools/debug-sd-boot.sh
new file mode 100755
index 0000000..1bd2cc4
--- /dev/null
+++ b/tools/debug-sd-boot.sh
@@ -0,0 +1,85 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+set -e
+
+if [[ $# -lt 2 ]]; then
+ echo "Usage: ${0} TARGET INPUT [GDBSCRIPT]"
+ echo "Debug systemd-boot/stub in QEMU."
+ echo
+ echo "TARGET should point to the EFI binary to be examined inside the"
+ echo "build directory (systemd-boot\$ARCH.efi or linux\$arch.efi.stub)."
+ echo
+ echo "INPUT should point to the QEMU serial output pipe. This is used to"
+ echo "extract the location of the symbols. For this to work, QEMU must"
+ echo "be run with '-s -serial pipe:PATH'. Note that QEMU will append"
+ echo ".in/.out to the path, while this script expects the out pipe directly."
+ echo
+ echo "If GDBSCRIPT is empty, gdb is run directly attached to the boot"
+ echo "loader, otherwise a script is generated in the given path that allows"
+ echo "attaching manually like this:"
+ echo " (gdb) source GDBSCRIPT"
+ echo " (gdb) target remote :1234"
+ echo
+ echo "Example usage:"
+ echo " mkfifo /tmp/sdboot.{in,out}"
+ echo " qemu-system-x86_64 [...] -s -serial pipe:/tmp/sdboot"
+ echo " ./tools/debug-sd-boot.sh ./build/src/boot/efi/systemd-bootx64.efi \\"
+ echo " /tmp/sdboot.out"
+ exit 1
+fi
+
+binary=$(realpath "${1}")
+if [[ "${1}" =~ systemd-boot([[:alnum:]]+).efi ]]; then
+ target="systemd-boot"
+ symbols=$(realpath "${1%efi}elf")
+elif [[ "${1}" =~ linux([[:alnum:]]+).efi.stub ]]; then
+ target="systemd-stub"
+ symbols=$(realpath "${1%efi.stub}elf.stub")
+else
+ echo "Cannot detect EFI binary '${1}'."
+ exit 1
+fi
+
+case "${BASH_REMATCH[1]}" in
+ ia32) arch="i386";;
+ x64) arch="i386:x86-64";;
+ aa64) arch="aarch64";;
+ arm|riscv64) arch="${BASH_REMATCH[1]}";;
+ *)
+ echo "Unknown EFI arch '${BASH_REMATCH[1]}'."
+ exit 1
+esac
+
+# systemd-boot will print out a line like this to inform us where gdb is supposed to
+# look for the .text and .data sections:
+# systemd-boot@0x0,0x0
+while read -r line; do
+ if [[ "${line}" =~ ${target}@(0x[[:xdigit:]]+),(0x[[:xdigit:]]+) ]]; then
+ text="${BASH_REMATCH[1]}"
+ data="${BASH_REMATCH[2]}"
+ break
+ fi
+done < "${2}"
+
+if [[ -z "${text}" || -z "${data}" ]]; then
+ echo "Could not determine text and data location."
+ exit 1
+fi
+
+if [[ -z "${3}" ]]; then
+ gdb_script=$(mktemp /tmp/debug-sd-boot.XXXXXX.gdb)
+ trap 'rm -f "${gdb_script}"' EXIT
+else
+ gdb_script="${3}"
+fi
+
+echo "file ${binary}
+add-symbol-file ${symbols} ${text} -s .data ${data}
+set architecture ${arch}" > "${gdb_script}"
+
+if [[ -z "${3}" ]]; then
+ gdb -x "${gdb_script}" -ex "target remote :1234"
+else
+ echo "GDB script written to '${gdb_script}'."
+fi
diff --git a/tools/find-build-dir.sh b/tools/find-build-dir.sh
new file mode 100755
index 0000000..79a79fc
--- /dev/null
+++ b/tools/find-build-dir.sh
@@ -0,0 +1,33 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+
+# Try to guess the build directory:
+# we look for subdirectories of the parent directory that look like ninja build dirs.
+
+if [ -n "${BUILD_DIR:=}" ]; then
+ realpath "$BUILD_DIR"
+ exit 0
+fi
+
+root="$(dirname "$(realpath "$0")")"
+
+found=
+for i in "$root"/../*/build.ninja; do
+ c="$(dirname "$i")"
+ [ -d "$c" ] || continue
+ [ "$(basename "$c")" != mkosi.builddir ] || continue
+
+ if [ -n "$found" ]; then
+ echo "Found multiple candidates, specify build directory with \$BUILD_DIR" >&2
+ exit 2
+ fi
+ found="$c"
+done
+
+if [ -z "$found" ]; then
+ echo "Specify build directory with \$BUILD_DIR" >&2
+ exit 1
+fi
+
+realpath "$found"
diff --git a/tools/find-double-newline.sh b/tools/find-double-newline.sh
new file mode 100755
index 0000000..2999a58
--- /dev/null
+++ b/tools/find-double-newline.sh
@@ -0,0 +1,43 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+set -eu
+
+TOP="$(git rev-parse --show-toplevel)"
+
+case "${1:-}" in
+ recdiff)
+ if [ "${2:-}" = "" ] ; then
+ DIR="$TOP"
+ else
+ DIR="$2"
+ fi
+
+ find "$DIR" -type f \( -name '*.[ch]' -o -name '*.xml' \) -exec "$0" diff \{\} \;
+ ;;
+
+ recpatch)
+ if [ "${2:-}" = "" ] ; then
+ DIR="$TOP"
+ else
+ DIR="$2"
+ fi
+
+ find "$DIR" -type f \( -name '*.[ch]' -o -name '*.xml' \) -exec "$0" patch \{\} \;
+ ;;
+
+ diff)
+ T="$(mktemp)"
+ sed '/^$/N;/^\n$/D' <"${2:?}" >"$T"
+ diff -u "$2" "$T"
+ rm -f "$T"
+ ;;
+
+ patch)
+ sed -i '/^$/N;/^\n$/D' "${2:?}"
+ ;;
+
+ *)
+ echo "Expected recdiff|recpatch|diff|patch as verb." >&2
+ ;;
+esac
diff --git a/tools/find-tabs.sh b/tools/find-tabs.sh
new file mode 100755
index 0000000..6cea339
--- /dev/null
+++ b/tools/find-tabs.sh
@@ -0,0 +1,43 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+set -eu
+
+TOP="$(git rev-parse --show-toplevel)"
+
+case "${1:-}" in
+ recdiff)
+ if [ "${2:-}" = "" ] ; then
+ DIR="$TOP"
+ else
+ DIR="$2"
+ fi
+
+ find "$DIR" -type f \( -name '*.[ch]' -o -name '*.xml' \) -exec "$0" diff \{\} \;
+ ;;
+
+ recpatch)
+ if [ "${2:-}" = "" ] ; then
+ DIR="$TOP"
+ else
+ DIR="$2"
+ fi
+
+ find "$DIR" -type f \( -name '*.[ch]' -o -name '*.xml' \) -exec "$0" patch \{\} \;
+ ;;
+
+ diff)
+ T="$(mktemp)"
+ sed 's/\t/ /g' <"${2:?}" >"$T"
+ diff -u "$2" "$T"
+ rm -f "$T"
+ ;;
+
+ patch)
+ sed -i 's/\t/ /g' "${2:?}"
+ ;;
+
+ *)
+ echo "Expected recdiff|recpatch|diff|patch as verb." >&2
+ ;;
+esac
diff --git a/tools/gdb-sd_dump_hashmaps.py b/tools/gdb-sd_dump_hashmaps.py
new file mode 100644
index 0000000..d2388b7
--- /dev/null
+++ b/tools/gdb-sd_dump_hashmaps.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+import gdb
+
+class sd_dump_hashmaps(gdb.Command):
+ "dump systemd's hashmaps"
+
+ def __init__(self):
+ super().__init__("sd_dump_hashmaps", gdb.COMMAND_DATA, gdb.COMPLETE_NONE)
+
+ def invoke(self, arg, from_tty):
+ d = gdb.parse_and_eval("hashmap_debug_list")
+ hashmap_type_info = gdb.parse_and_eval("hashmap_type_info")
+ uchar_t = gdb.lookup_type("unsigned char")
+ ulong_t = gdb.lookup_type("unsigned long")
+ debug_offset = gdb.parse_and_eval("(unsigned long)&((HashmapBase*)0)->debug")
+
+ print("type, hash, indirect, entries, max_entries, buckets, creator")
+ while d:
+ h = gdb.parse_and_eval(f"(HashmapBase*)((char*){int(d.cast(ulong_t))} - {debug_offset})")
+
+ if h["has_indirect"]:
+ storage_ptr = h["indirect"]["storage"].cast(uchar_t.pointer())
+ n_entries = h["indirect"]["n_entries"]
+ n_buckets = h["indirect"]["n_buckets"]
+ else:
+ storage_ptr = h["direct"]["storage"].cast(uchar_t.pointer())
+ n_entries = h["n_direct_entries"]
+ n_buckets = hashmap_type_info[h["type"]]["n_direct_buckets"]
+
+ t = ["plain", "ordered", "set"][int(h["type"])]
+
+ print(f'{t}, {h["hash_ops"]}, {bool(h["has_indirect"])}, {n_entries}, {d["max_entries"]}, {n_buckets}, {d["func"].string()}, {d["file"].string()}:{d["line"]}')
+
+ if arg != "" and n_entries > 0:
+ dib_raw_addr = storage_ptr + hashmap_type_info[h["type"]]["entry_size"] * n_buckets
+
+ histogram = {}
+ for i in range(0, n_buckets):
+ dib = int(dib_raw_addr[i])
+ histogram[dib] = histogram.get(dib, 0) + 1
+
+ for dib in sorted(histogram):
+ if dib != 255:
+ print(f"{dib:>3} {histogram[dib]:>8} {float(histogram[dib]/n_entries):.0%} of entries")
+ else:
+ print(f"{dib:>3} {histogram[dib]:>8} {float(histogram[dib]/n_buckets):.0%} of slots")
+ s = sum(dib*count for (dib, count) in histogram.items() if dib != 255) / n_entries
+ print(f"mean DIB of entries: {s}")
+
+ blocks = []
+ current_len = 1
+ prev = int(dib_raw_addr[0])
+ for i in range(1, n_buckets):
+ dib = int(dib_raw_addr[i])
+ if (dib == 255) != (prev == 255):
+ if prev != 255:
+ blocks += [[i, current_len]]
+ current_len = 1
+ else:
+ current_len += 1
+
+ prev = dib
+ if prev != 255:
+ blocks += [[i, current_len]]
+ # a block may be wrapped around
+ if len(blocks) > 1 and blocks[0][0] == blocks[0][1] and blocks[-1][0] == n_buckets - 1:
+ blocks[0][1] += blocks[-1][1]
+ blocks = blocks[0:-1]
+ print("max block: {}".format(max(blocks, key=lambda a: a[1])))
+ print("sum block lens: {}".format(sum(b[1] for b in blocks)))
+ print("mean block len: {}".format(sum(b[1] for b in blocks) / len(blocks)))
+
+ d = d["debug_list_next"]
+
+sd_dump_hashmaps()
diff --git a/tools/generate-gperfs.py b/tools/generate-gperfs.py
new file mode 100755
index 0000000..d240b2c
--- /dev/null
+++ b/tools/generate-gperfs.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+"""
+Generate %-from-name.gperf from %-list.txt
+"""
+
+import sys
+
+name, prefix, input = sys.argv[1:]
+
+print("""\
+%{
+#if __GNUC__ >= 7
+_Pragma("GCC diagnostic ignored \\"-Wimplicit-fallthrough\\"")
+#endif
+%}""")
+print("""\
+struct {}_name {{ const char* name; int id; }};
+%null-strings
+%%""".format(name))
+
+for line in open(input):
+ print("{0}, {1}{0}".format(line.rstrip(), prefix))
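To make the per-line format concrete, a small sketch with a made-up list entry and prefix (both purely illustrative, not taken from any real *-list.txt):

    # Sketch only: mirrors the print() in the loop above.
    line, prefix = 'EXAMPLE\n', 'PREFIX_'
    print('{0}, {1}{0}'.format(line.rstrip(), prefix))
    # -> EXAMPLE, PREFIX_EXAMPLE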
diff --git a/tools/git-contrib.sh b/tools/git-contrib.sh
new file mode 100755
index 0000000..cde1ecd
--- /dev/null
+++ b/tools/git-contrib.sh
@@ -0,0 +1,10 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+
+tag="$(git describe --abbrev=0 --match 'v[0-9][0-9][0-9]')"
+git log --pretty=tformat:%aN -s "${tag}.." |
+ grep -v noreply@weblate.org |
+ sed 's/ / /g; s/--/-/g; s/.*/\0,/' |
+ sort -u | tr '\n' ' ' | sed -e "s/^/Contributions from: /g" -e "s/,\s*$/\n/g" | fold -w 72 -s |
+ sed -e "s/^/ /g" -e "s/\s*$//g"
diff --git a/tools/list-discoverable-partitions.py b/tools/list-discoverable-partitions.py
new file mode 100644
index 0000000..153c904
--- /dev/null
+++ b/tools/list-discoverable-partitions.py
@@ -0,0 +1,192 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+import re
+import sys
+import uuid
+
+HEADER = f'''\
+| Name | Partition Type UUID | Allowed File Systems | Explanation |
+|------|---------------------|----------------------|-------------|
+'''
+
+ARCHITECTURES = {
+ 'ALPHA': 'Alpha',
+ 'ARC': 'ARC',
+ 'ARM': '32-bit ARM',
+ 'ARM64': '64-bit ARM/AArch64',
+ 'IA64': 'Itanium/IA-64',
+ 'LOONGARCH64': 'LoongArch 64-bit',
+ 'MIPS_LE': '32-bit MIPS LittleEndian (mipsel)',
+ 'MIPS64_LE': '64-bit MIPS LittleEndian (mips64el)',
+ 'PARISC': 'HPPA/PARISC',
+ 'PPC': '32-bit PowerPC',
+ 'PPC64': '64-bit PowerPC BigEndian',
+ 'PPC64_LE': '64-bit PowerPC LittleEndian',
+ 'RISCV32': 'RISC-V 32-bit',
+ 'RISCV64': 'RISC-V 64-bit',
+ 'S390': 's390',
+ 'S390X': 's390x',
+ 'TILEGX': 'TILE-Gx',
+ 'X86': 'x86',
+ 'X86_64': 'amd64/x86_64',
+}
+
+TYPES = {
+ 'ROOT' : 'Root Partition',
+ 'ROOT_VERITY' : 'Root Verity Partition',
+ 'ROOT_VERITY_SIG' : 'Root Verity Signature Partition',
+ 'USR' : '`/usr/` Partition',
+ 'USR_VERITY' : '`/usr/` Verity Partition',
+ 'USR_VERITY_SIG' : '`/usr/` Verity Signature Partition',
+
+ 'ESP': 'EFI System Partition',
+ 'SRV': 'Server Data Partition',
+ 'VAR': 'Variable Data Partition',
+ 'TMP': 'Temporary Data Partition',
+ 'SWAP': 'Swap',
+ 'HOME': 'Home Partition',
+ 'USER_HOME': 'Per-user Home Partition',
+ 'LINUX_GENERIC': 'Generic Linux Data Partition',
+ 'XBOOTLDR': 'Extended Boot Loader Partition',
+}
+
+DESCRIPTIONS = {
+ 'ROOT': (
+ 'Any native, optionally in LUKS',
+ 'On systems with matching architecture, the first partition with this type UUID on the disk '
+ 'containing the active EFI ESP is automatically mounted to the root directory `/`. '
+ 'If the partition is encrypted with LUKS or has dm-verity integrity data (see below), the '
+ 'device mapper file will be named `/dev/mapper/root`.'),
+ 'USR': (
+ 'Any native, optionally in LUKS',
+ 'Similar semantics to root partition, but just the `/usr/` partition.'),
+ 'ROOT_VERITY': (
+ 'A dm-verity superblock followed by hash data',
+ 'Contains dm-verity integrity hash data for the matching root partition. If this feature is '
+ 'used the partition UUID of the root partition should be the first 128 bits of the root hash '
+ 'of the dm-verity hash data, and the partition UUID of this dm-verity partition should be the '
+ 'final 128 bits of it, so that the root partition and its Verity partition can be discovered '
+ 'easily, simply by specifying the root hash.'),
+ 'USR_VERITY': (
+ 'A dm-verity superblock followed by hash data',
+ 'Similar semantics to root Verity partition, but just for the `/usr/` partition.'),
+ 'ROOT_VERITY_SIG': (
+ 'A serialized JSON object, see below',
+ 'Contains a root hash and a PKCS#7 signature for it, permitting signed dm-verity GPT images.'),
+ 'USR_VERITY_SIG': (
+ 'A serialized JSON object, see below',
+ 'Similar semantics to root Verity signature partition, but just for the `/usr/` partition.'),
+
+ 'ESP': (
+ 'VFAT',
+ 'The ESP used for the current boot is automatically mounted to `/efi/` (or `/boot/` as '
+ 'fallback), unless a different partition is mounted there (possibly via `/etc/fstab`, or '
+ 'because the Extended Boot Loader Partition — see below — exists) or the directory is '
+ 'non-empty on the root disk. This partition type is defined by the '
+ '[UEFI Specification](http://www.uefi.org/specifications).'),
+ 'XBOOTLDR': (
+ 'Typically VFAT',
+ 'The Extended Boot Loader Partition (XBOOTLDR) used for the current boot is automatically '
+ 'mounted to `/boot/`, unless a different partition is mounted there (possibly via '
+ '`/etc/fstab`) or the directory is non-empty on the root disk. This partition type '
+ 'is defined by the [Boot Loader Specification](https://systemd.io/BOOT_LOADER_SPECIFICATION).'),
+ 'SWAP': (
+ 'Swap, optionally in LUKS',
+ 'All swap partitions on the disk containing the root partition are automatically enabled. '
+ 'If the partition is encrypted with LUKS, the device mapper file will be named '
+ '`/dev/mapper/swap`. This partition type predates the Discoverable Partitions Specification.'),
+ 'HOME': (
+ 'Any native, optionally in LUKS',
+ 'The first partition with this type UUID on the disk containing the root partition is '
+ 'automatically mounted to `/home/`. If the partition is encrypted with LUKS, the device '
+ 'mapper file will be named `/dev/mapper/home`.'),
+ 'SRV': (
+ 'Any native, optionally in LUKS',
+ 'The first partition with this type UUID on the disk containing the root partition is '
+ 'automatically mounted to `/srv/`. If the partition is encrypted with LUKS, the device '
+ 'mapper file will be named `/dev/mapper/srv`.'),
+ 'VAR': (
+ 'Any native, optionally in LUKS',
+ 'The first partition with this type UUID on the disk containing the root partition is '
+ 'automatically mounted to `/var/` — under the condition that its partition UUID matches '
+ 'the first 128 bits of `HMAC-SHA256(machine-id, 0x4d21b016b53445c2a9fb5c16e091fd2d)` '
+ '(i.e. the SHA256 HMAC hash of the binary type UUID keyed by the machine ID as read from '
+ '[`/etc/machine-id`](https://www.freedesktop.org/software/systemd/man/machine-id.html). '
+ 'This special requirement is made because `/var/` (unlike the other partition types '
+ 'listed here) is inherently private to a specific installation and cannot possibly be '
+ 'shared between multiple OS installations on the same disk, and thus should be bound to '
+ 'a specific instance of the OS, identified by its machine ID. If the partition is '
+ 'encrypted with LUKS, the device mapper file will be named `/dev/mapper/var`.'),
+ 'TMP': (
+ 'Any native, optionally in LUKS',
+ 'The first partition with this type UUID on the disk containing the root partition is '
+ 'automatically mounted to `/var/tmp/`. If the partition is encrypted with LUKS, the '
+ 'device mapper file will be named `/dev/mapper/tmp`. Note that the intended mount point '
+ 'is indeed `/var/tmp/`, not `/tmp/`. The latter is typically maintained in memory via '
+ '`tmpfs` and does not require a partition on disk. In some cases it might be '
+ 'desirable to make `/tmp/` persistent too, in which case it is recommended to make it '
+ 'a symlink or bind mount to `/var/tmp/`, thus not requiring its own partition type UUID.'),
+ 'USER_HOME': (
+ 'Any native, optionally in LUKS',
+ 'A home partition of a user, managed by '
+ '[`systemd-homed`](https://www.freedesktop.org/software/systemd/man/systemd-homed.html).'),
+ 'LINUX_GENERIC': (
+ 'Any native, optionally in LUKS',
+ 'No automatic mounting takes place for other Linux data partitions. This partition type '
+ 'should be used for all partitions that carry Linux file systems. The installer needs '
+ 'to mount them explicitly via entries in `/etc/fstab`. Optionally, these partitions may '
+ 'be encrypted with LUKS. This partition type predates the Discoverable Partitions Specification.'),
+}
+
+def extract(file):
+ for line in file:
+ # print(line)
+ m = re.match(r'^#define\s+SD_GPT_(.*SD_ID128_MAKE\(.*\))', line)
+ if not m:
+ continue
+
+ name = line.split()[1]
+ if m2 := re.match(r'^(ROOT|USR)_([A-Z0-9]+|X86_64|PPC64_LE|MIPS_LE|MIPS64_LE)(|_VERITY|_VERITY_SIG)\s+SD_ID128_MAKE\((.*)\)', m.group(1)):
+ type, arch, suffix, u = m2.groups()
+ u = uuid.UUID(u.replace(',', ''))
+ assert arch in ARCHITECTURES, f'{arch} not in f{ARCHITECTURES}'
+ type = f'{type}{suffix}'
+ assert type in TYPES
+
+ yield name, type, arch, u
+
+ elif m2 := re.match(r'(\w+)\s+SD_ID128_MAKE\((.*)\)', m.group(1)):
+ type, u = m2.groups()
+ u = uuid.UUID(u.replace(',', ''))
+ yield name, type, None, u
+
+ else:
+ raise Exception(f'Failed to match: {m.group(1)}')
+
+def generate(defines):
+ prevtype = None
+
+ print(HEADER, end='')
+
+ uuids = set()
+
+ for name, type, arch, uuid in defines:
+ tdesc = TYPES[type]
+ adesc = '' if arch is None else f' ({ARCHITECTURES[arch]})'
+
+ # Let's make sure that we didn't select&paste the same value twice
+ assert uuid not in uuids
+ uuids.add(uuid)
+
+ if type != prevtype:
+ prevtype = type
+ morea, moreb = DESCRIPTIONS[type]
+ else:
+ morea = moreb = 'ditto'
+
+ print(f'| _{tdesc}{adesc}_ | `{uuid}` `{name}` | {morea} | {moreb} |')
+
+if __name__ == '__main__':
+ known = extract(sys.stdin)
+ generate(known)
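For reference, a sketch of the kind of header line extract() expects on stdin. The define name is assumed here for illustration; the UUID shown is the well-known EFI System Partition type GUID:

    # Sketch only: one hypothetical input line and the first regex from extract().
    import re
    line = '#define SD_GPT_ESP SD_ID128_MAKE(c1,2a,73,28,f8,1f,11,d2,ba,4b,00,a0,c9,3e,c9,3b)'
    m = re.match(r'^#define\s+SD_GPT_(.*SD_ID128_MAKE\(.*\))', line)
    print(m.group(1))  # prints 'ESP SD_ID128_MAKE(c1,2a,...)'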
diff --git a/tools/make-autosuspend-rules.py b/tools/make-autosuspend-rules.py
new file mode 100755
index 0000000..633b771
--- /dev/null
+++ b/tools/make-autosuspend-rules.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+# Generate autosuspend rules for devices that have been tested to work properly
+# with autosuspend by the Chromium OS team. Based on
+# https://chromium.googlesource.com/chromiumos/platform2/+/master/power_manager/udev/gen_autosuspend_rules.py
+
+import chromiumos.gen_autosuspend_rules
+
+print('# pci:v<00VENDOR>d<00DEVICE> (8 uppercase hexadecimal digits twice)')
+for entry in chromiumos.gen_autosuspend_rules.PCI_IDS:
+ vendor, device = entry.split(':')
+ vendor = int(vendor, 16)
+ device = int(device, 16)
+ print('pci:v{:08X}d{:08X}*'.format(vendor, device))
+
+print('# usb:v<VEND>p<PROD> (4 uppercase hexadecimal digits twice)')
+for entry in chromiumos.gen_autosuspend_rules.USB_IDS:
+ vendor, product = entry.split(':')
+ vendor = int(vendor, 16)
+ product = int(product, 16)
+ print('usb:v{:04X}p{:04X}*'.format(vendor, product))
+
+print(' ID_AUTOSUSPEND=1')
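As a concrete example, the modalias pattern produced for one USB entry, taking '04f2:b262' from the imported USB_IDS list:

    # Sketch only: mirrors the usb loop above.
    vendor, product = '04f2:b262'.split(':')
    print('usb:v{:04X}p{:04X}*'.format(int(vendor, 16), int(product, 16)))
    # -> usb:v04F2pB262*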
diff --git a/tools/make-directive-index.py b/tools/make-directive-index.py
new file mode 100755
index 0000000..8a29399
--- /dev/null
+++ b/tools/make-directive-index.py
@@ -0,0 +1,174 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+import sys
+import collections
+import re
+from xml_helper import xml_parse, xml_print, tree
+from copy import deepcopy
+
+COLOPHON = '''\
+This index contains {count} entries in {sections} sections,
+referring to {pages} individual manual pages.
+'''
+
+def _extract_directives(directive_groups, formatting, page):
+ t = xml_parse(page)
+ section = t.find('./refmeta/manvolnum').text
+ pagename = t.find('./refmeta/refentrytitle').text
+
+ storopt = directive_groups['options']
+ for variablelist in t.iterfind('.//variablelist'):
+ klass = variablelist.attrib.get('class')
+ searchpath = variablelist.attrib.get('xpath','./varlistentry/term/varname')
+ storvar = directive_groups[klass or 'miscellaneous']
+ # <option>s go in OPTIONS, unless class is specified
+ for xpath, stor in ((searchpath, storvar),
+ ('./varlistentry/term/option',
+ storvar if klass else storopt)):
+ for name in variablelist.iterfind(xpath):
+ text = re.sub(r'([= ]).*', r'\1', name.text).rstrip()
+ if text.startswith('-'):
+ # for options, merge options with and without mandatory arg
+ text = text.partition('=')[0]
+ stor[text].append((pagename, section))
+ if text not in formatting:
+ # use element as formatted display
+ if name.text[-1] in "= '":
+ name.clear()
+ else:
+ name.tail = ''
+ name.text = text
+ formatting[text] = name
+ extra = variablelist.attrib.get('extra-ref')
+ if extra:
+ stor[extra].append((pagename, section))
+ if extra not in formatting:
+ elt = tree.Element("varname")
+ elt.text= extra
+ formatting[extra] = elt
+
+ storfile = directive_groups['filenames']
+ for xpath, absolute_only in (('.//refsynopsisdiv//filename', False),
+ ('.//refsynopsisdiv//command', False),
+ ('.//filename', True)):
+ for name in t.iterfind(xpath):
+ if absolute_only and not (name.text and name.text.startswith('/')):
+ continue
+ if name.attrib.get('index') == 'false':
+ continue
+ name.tail = ''
+ if name.text:
+ if name.text.endswith('*'):
+ name.text = name.text[:-1]
+ if not name.text.startswith('.'):
+ text = name.text.partition(' ')[0]
+ if text != name.text:
+ name.clear()
+ name.text = text
+ if text.endswith('/'):
+ text = text[:-1]
+ storfile[text].append((pagename, section))
+ if text not in formatting:
+ # use element as formatted display
+ formatting[text] = name
+ else:
+ text = ' '.join(name.itertext())
+ storfile[text].append((pagename, section))
+ formatting[text] = name
+
+ for name in t.iterfind('.//constant'):
+ if name.attrib.get('index') == 'false':
+ continue
+ name.tail = ''
+ if name.text.startswith('('): # a cast, strip it
+ name.text = name.text.partition(' ')[2]
+ klass = name.attrib.get('class') or 'constants'
+ storfile = directive_groups[klass]
+ storfile[name.text].append((pagename, section))
+ formatting[name.text] = name
+
+ storfile = directive_groups['specifiers']
+ for name in t.iterfind(".//table[@class='specifiers']//entry/literal"):
+ if name.text[0] != '%' or name.getparent().text is not None:
+ continue
+ if name.attrib.get('index') == 'false':
+ continue
+ storfile[name.text].append((pagename, section))
+ formatting[name.text] = name
+ for name in t.iterfind(".//literal[@class='specifiers']"):
+ storfile[name.text].append((pagename, section))
+ formatting[name.text] = name
+
+def _make_section(template, name, directives, formatting):
+ varlist = template.find(".//*[@id='{}']".format(name))
+ for varname, manpages in sorted(directives.items()):
+ entry = tree.SubElement(varlist, 'varlistentry')
+ term = tree.SubElement(entry, 'term')
+ display = deepcopy(formatting[varname])
+ term.append(display)
+
+ para = tree.SubElement(tree.SubElement(entry, 'listitem'), 'para')
+
+ b = None
+ for manpage, manvolume in sorted(set(manpages)):
+ if b is not None:
+ b.tail = ', '
+ b = tree.SubElement(para, 'citerefentry')
+ c = tree.SubElement(b, 'refentrytitle')
+ c.text = manpage
+ c.attrib['target'] = varname
+ d = tree.SubElement(b, 'manvolnum')
+ d.text = manvolume
+ entry.tail = '\n\n'
+
+def _make_colophon(template, groups):
+ count = 0
+ pages = set()
+ for group in groups:
+ count += len(group)
+ for pagelist in group.values():
+ pages |= set(pagelist)
+
+ para = template.find(".//para[@id='colophon']")
+ para.text = COLOPHON.format(count=count,
+ sections=len(groups),
+ pages=len(pages))
+
+def _make_page(template, directive_groups, formatting):
+ """Create an XML tree from directive_groups.
+
+ directive_groups = {
+ 'class': {'variable': [('manpage', 'manvolume'), ...],
+ 'variable2': ...},
+ ...
+ }
+ """
+ for name, directives in directive_groups.items():
+ _make_section(template, name, directives, formatting)
+
+ _make_colophon(template, directive_groups.values())
+
+ return template
+
+def make_page(template_path, xml_files):
+ "Extract directives from xml_files and return XML index tree."
+ template = xml_parse(template_path)
+ names = [vl.get('id') for vl in template.iterfind('.//variablelist')]
+ directive_groups = {name:collections.defaultdict(list)
+ for name in names}
+ formatting = {}
+ for page in xml_files:
+ try:
+ _extract_directives(directive_groups, formatting, page)
+ except Exception:
+ raise ValueError("failed to process " + page)
+
+ return _make_page(template, directive_groups, formatting)
+
+if __name__ == '__main__':
+ with open(sys.argv[1], 'wb') as f:
+ template_path = sys.argv[2]
+ xml_files = sys.argv[3:]
+ xml = make_page(template_path, xml_files)
+ f.write(xml_print(xml))
diff --git a/tools/make-man-index.py b/tools/make-man-index.py
new file mode 100755
index 0000000..bae36fb
--- /dev/null
+++ b/tools/make-man-index.py
@@ -0,0 +1,111 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+import collections
+import sys
+import re
+from xml_helper import xml_parse, xml_print, tree
+
+MDASH = ' — ' if sys.version_info.major >= 3 else ' -- '
+
+TEMPLATE = '''\
+<refentry id="systemd.index">
+
+ <refentryinfo>
+ <title>systemd.index</title>
+ <productname>systemd</productname>
+ </refentryinfo>
+
+ <refmeta>
+ <refentrytitle>systemd.index</refentrytitle>
+ <manvolnum>7</manvolnum>
+ </refmeta>
+
+ <refnamediv>
+ <refname>systemd.index</refname>
+ <refpurpose>List all manpages from the systemd project</refpurpose>
+ </refnamediv>
+</refentry>
+'''
+
+SUMMARY = '''\
+ <refsect1>
+ <title>See Also</title>
+ <para>
+ <citerefentry><refentrytitle>systemd.directives</refentrytitle><manvolnum>7</manvolnum></citerefentry>
+ </para>
+
+ <para id='counts' />
+ </refsect1>
+'''
+
+COUNTS = '\
+This index contains {count} entries, referring to {pages} individual manual pages.'
+
+
+def check_id(page, t):
+ id = t.getroot().get('id')
+ if not re.search('/' + id + '[.]', page):
+ raise ValueError("id='{}' is not the same as page name '{}'".format(id, page))
+
+def make_index(pages):
+ index = collections.defaultdict(list)
+ for p in pages:
+ t = xml_parse(p)
+ check_id(p, t)
+ section = t.find('./refmeta/manvolnum').text
+ refname = t.find('./refnamediv/refname').text
+ purpose_text = ' '.join(t.find('./refnamediv/refpurpose').itertext())
+ purpose = ' '.join(purpose_text.split())
+ for f in t.findall('./refnamediv/refname'):
+ infos = (f.text, section, purpose, refname)
+ index[f.text[0].upper()].append(infos)
+ return index
+
+def add_letter(template, letter, pages):
+ refsect1 = tree.SubElement(template, 'refsect1')
+ title = tree.SubElement(refsect1, 'title')
+ title.text = letter
+ para = tree.SubElement(refsect1, 'para')
+ for info in sorted(pages, key=lambda info: str.lower(info[0])):
+ refname, section, purpose, realname = info
+
+ b = tree.SubElement(para, 'citerefentry')
+ c = tree.SubElement(b, 'refentrytitle')
+ c.text = refname
+ d = tree.SubElement(b, 'manvolnum')
+ d.text = section
+
+ b.tail = MDASH + purpose # + ' (' + p + ')'
+
+ tree.SubElement(para, 'sbr')
+
+def add_summary(template, indexpages):
+ count = 0
+ pages = set()
+ for group in indexpages:
+ count += len(group)
+ for info in group:
+ refname, section, purpose, realname = info
+ pages.add((realname, section))
+
+ refsect1 = tree.fromstring(SUMMARY)
+ template.append(refsect1)
+
+ para = template.find(".//para[@id='counts']")
+ para.text = COUNTS.format(count=count, pages=len(pages))
+
+def make_page(*xml_files):
+ template = tree.fromstring(TEMPLATE)
+ index = make_index(xml_files)
+
+ for letter in sorted(index):
+ add_letter(template, letter, index[letter])
+
+ add_summary(template, index.values())
+
+ return template
+
+if __name__ == '__main__':
+ with open(sys.argv[1], 'wb') as f:
+ f.write(xml_print(make_page(*sys.argv[2:])))
diff --git a/tools/meson-build.sh b/tools/meson-build.sh
new file mode 100755
index 0000000..ecd558f
--- /dev/null
+++ b/tools/meson-build.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eux
+
+src="$1"
+dst="$2"
+target="$3"
+options="$4"
+CC="$5"
+CXX="$6"
+
+# shellcheck disable=SC2086
+[ -f "$dst/ninja.build" ] || CC="$CC" CXX="$CXX" meson "$src" "$dst" $options
+
+# Locate ninja binary, on CentOS 7 it is called ninja-build, so
+# use that name if available.
+ninja="ninja"
+if command -v ninja-build >/dev/null ; then
+ ninja="ninja-build"
+fi
+
+"$ninja" -C "$dst" "$target"
diff --git a/tools/meson-make-symlink.sh b/tools/meson-make-symlink.sh
new file mode 100755
index 0000000..653a73b
--- /dev/null
+++ b/tools/meson-make-symlink.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+
+SOURCE="${1:?}"
+TARGET="${2:?}"
+
+if [ "${MESON_INSTALL_QUIET:-0}" = 1 ] ; then
+ VERBOSE=""
+else
+ VERBOSE="v"
+fi
+
+# this is needed mostly because $DESTDIR is provided as a variable,
+# and we need to create the target directory...
+
+mkdir -${VERBOSE}p "$(dirname "${DESTDIR:-}$TARGET")"
+if [ "$(dirname "$SOURCE")" = . ] || [ "$(dirname "$SOURCE")" = .. ]; then
+ ln -${VERBOSE}fs -T -- "$SOURCE" "${DESTDIR:-}$TARGET"
+else
+ ln -${VERBOSE}fs -T --relative -- "${DESTDIR:-}$SOURCE" "${DESTDIR:-}$TARGET"
+fi
diff --git a/tools/meson-render-jinja2.py b/tools/meson-render-jinja2.py
new file mode 100755
index 0000000..fbaae59
--- /dev/null
+++ b/tools/meson-render-jinja2.py
@@ -0,0 +1,37 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+import ast
+import os
+import re
+import sys
+
+import jinja2
+
+def parse_config_h(filename):
+ # Parse config.h file generated by meson.
+ ans = {}
+ for line in open(filename):
+ m = re.match(r'#define\s+(\w+)\s+(.*)', line)
+ if not m:
+ continue
+ a, b = m.groups()
+ if b and b[0] in '0123456789"':
+ b = ast.literal_eval(b)
+ ans[a] = b
+ return ans
+
+def render(filename, defines):
+ text = open(filename).read()
+ template = jinja2.Template(text, trim_blocks=True, undefined=jinja2.StrictUndefined)
+ return template.render(defines)
+
+if __name__ == '__main__':
+ defines = parse_config_h(sys.argv[1])
+ defines.update(parse_config_h(sys.argv[2]))
+ output = render(sys.argv[3], defines)
+ with open(sys.argv[4], 'w') as f:
+ f.write(output)
+ f.write('\n')
+ info = os.stat(sys.argv[3])
+ os.chmod(sys.argv[4], info.st_mode)
diff --git a/tools/meson-vcs-tag.sh b/tools/meson-vcs-tag.sh
new file mode 100755
index 0000000..3964a8e
--- /dev/null
+++ b/tools/meson-vcs-tag.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+set -eu
+set -o pipefail
+
+dir="${1:?}"
+fallback="${2:?}"
+
+# Apparently git describe has a bug where it always considers the work-tree
+# dirty when invoked with --git-dir (even though 'git status' is happy). Work
+# around this issue by cd-ing to the source directory.
+cd "$dir"
+# Check that we have either .git/ (a normal clone) or a .git file (a work-tree)
+# and that we don't get confused if a tarball is extracted in a higher-level
+# git repository.
+[ -e .git ] && \
+ git describe --abbrev=7 --dirty=^ 2>/dev/null | sed 's/^v//; s/-rc/~rc/' || \
+ echo "$fallback"
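The sed expression above strips the leading 'v' and rewrites '-rc' to '~rc', presumably so release-candidate versions sort before the final release. A small Python sketch of the same mangling, with a hypothetical `git describe` output:

    # Sketch only: equivalent of sed 's/^v//; s/-rc/~rc/'.
    import re
    desc = 'v252-rc1-123-gabc1234'  # hypothetical describe output
    print(re.sub('-rc', '~rc', re.sub('^v', '', desc), count=1))
    # -> 252~rc1-123-gabc1234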
diff --git a/tools/oss-fuzz.sh b/tools/oss-fuzz.sh
new file mode 100755
index 0000000..793411e
--- /dev/null
+++ b/tools/oss-fuzz.sh
@@ -0,0 +1,144 @@
+#!/usr/bin/env bash
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+set -ex
+
+export LC_CTYPE=C.UTF-8
+
+export CC=${CC:-clang}
+export CXX=${CXX:-clang++}
+clang_version="$($CC --version | sed -nr 's/.*version ([^ ]+?) .*/\1/p' | sed -r 's/-$//')"
+
+SANITIZER=${SANITIZER:-address -fsanitize-address-use-after-scope}
+flags="-O1 -fno-omit-frame-pointer -g -DFUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION -fsanitize=$SANITIZER"
+
+clang_lib="/usr/lib64/clang/${clang_version}/lib/linux"
+[ -d "$clang_lib" ] || clang_lib="/usr/lib/clang/${clang_version}/lib/linux"
+
+export CFLAGS=${CFLAGS:-$flags}
+export CXXFLAGS=${CXXFLAGS:-$flags}
+export LDFLAGS=${LDFLAGS:--L${clang_lib}}
+
+export WORK=${WORK:-$(pwd)}
+export OUT=${OUT:-$(pwd)/out}
+mkdir -p "$OUT"
+
+build="$WORK/build"
+rm -rf "$build"
+mkdir -p "$build"
+
+if [ -z "$FUZZING_ENGINE" ]; then
+ fuzzflag="llvm-fuzz=true"
+else
+ fuzzflag="oss-fuzz=true"
+
+ apt-get update
+ apt-get install -y gperf m4 gettext python3-pip \
+ libcap-dev libmount-dev \
+ pkg-config wget python3-jinja2 zipmerge
+
+ if [[ "$ARCHITECTURE" == i386 ]]; then
+ apt-get install -y pkg-config:i386 libcap-dev:i386 libmount-dev:i386
+ fi
+
+ # gnu-efi is installed here to enable -Dgnu-efi, behind which fuzz-bcd is
+ # hidden. The fuzz target isn't linked against gnu-efi and doesn't even
+ # include "efi.h", because "bcd.c" can be built in "unit test" mode where
+ # gnu-efi isn't necessary.
+ apt-get install -y gnu-efi zstd
+
+ pip3 install -r .github/workflows/requirements.txt --require-hashes
+
+ # https://github.com/google/oss-fuzz/issues/6868
+ ORIG_PYTHONPATH=$(python3 -c 'import sys;print(":".join(sys.path[1:]))')
+ export PYTHONPATH="$ORIG_PYTHONPATH:/usr/lib/python3/dist-packages/"
+
+ if [[ "$SANITIZER" == undefined ]]; then
+ additional_ubsan_checks=pointer-overflow,alignment
+ UBSAN_FLAGS="-fsanitize=$additional_ubsan_checks -fno-sanitize-recover=$additional_ubsan_checks"
+ CFLAGS="$CFLAGS $UBSAN_FLAGS"
+ CXXFLAGS="$CXXFLAGS $UBSAN_FLAGS"
+ fi
+
+ if [[ "$SANITIZER" == introspector ]]; then
+ # fuzz-introspector passes -fuse-ld=gold and -flto using CFLAGS/LDFLAGS and due to
+ # https://github.com/mesonbuild/meson/issues/6377#issuecomment-575977919 and
+ # https://github.com/mesonbuild/meson/issues/6377 it doesn't mix well with meson.
+ # It's possible to build systemd with duct tape there using something like
+ # https://github.com/google/oss-fuzz/pull/7583#issuecomment-1104011067 but
+ # apparently even with gold and lto some parts of systemd are missing from
+ # reports (presumably due to https://github.com/google/oss-fuzz/issues/7598).
+ # Let's just fail here for now to make it clear that fuzz-introspector isn't supported.
+ exit 1
+ fi
+fi
+
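+# -Db_lundef=false keeps meson from passing --no-undefined to the linker, which
+# would otherwise fail for sanitizer-instrumented shared libraries whose
+# sanitizer runtime symbols are only resolved at load time.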
+if ! meson "$build" "-D$fuzzflag" -Db_lundef=false; then
+ cat "$build/meson-logs/meson-log.txt"
+ exit 1
+fi
+
+ninja -v -C "$build" fuzzers
+
+# Compressed BCD files are kept in test/test-bcd so let's unpack them
+# and put them all in the seed corpus.
+bcd=$(mktemp -d)
+for i in test/test-bcd/*.zst; do
+ unzstd "$i" -o "$bcd/$(basename "${i%.zst}")";
+done
+zip -jqr "$OUT/fuzz-bcd_seed_corpus.zip" "$bcd"
+rm -rf "$bcd"
+
+hosts=$(mktemp)
+wget -O "$hosts" https://raw.githubusercontent.com/StevenBlack/hosts/master/hosts
+zip -jq "$OUT/fuzz-etc-hosts_seed_corpus.zip" "$hosts"
+rm -rf "$hosts"
+
+# The seed corpus is a separate flat archive for each fuzzer,
+# with a fixed name ${fuzzer}_seed_corpus.zip.
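+# e.g. test/fuzz/fuzz-unit-file/ ends up as "$OUT/fuzz-unit-file_seed_corpus.zip".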
+for d in test/fuzz/fuzz-*; do
+ zip -jqr "$OUT/$(basename "$d")_seed_corpus.zip" "$d"
+done
+
+# get fuzz-dns-packet corpus
+df="$build/dns-fuzzing"
+git clone --depth 1 https://github.com/CZ-NIC/dns-fuzzing "$df"
+zip -jqr "$OUT/fuzz-dns-packet_seed_corpus.zip" "$df/packet"
+
+install -Dt "$OUT/src/shared/" \
+ "$build"/src/shared/libsystemd-shared-*.so \
+ "$build"/src/core/libsystemd-core-*.so
+
+# Most i386 libraries have to be brought to the runtime environment somehow. Ideally they
+# should be linked statically but since it isn't possible another way to keep them close
+# to the fuzz targets is used here. The dependencies are copied to "$OUT/src/shared" and
+# then `rpath` is tweaked to make it possible for the linker to find them there. "$OUT/src/shared"
+# is chosen because the runtime search path of all the fuzz targets already points to it
+# to load "libsystemd-shared" and "libsystemd-core". Stuff like that should be avoided on
+# x86_64 because it tends to break coverage reports, fuzz-introspector, CIFuzz and so on.
+if [[ "$ARCHITECTURE" == i386 ]]; then
+ for lib_path in $(ldd "$OUT"/src/shared/libsystemd-shared-*.so | perl -lne 'print $1 if m{=>\s+(/lib\S+)}'); do
+ lib_name=$(basename "$lib_path")
+ cp "$lib_path" "$OUT/src/shared"
+ patchelf --set-rpath \$ORIGIN "$OUT/src/shared/$lib_name"
+ done
+ patchelf --set-rpath \$ORIGIN "$OUT"/src/shared/libsystemd-shared-*.so
+fi
+
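+# Fetch an extra dictionary for the JSON fuzzer and install the fuzz targets,
+# their dictionaries and options files into $OUT.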
+wget -O "$OUT/fuzz-json.dict" https://raw.githubusercontent.com/rc0r/afl-fuzz/master/dictionaries/json.dict
+
+find "$build" -maxdepth 1 -type f -executable -name "fuzz-*" -exec mv {} "$OUT" \;
+find src -type f -name "fuzz-*.dict" -exec cp {} "$OUT" \;
+cp src/fuzz/*.options "$OUT"
+
+if [[ "$MERGE_WITH_OSS_FUZZ_CORPORA" == "yes" ]]; then
+ for f in "$OUT/"fuzz-*; do
+ [[ -x "$f" ]] || continue
+ fuzzer=$(basename "$f")
+ t=$(mktemp)
+ if wget -O "$t" "https://storage.googleapis.com/systemd-backup.clusterfuzz-external.appspot.com/corpus/libFuzzer/systemd_${fuzzer}/public.zip"; then
+ zipmerge "$OUT/${fuzzer}_seed_corpus.zip" "$t"
+ fi
+ rm -rf "$t"
+ done
+fi
diff --git a/tools/update-dbus-docs.py b/tools/update-dbus-docs.py
new file mode 100755
index 0000000..473469e
--- /dev/null
+++ b/tools/update-dbus-docs.py
@@ -0,0 +1,347 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+import argparse
+import collections
+import sys
+import os
+import subprocess
+import io
+
+try:
+ from lxml import etree
+except ModuleNotFoundError as e:
+ etree = e
+
+try:
+ from shlex import join as shlex_join
+except ImportError as e:
+ shlex_join = e
+
+try:
+ from shlex import quote as shlex_quote
+except ImportError as e:
+ shlex_quote = e
+
+class NoCommand(Exception):
+ pass
+
+BORING_INTERFACES = [
+ 'org.freedesktop.DBus.Peer',
+ 'org.freedesktop.DBus.Introspectable',
+ 'org.freedesktop.DBus.Properties',
+]
+RED = '\x1b[31m'
+GREEN = '\x1b[32m'
+YELLOW = '\x1b[33m'
+RESET = '\x1b[39m'
+
+def xml_parser():
+ return etree.XMLParser(no_network=True,
+ remove_comments=False,
+ strip_cdata=False,
+ resolve_entities=False)
+
+def print_method(declarations, elem, *, prefix, file, is_signal=False):
+ name = elem.get('name')
+ klass = 'signal' if is_signal else 'method'
+ declarations[klass].append(name)
+
+ # @org.freedesktop.systemd1.Privileged("true")
+ # SetShowStatus(in s mode);
+
+ for anno in elem.findall('./annotation'):
+ anno_name = anno.get('name')
+ anno_value = anno.get('value')
+ print(f'''{prefix}@{anno_name}("{anno_value}")''', file=file)
+
+ print(f'''{prefix}{name}(''', file=file, end='')
+ lead = ',\n' + prefix + ' ' * len(name) + ' '
+
+ for num, arg in enumerate(elem.findall('./arg')):
+ argname = arg.get('name')
+
+ if argname is None:
+ if opts.print_errors:
+ print(f'method {name}: argument {num+1} has no name', file=sys.stderr)
+ argname = 'UNNAMED'
+
+ type = arg.get('type')
+ if not is_signal:
+ direction = arg.get('direction')
+ print(f'''{lead if num > 0 else ''}{direction:3} {type} {argname}''', file=file, end='')
+ else:
+ print(f'''{lead if num > 0 else ''}{type} {argname}''', file=file, end='')
+
+ print(f');', file=file)
+
+ACCESS_MAP = {
+ 'read' : 'readonly',
+ 'write' : 'readwrite',
+}
+
+def value_ellipsis(type):
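+ # Render a placeholder value for the given D-Bus type signature,
+ # e.g. value_ellipsis('s') == "'...'" and value_ellipsis('as') == "['...', ...]".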
+ if type == 's':
+ return "'...'";
+ if type[0] == 'a':
+ inner = value_ellipsis(type[1:])
+ return f"[{inner}{', ...' if inner != '...' else ''}]";
+ return '...'
+
+def print_property(declarations, elem, *, prefix, file):
+ name = elem.get('name')
+ type = elem.get('type')
+ access = elem.get('access')
+
+ declarations['property'].append(name)
+
+ # @org.freedesktop.DBus.Property.EmitsChangedSignal("false")
+ # @org.freedesktop.systemd1.Privileged("true")
+ # readwrite b EnableWallMessages = false;
+
+ for anno in elem.findall('./annotation'):
+ anno_name = anno.get('name')
+ anno_value = anno.get('value')
+ print(f'''{prefix}@{anno_name}("{anno_value}")''', file=file)
+
+ access = ACCESS_MAP.get(access, access)
+ print(f'''{prefix}{access} {type} {name} = {value_ellipsis(type)};''', file=file)
+
+def print_interface(iface, *, prefix, file, print_boring, only_interface, declarations):
+ name = iface.get('name')
+
+ is_boring = (name in BORING_INTERFACES or
+ only_interface is not None and name != only_interface)
+
+ if is_boring and print_boring:
+ print(f'''{prefix}interface {name} {{ ... }};''', file=file)
+
+ elif not is_boring and not print_boring:
+ print(f'''{prefix}interface {name} {{''', file=file)
+ prefix2 = prefix + ' '
+
+ for num, elem in enumerate(iface.findall('./method')):
+ if num == 0:
+ print(f'''{prefix2}methods:''', file=file)
+ print_method(declarations, elem, prefix=prefix2 + ' ', file=file)
+
+ for num, elem in enumerate(iface.findall('./signal')):
+ if num == 0:
+ print(f'''{prefix2}signals:''', file=file)
+ print_method(declarations, elem, prefix=prefix2 + ' ', file=file, is_signal=True)
+
+ for num, elem in enumerate(iface.findall('./property')):
+ if num == 0:
+ print(f'''{prefix2}properties:''', file=file)
+ print_property(declarations, elem, prefix=prefix2 + ' ', file=file)
+
+ print(f'''{prefix}}};''', file=file)
+
+def document_has_elem_with_text(document, elem, item_repr):
+ predicate = f".//{elem}" # [text() = 'foo'] doesn't seem supported :(
+ for loc in document.findall(predicate):
+ if loc.text == item_repr:
+ return True
+ return False
+
+def check_documented(document, declarations, stats):
+ missing = []
+ for klass, items in declarations.items():
+ stats['total'] += len(items)
+
+ for item in items:
+ if klass == 'method':
+ elem = 'function'
+ item_repr = f'{item}()'
+ elif klass == 'signal':
+ elem = 'function'
+ item_repr = item
+ elif klass == 'property':
+ elem = 'varname'
+ item_repr = item
+ else:
+ assert False, (klass, item)
+
+ if not document_has_elem_with_text(document, elem, item_repr):
+ if opts.print_errors:
+ print(f'{klass} {item} is not documented :(')
+ missing.append((klass, item))
+
+ stats['missing'] += len(missing)
+
+ return missing
+
+def xml_to_text(destination, xml, *, only_interface=None):
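+ # Render the introspection XML as pseudo-code, roughly
+ #   node <destination> { interface <name> { methods: ... signals: ... properties: ... }; };
+ # and collect the declared members per kind in 'declarations'.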
+ file = io.StringIO()
+
+ declarations = collections.defaultdict(list)
+ interfaces = []
+
+ print(f'''node {destination} {{''', file=file)
+
+ for print_boring in [False, True]:
+ for iface in xml.findall('./interface'):
+ print_interface(iface, prefix=' ', file=file,
+ print_boring=print_boring,
+ only_interface=only_interface,
+ declarations=declarations)
+ name = iface.get('name')
+ if name not in BORING_INTERFACES:
+ interfaces.append(name)
+
+ print(f'''}};''', file=file)
+
+ return file.getvalue(), declarations, interfaces
+
+def subst_output(document, programlisting, stats):
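+ # Regenerate the introspection dump inside a <programlisting> element that
+ # carries our magic attributes, e.g. (illustrative values):
+ #   <programlisting executable="systemd" node="/org/freedesktop/systemd1"
+ #                   interface="org.freedesktop.systemd1.Manager">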
+ executable = programlisting.get('executable', None)
+ if executable is None:
+ # Not our thing
+ return
+ node = programlisting.get('node')
+ interface = programlisting.get('interface')
+
+ argv = [f'{opts.build_dir}/{executable}', f'--bus-introspect={interface}']
+ if isinstance(shlex_join, Exception):
+ print(f'COMMAND: {" ".join(shlex_quote(arg) for arg in argv)}')
+ else:
+ print(f'COMMAND: {shlex_join(argv)}')
+
+ try:
+ out = subprocess.check_output(argv, universal_newlines=True)
+ except FileNotFoundError:
+ print(f'{executable} not found, ignoring', file=sys.stderr)
+ return
+
+ xml = etree.fromstring(out, parser=xml_parser())
+
+ new_text, declarations, interfaces = xml_to_text(node, xml, only_interface=interface)
+ programlisting.text = '\n' + new_text + ' '
+
+ if declarations:
+ missing = check_documented(document, declarations, stats)
+ parent = programlisting.getparent()
+
+ # delete old comments
+ for child in parent:
+ if (child.tag == etree.Comment
+ and 'Autogenerated' in child.text):
+ parent.remove(child)
+ if (child.tag == etree.Comment
+ and 'not documented' in child.text):
+ parent.remove(child)
+ if (child.tag == "variablelist"
+ and child.attrib.get("generated", False) == "True"):
+ parent.remove(child)
+
+ # insert pointer for systemd-directives generation
+ the_tail = programlisting.tail  # tail is erased by addnext, so save it here.
+ prev_element = etree.Comment("Autogenerated cross-references for systemd.directives, do not edit")
+ programlisting.addnext(prev_element)
+ programlisting.tail = the_tail
+
+ for interface in interfaces:
+ variablelist = etree.Element("variablelist")
+ variablelist.attrib['class'] = 'dbus-interface'
+ variablelist.attrib['generated'] = 'True'
+ variablelist.attrib['extra-ref'] = interface
+
+ prev_element.addnext(variablelist)
+ prev_element.tail = the_tail
+ prev_element = variablelist
+
+ for decl_type, decl_list in declarations.items():
+ for declaration in decl_list:
+ variablelist = etree.Element("variablelist")
+ variablelist.attrib['class'] = 'dbus-'+decl_type
+ variablelist.attrib['generated'] = 'True'
+ if decl_type == 'method':
+ variablelist.attrib['extra-ref'] = declaration + '()'
+ else:
+ variablelist.attrib['extra-ref'] = declaration
+
+ prev_element.addnext(variablelist)
+ prev_element.tail = the_tail
+ prev_element = variablelist
+
+ last_element = etree.Comment("End of Autogenerated section")
+ prev_element.addnext(last_element)
+ prev_element.tail = the_tail
+ last_element.tail = the_tail
+
+ # insert comments for undocumented items
+ for item in reversed(missing):
+ comment = etree.Comment(f'{item[0]} {item[1]} is not documented!')
+ comment.tail = programlisting.tail
+ parent.insert(parent.index(programlisting) + 1, comment)
+
+def process(page):
+ src = open(page).read()
+ xml = etree.fromstring(src, parser=xml_parser())
+
+ # print('parsing {}'.format(name), file=sys.stderr)
+ if xml.tag != 'refentry':
+ return
+
+ stats = collections.Counter()
+
+ pls = xml.findall('.//programlisting')
+ for pl in pls:
+ subst_output(xml, pl, stats)
+
+ out_text = etree.tostring(xml, encoding='unicode')
+ # massage format to avoid some lxml whitespace handling idiosyncrasies
+ # https://bugs.launchpad.net/lxml/+bug/526799
+ out_text = (src[:src.find('<refentryinfo')] +
+ out_text[out_text.find('<refentryinfo'):] +
+ '\n')
+
+ if not opts.test:
+ with open(page, 'w') as out:
+ out.write(out_text)
+
+ return dict(stats=stats, modified=(out_text != src))
+
+def parse_args():
+ p = argparse.ArgumentParser()
+ p.add_argument('--test', action='store_true',
+ help='only verify that everything is up to date')
+ p.add_argument('--build-dir', default='build')
+ p.add_argument('pages', nargs='+')
+ opts = p.parse_args()
+ opts.print_errors = not opts.test
+ return opts
+
+if __name__ == '__main__':
+ opts = parse_args()
+
+ for item in (etree, shlex_quote):
+ if isinstance(item, Exception):
+ print(item, file=sys.stderr)
+ exit(77 if opts.test else 1)
+
+ if not os.path.exists(f'{opts.build_dir}/systemd'):
+ exit(f"{opts.build_dir}/systemd doesn't exist. Use --build-dir=.")
+
+ stats = {page.split('/')[-1] : process(page) for page in opts.pages}
+
+ # Let's print all statistics at the end
+ mlen = max(len(page) for page in stats)
+ total = sum((item['stats'] for item in stats.values()), collections.Counter())
+ total = 'total', dict(stats=total, modified=False)
+ modified = []
+ classification = 'OUTDATED' if opts.test else 'MODIFIED'
+ for page, info in sorted(stats.items()) + [total]:
+ m = info['stats']['missing']
+ t = info['stats']['total']
+ p = page + ':'
+ c = classification if info['modified'] else ''
+ if c:
+ modified.append(page)
+ color = RED if m > t/2 else (YELLOW if m else GREEN)
+ print(f'{color}{p:{mlen + 1}} {t - m}/{t} {c}{RESET}')
+
+ if opts.test and modified:
+ exit(f'Outdated pages: {", ".join(modified)}\n'
+ f'Hint: ninja -C {opts.build_dir} update-dbus-docs')
diff --git a/tools/update-hwdb-autosuspend.sh b/tools/update-hwdb-autosuspend.sh
new file mode 100755
index 0000000..c697730
--- /dev/null
+++ b/tools/update-hwdb-autosuspend.sh
@@ -0,0 +1,17 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+
+cd "${1:?}"
+
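+# Gitiles only serves raw file contents base64-encoded (?format=TEXT), hence the
+# "base64 -d".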
+(curl --fail -L 'https://chromium.googlesource.com/chromiumos/platform2/+/master/power_manager/udev/gen_autosuspend_rules.py?format=TEXT'; echo) \
+ | base64 -d > tools/chromiumos/gen_autosuspend_rules.py
+
+(cat <<%EOF
+# This file is part of systemd.
+#
+# Rules to autosuspend known fingerprint readers (pulled from libfprint).
+#
+%EOF
+curl --fail -L 'https://gitlab.freedesktop.org/libfprint/libfprint/-/raw/master/data/autosuspend.hwdb') \
+ > hwdb.d/60-autosuspend-fingerprint-reader.hwdb
diff --git a/tools/update-hwdb.sh b/tools/update-hwdb.sh
new file mode 100755
index 0000000..abbbb82
--- /dev/null
+++ b/tools/update-hwdb.sh
@@ -0,0 +1,33 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+
+cd "${1:?}"
+
+unset permissive
+if [ "${2:-}" = "-p" ]; then
+ permissive=1
+ shift
+else
+ permissive=0
+fi
+
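+# Download the upstream ID databases in a subshell so that "set +e"/"set -x"
+# stay local to this step; pass "-p" to make download failures non-fatal and
+# "-n" to skip the downloads entirely.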
+if [ "${2:-}" != "-n" ]; then (
+ [ -z "$permissive" ] || set +e
+ set -x
+
+ curl --fail -L -o usb.ids 'http://www.linux-usb.org/usb.ids'
+ curl --fail -L -o pci.ids 'http://pci-ids.ucw.cz/v2.2/pci.ids'
+ curl --fail -L -o ma-large.txt 'http://standards-oui.ieee.org/oui/oui.txt'
+ curl --fail -L -o ma-medium.txt 'http://standards-oui.ieee.org/oui28/mam.txt'
+ curl --fail -L -o ma-small.txt 'http://standards-oui.ieee.org/oui36/oui36.txt'
+ curl --fail -L -o pnp_id_registry.html 'https://uefi.org/uefi-pnp-export'
+ curl --fail -L -o acpi_id_registry.html 'https://uefi.org/uefi-acpi-export'
+) fi
+
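+# Regenerate the ACPI vendor hwdb: render the base file, re-apply the local
+# patch on top of it, and refresh the patch from the result. If base and result
+# are identical the refreshed patch would be empty, so that case is treated as
+# an error.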
+set -x
+./acpi-update.py >20-acpi-vendor.hwdb.base
+patch -p0 -o- 20-acpi-vendor.hwdb.base <20-acpi-vendor.hwdb.patch >20-acpi-vendor.hwdb
+diff -u 20-acpi-vendor.hwdb.base 20-acpi-vendor.hwdb >20-acpi-vendor.hwdb.patch && exit 1
+
+./ids_parser.py
diff --git a/tools/update-man-rules.py b/tools/update-man-rules.py
new file mode 100755
index 0000000..3a8c31d
--- /dev/null
+++ b/tools/update-man-rules.py
@@ -0,0 +1,97 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+from __future__ import print_function
+import collections
+import glob
+import sys
+from pathlib import Path
+import pprint
+from xml_helper import xml_parse
+
+def man(page, number):
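+ # e.g. man('systemd.service', '5') -> 'systemd.service.5'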
+ return '{}.{}'.format(page, number)
+
+def add_rules(rules, name):
+ xml = xml_parse(name)
+ # print('parsing {}'.format(name), file=sys.stderr)
+ if xml.getroot().tag != 'refentry':
+ return
+ conditional = xml.getroot().get('conditional') or ''
+ rulegroup = rules[conditional]
+ refmeta = xml.find('./refmeta')
+ title = refmeta.find('./refentrytitle').text
+ number = refmeta.find('./manvolnum').text
+ refnames = xml.findall('./refnamediv/refname')
+ target = man(refnames[0].text, number)
+ if title != refnames[0].text:
+ raise ValueError('refmeta and refnamediv disagree: ' + name)
+ for refname in refnames:
+ alias = man(refname.text, number)
+ assert all(alias not in group
+ for group in rules.values()), "duplicate page name"
+ rulegroup[alias] = target
+ # print('{} => {} [{}]'.format(alias, target, conditional), file=sys.stderr)
+
+def create_rules(xml_files):
+ " {conditional => {alias-name => source-name}} "
+ rules = collections.defaultdict(dict)
+ for name in xml_files:
+ try:
+ add_rules(rules, name)
+ except Exception:
+ print("Failed to process", name, file=sys.stderr)
+ raise
+ return rules
+
+def mjoin(files):
+ return ' \\\n\t'.join(sorted(files) or '#')
+
+MESON_HEADER = '''\
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+# Do not edit. Generated by update-man-rules.py.
+# Update with:
+# ninja -C build update-man-rules
+manpages = ['''
+
+MESON_FOOTER = '''\
+]
+# Really, do not edit.
+'''
+
+def make_mesonfile(rules, dist_files):
+ # reformat rules as
+ # grouped = [ [name, section, [alias...], condition], ...]
+ #
+ # but first create a dictionary like
+ # lists = { (name, condition) => [alias...]
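+ # A resulting row looks e.g. like ['systemd.service', '5', [], ''].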
+ grouped = collections.defaultdict(list)
+ for condition, items in rules.items():
+ for alias, name in items.items():
+ group = grouped[(name, condition)]
+ if name != alias:
+ group.append(alias)
+
+ lines = [ [p[0][:-2], p[0][-1], sorted(a[:-2] for a in aliases), p[1]]
+ for p, aliases in sorted(grouped.items()) ]
+ return '\n'.join((MESON_HEADER, pprint.pformat(lines)[1:-1], MESON_FOOTER))
+
+if __name__ == '__main__':
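+ # argv[1] is a glob matching the man page sources, argv[2] the meson fragment
+ # to rewrite, e.g.: update-man-rules.py 'man/*.xml' man/rules/meson.build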
+ source_glob = sys.argv[1]
+ target = Path(sys.argv[2])
+
+ pages = glob.glob(source_glob)
+ pages = [p for p in pages
+ if Path(p).name not in {
+ 'systemd.directives.xml',
+ 'systemd.index.xml',
+ 'directives-template.xml'}]
+
+ rules = create_rules(pages)
+ dist_files = (Path(p).name for p in pages)
+ text = make_mesonfile(rules, dist_files)
+
+ tmp = target.with_suffix('.tmp')
+ tmp.write_text(text)
+ tmp.rename(target)
diff --git a/tools/update-syscall-tables.sh b/tools/update-syscall-tables.sh
new file mode 100755
index 0000000..fb861e1
--- /dev/null
+++ b/tools/update-syscall-tables.sh
@@ -0,0 +1,11 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1-or-later
+set -eu
+
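+# The first argument is the output directory; any further arguments are the
+# architectures to fetch per-architecture syscall tables for.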
+cd "${1:?}" && shift
+
+curl --fail -L -o syscall-list.txt 'https://raw.githubusercontent.com/hrw/syscalls-table/master/data/syscall-names.text'
+
+for arch in "$@"; do
+ curl --fail -L -o "syscalls-$arch.txt" "https://raw.githubusercontent.com/hrw/syscalls-table/master/data/tables/syscalls-$arch"
+done
diff --git a/tools/xml_helper.py b/tools/xml_helper.py
new file mode 100755
index 0000000..0361358
--- /dev/null
+++ b/tools/xml_helper.py
@@ -0,0 +1,20 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1-or-later
+
+from lxml import etree as tree
+
+class CustomResolver(tree.Resolver):
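+ # Resolve any reference to custom-entities.ent to man/custom-entities.ent,
+ # regardless of the path the document used to refer to it.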
+ def resolve(self, url, id, context):
+ if 'custom-entities.ent' in url:
+ return self.resolve_filename('man/custom-entities.ent', context)
+
+_parser = tree.XMLParser()
+_parser.resolvers.add(CustomResolver())
+
+def xml_parse(page):
+ doc = tree.parse(page, _parser)
+ doc.xinclude()
+ return doc
+
+def xml_print(xml):
+ return tree.tostring(xml, pretty_print=True, encoding='utf-8')