From 19f4f86bfed21c5326ed2acebe1163f3a83e832b Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Mon, 6 May 2024 04:25:50 +0200
Subject: Adding upstream version 241.

Signed-off-by: Daniel Baumann
---
 tools/add-git-hook.sh            |  12 ++
 tools/catalog-report.py          |  84 +++++++++++
 tools/check-directives.sh        |  21 +++
 tools/check-includes.pl          |  23 +++
 tools/choose-default-locale.sh   |  12 ++
 tools/coverity.sh                | 233 +++++++++++++++++++++++++++++
 tools/find-build-dir.sh          |  32 ++++
 tools/find-double-newline.sh     |  42 ++++++
 tools/find-tabs.sh               |  42 ++++++
 tools/gdb-sd_dump_hashmaps.py    |  79 ++++++++++
 tools/generate-gperfs.py         |  24 +++
 tools/make-directive-index.py    | 307 +++++++++++++++++++++++++++++++++++++++
 tools/make-man-index.py          | 110 ++++++++++++++
 tools/make-man-rules.py          |  84 +++++++++++
 tools/meson-apply-m4.sh          |  24 +++
 tools/meson-build.sh             |  20 +++
 tools/meson-check-api-docs.sh    |  34 +++++
 tools/meson-check-compilation.sh |   4 +
 tools/meson-check-help.sh        |  23 +++
 tools/meson-git-contrib.sh       |   9 ++
 tools/meson-hwdb-update.sh       |  32 ++++
 tools/meson-make-symlink.sh      |  12 ++
 tools/meson-vcs-tag.sh           |  18 +++
 tools/oss-fuzz.sh                |  56 +++++++
 tools/xml_helper.py              |  20 +++
 25 files changed, 1357 insertions(+)
 create mode 100755 tools/add-git-hook.sh
 create mode 100755 tools/catalog-report.py
 create mode 100755 tools/check-directives.sh
 create mode 100755 tools/check-includes.pl
 create mode 100755 tools/choose-default-locale.sh
 create mode 100755 tools/coverity.sh
 create mode 100755 tools/find-build-dir.sh
 create mode 100755 tools/find-double-newline.sh
 create mode 100755 tools/find-tabs.sh
 create mode 100644 tools/gdb-sd_dump_hashmaps.py
 create mode 100755 tools/generate-gperfs.py
 create mode 100755 tools/make-directive-index.py
 create mode 100755 tools/make-man-index.py
 create mode 100755 tools/make-man-rules.py
 create mode 100755 tools/meson-apply-m4.sh
 create mode 100755 tools/meson-build.sh
 create mode 100755 tools/meson-check-api-docs.sh
 create mode 100755 tools/meson-check-compilation.sh
 create mode 100755 tools/meson-check-help.sh
 create mode 100755 tools/meson-git-contrib.sh
 create mode 100755 tools/meson-hwdb-update.sh
 create mode 100755 tools/meson-make-symlink.sh
 create mode 100755 tools/meson-vcs-tag.sh
 create mode 100755 tools/oss-fuzz.sh
 create mode 100755 tools/xml_helper.py

(limited to 'tools')

diff --git a/tools/add-git-hook.sh b/tools/add-git-hook.sh
new file mode 100755
index 0000000..c1db99b
--- /dev/null
+++ b/tools/add-git-hook.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+set -eu
+
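+# Meson exports MESON_SOURCE_ROOT into the environment when it runs this as a
+# project script, so the cd below lands in the source tree.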
+cd "$MESON_SOURCE_ROOT"
+
+if [ ! -f .git/hooks/pre-commit.sample -o -f .git/hooks/pre-commit ]; then
+    exit 2 # not needed
+fi
+
+cp -p .git/hooks/pre-commit.sample .git/hooks/pre-commit
+chmod +x .git/hooks/pre-commit
+echo 'Activated pre-commit hook'
diff --git a/tools/catalog-report.py b/tools/catalog-report.py
new file mode 100755
index 0000000..ca1e13d
--- /dev/null
+++ b/tools/catalog-report.py
@@ -0,0 +1,84 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: MIT
+#
+# This file is distributed under the MIT license, see below.
+#
+# The MIT License (MIT)
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""
+Prints out journal entries with no or bad catalog explanations.
+"""
+
+import re
+from systemd import journal, id128
+
+j = journal.Reader()
+
+logged = set()
+pattern = re.compile('@[A-Z0-9_]+@')
+
+mids = {v:k for k,v in id128.__dict__.items()
+        if k.startswith('SD_MESSAGE')}
+
+freq = 1000
+
+def log_entry(x):
+    if 'CODE_FILE' in x:
+        # some of our code was using 'CODE_FUNCTION' instead of 'CODE_FUNC'
+        print('{}:{} {}'.format(x.get('CODE_FILE', '???'),
+                                x.get('CODE_LINE', '???'),
+                                x.get('CODE_FUNC', None) or x.get('CODE_FUNCTION', '???')))
+    print('    {}'.format(x.get('MESSAGE', 'no message!')))
+    for k, v in x.items():
+        if k.startswith('CODE_') or k in {'MESSAGE_ID', 'MESSAGE'}:
+            continue
+        print('    {}={}'.format(k, v))
+    print()
+
+for i, x in enumerate(j):
+    if i % freq == 0:
+        print(i, end='\r')
+
+    try:
+        mid = x['MESSAGE_ID']
+    except KeyError:
+        continue
+    name = mids.get(mid, 'unknown')
+
+    try:
+        desc = journal.get_catalog(mid)
+    except FileNotFoundError:
+        if mid in logged:
+            continue
+
+        print('{} {.hex}: no catalog entry'.format(name, mid))
+        log_entry(x)
+        logged.add(mid)
+        continue
+
+    fields = [field[1:-1] for field in pattern.findall(desc)]
+    for field in fields:
+        index = (mid, field)
+        if field in x or index in logged:
+            continue
+        print('{} {.hex}: no field {}'.format(name, mid, field))
+        log_entry(x)
+        logged.add(index)
diff --git a/tools/check-directives.sh b/tools/check-directives.sh
new file mode 100755
index 0000000..e2fd388
--- /dev/null
+++ b/tools/check-directives.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+set -e
+
+function generate_directives() {
+    perl -aF'/[\s,]+/' -ne '
+        if (my ($s, $d) = ($F[0] =~ /^([^\s\.]+)\.([^\s\.]+)$/)) { $d{$s}{"$d="} = 1; }
+        END { while (my ($key, $value) = each %d) {
+            printf "[%s]\n%s\n", $key, join("\n", keys(%$value))
+        }}' "$1"
+}
+
+if [[ $(generate_directives src/network/networkd-network-gperf.gperf | wc -l) -ne $(wc -l <test/fuzz/fuzz-network-parser/directives.network) ]]; then
+    echo "Looks like test/fuzz/fuzz-network-parser/directives.network hasn't been updated"
+    exit 1
+fi
+
+if [[ $(generate_directives src/network/netdev/netdev-gperf.gperf | wc -l) -ne $(wc -l <test/fuzz/fuzz-netdev-parser/directives.netdev) ]]; then
+    echo "Looks like test/fuzz/fuzz-netdev-parser/directives.netdev hasn't been updated"
+    exit 1
+fi
diff --git a/tools/check-includes.pl b/tools/check-includes.pl
new file mode 100755
index 0000000..c8f4de0
--- /dev/null
+++ b/tools/check-includes.pl
@@ -0,0 +1,23 @@
+#!/usr/bin/env perl
+#
+# checkincludes: Find files included more than once in (other) files.
+# Copyright: 2012, Oliver Neukum <oneukum@suse.de>
+# SPDX-License-Identifier: LGPL-2.1+
+
+foreach $file (@ARGV) {
+    open(FILE, "<$file") or die "Cannot open $file: $!.\n";
+    my %includedfiles = ();
+
+    while (<FILE>) {
+        if (m/^\s*#\s*include\s*[<"](\S*)[>"]/o) {
+            ++$includedfiles{$1};
+        }
+    }
+    foreach $filename (keys %includedfiles) {
+        if ($includedfiles{$filename} > 1) {
+            print "$file: $filename is included more than once.\n";
+        }
+    }
+
+    close(FILE);
+}
diff --git a/tools/choose-default-locale.sh b/tools/choose-default-locale.sh
new file mode 100755
index 0000000..3b30038
--- /dev/null
+++ b/tools/choose-default-locale.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+set -e
+
+# Fedora uses C.utf8 but Debian uses C.UTF-8
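+# grep -x only accepts whole-line matches, so longer locale names that merely
+# contain one of these strings are not mistaken for them.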
+if locale -a | grep -xq -E 'C\.(utf8|UTF-8)'; then
+    echo 'C.UTF-8'
+elif locale -a | grep -xqF 'en_US.utf8'; then
+    echo 'en_US.UTF-8'
+else
+    echo 'C'
+fi
diff --git a/tools/coverity.sh b/tools/coverity.sh
new file mode 100755
index 0000000..af4c920
--- /dev/null
+++ b/tools/coverity.sh
@@ -0,0 +1,233 @@
+#!/usr/bin/env bash
+
+# The official unmodified version of the script can be found at
+# https://scan.coverity.com/scripts/travisci_build_coverity_scan.sh
+
+set -e
+
+# Declare build command
+COVERITY_SCAN_BUILD_COMMAND="ninja -C cov-build"
+
+# Environment check
+# Use default values if not set
+SCAN_URL=${SCAN_URL:="https://scan.coverity.com"}
+TOOL_BASE=${TOOL_BASE:="/tmp/coverity-scan-analysis"}
+UPLOAD_URL=${UPLOAD_URL:="https://scan.coverity.com/builds"}
+
+# These must be set by environment
+echo -e "\033[33;1mNote: COVERITY_SCAN_PROJECT_NAME and COVERITY_SCAN_TOKEN are available on Project Settings page on scan.coverity.com\033[0m"
+[ -z "$COVERITY_SCAN_PROJECT_NAME" ] && echo "ERROR: COVERITY_SCAN_PROJECT_NAME must be set" && exit 1
+[ -z "$COVERITY_SCAN_NOTIFICATION_EMAIL" ] && echo "ERROR: COVERITY_SCAN_NOTIFICATION_EMAIL must be set" && exit 1
+[ -z "$COVERITY_SCAN_BRANCH_PATTERN" ] && echo "ERROR: COVERITY_SCAN_BRANCH_PATTERN must be set" && exit 1
+[ -z "$COVERITY_SCAN_BUILD_COMMAND" ] && echo "ERROR: COVERITY_SCAN_BUILD_COMMAND must be set" && exit 1
+[ -z "$COVERITY_SCAN_TOKEN" ] && echo "ERROR: COVERITY_SCAN_TOKEN must be set" && exit 1
+
+# Do not run on pull requests
+if [ "${TRAVIS_PULL_REQUEST}" = "true" ]; then
+    echo -e "\033[33;1mINFO: Skipping Coverity Analysis: branch is a pull request.\033[0m"
+    exit 0
+fi
+
+# Verify this branch should run
+if [[ "${TRAVIS_BRANCH^^}" =~ "${COVERITY_SCAN_BRANCH_PATTERN^^}" ]]; then
+    echo -e "\033[33;1mCoverity Scan configured to run on branch ${TRAVIS_BRANCH}\033[0m"
+else
+    echo -e "\033[33;1mCoverity Scan NOT configured to run on branch ${TRAVIS_BRANCH}\033[0m"
+    exit 1
+fi
+
+# Verify upload is permitted
+AUTH_RES=`curl -s --form project="$COVERITY_SCAN_PROJECT_NAME" --form token="$COVERITY_SCAN_TOKEN" $SCAN_URL/api/upload_permitted`
+if [ "$AUTH_RES" = "Access denied" ]; then
+    echo -e "\033[33;1mCoverity Scan API access denied. Check COVERITY_SCAN_PROJECT_NAME and COVERITY_SCAN_TOKEN.\033[0m"
+    exit 1
+else
+    AUTH=`echo $AUTH_RES | python -c "import sys, json; print(json.load(sys.stdin)['upload_permitted'])"`
+    if [ "$AUTH" = "True" ]; then
+        echo -e "\033[33;1mCoverity Scan analysis authorized per quota.\033[0m"
+    else
+        WHEN=`echo $AUTH_RES | python -c "import sys, json; print(json.load(sys.stdin)['next_upload_permitted_at'])"`
+        echo -e "\033[33;1mCoverity Scan analysis NOT authorized until $WHEN.\033[0m"
+        exit 1
+    fi
+fi
+
+TOOL_DIR=`find $TOOL_BASE -type d -name 'cov-analysis*'`
+export PATH="$TOOL_DIR/bin:$PATH"
+
+# Disable CCACHE so that cov-build captures compilation units correctly
+export CCACHE_DISABLE=1
+
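+# Typical CI invocation (the values here are illustrative placeholders):
+#   COVERITY_SCAN_PROJECT_NAME=systemd COVERITY_SCAN_TOKEN=xxxx \
+#   COVERITY_SCAN_NOTIFICATION_EMAIL=dev@example.com \
+#   COVERITY_SCAN_BRANCH_PATTERN=master tools/coverity.sh build --tar
+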
+# FUNCTION DEFINITIONS
+# --------------------
+_help()
+{
+    # displays help and exits
+    cat <<-EOF
+    USAGE: $0 [CMD] [OPTIONS]
+
+    CMD
+        build   Issue Coverity build
+        upload  Upload coverity archive for analysis
+                Note: By default, archive is created from default results directory.
+                      To provide custom archive or results directory, see --result-dir
+                      and --tar options below.
+
+    OPTIONS
+        -h,--help       Display this menu and exit
+
+        Applicable to build command
+        ---------------------------
+        -o,--out-dir    Specify Coverity intermediate directory (defaults to 'cov-int')
+        -t,--tar        bool, archive the output to a .tgz file (defaults to false)
+
+        Applicable to upload command
+        ----------------------------
+        -d, --result-dir    Specify result directory if different from default ('cov-int')
+        -t, --tar ARCHIVE   Use custom .tgz archive instead of intermediate directory or pre-archived .tgz
+                            (defaults to 'analysis-results.tgz')
+EOF
+    return;
+}
+
+_pack()
+{
+    RESULTS_ARCHIVE=${RESULTS_ARCHIVE:-'analysis-results.tgz'}
+
+    echo -e "\033[33;1mTarring Coverity Scan Analysis results...\033[0m"
+    tar czf $RESULTS_ARCHIVE $RESULTS_DIR
+    SHA=`git rev-parse --short HEAD`
+
+    PACKED=true
+}
+
+_build()
+{
+    echo -e "\033[33;1mRunning Coverity Scan Analysis Tool...\033[0m"
+    local _cov_build_options=""
+    #local _cov_build_options="--return-emit-failures 8 --parse-error-threshold 85"
+    eval "${COVERITY_SCAN_BUILD_COMMAND_PREPEND}"
+    COVERITY_UNSUPPORTED=1 cov-build --dir $RESULTS_DIR $_cov_build_options sh -c "$COVERITY_SCAN_BUILD_COMMAND"
+    cov-import-scm --dir $RESULTS_DIR --scm git --log $RESULTS_DIR/scm_log.txt
+
+    if [ $? != 0 ]; then
+        echo -e "\033[33;1mCoverity Scan Build failed: $TEXT.\033[0m"
+        return 1
+    fi
+
+    [ -z $TAR ] || [ $TAR = false ] && return 0
+
+    if [ "$TAR" = true ]; then
+        _pack
+    fi
+}
+
+_upload()
+{
+    # pack results
+    [ -z $PACKED ] || [ $PACKED = false ] && _pack
+
+    # Upload results
+    echo -e "\033[33;1mUploading Coverity Scan Analysis results...\033[0m"
+    response=$(curl \
+        --silent --write-out "\n%{http_code}\n" \
+        --form project=$COVERITY_SCAN_PROJECT_NAME \
+        --form token=$COVERITY_SCAN_TOKEN \
+        --form email=$COVERITY_SCAN_NOTIFICATION_EMAIL \
+        --form file=@$RESULTS_ARCHIVE \
+        --form version=$SHA \
+        --form description="Travis CI build" \
+        $UPLOAD_URL)
+    printf "\033[33;1mThe response is\033[0m\n%s\n" "$response"
+    status_code=$(echo "$response" | sed -n '$p')
+    # Coverity Scan used to respond with 201 on successfully receiving analysis results.
+    # Now for some reason it sends 200 and may change back in the foreseeable future.
+    # See https://github.com/pmem/pmdk/commit/7b103fd2dd54b2e5974f71fb65c81ab3713c12c5
+    if [ "$status_code" != "200" ]; then
+        TEXT=$(echo "$response" | sed '$d')
+        echo -e "\033[33;1mCoverity Scan upload failed: $TEXT.\033[0m"
+        exit 1
+    fi
+
+    echo -e "\n\033[33;1mCoverity Scan Analysis completed successfully.\033[0m"
+    exit 0
+}
+
+# PARSE COMMAND LINE OPTIONS
+# --------------------------
+
+case $1 in
+    -h|--help)
+        _help
+        exit 0
+        ;;
+    build)
+        CMD='build'
+        TEMP=`getopt -o ho:t --long help,out-dir:,tar -n '$0' -- "$@"`
+        _ec=$?
+        [[ $_ec -gt 0 ]] && _help && exit $_ec
+        shift
+        ;;
+    upload)
+        CMD='upload'
+        TEMP=`getopt -o hd:t: --long help,result-dir:,tar: -n '$0' -- "$@"`
+        _ec=$?
+        [[ $_ec -gt 0 ]] && _help && exit $_ec
+        shift
+        ;;
+    *)
+        _help && exit 1 ;;
esac
+
+RESULTS_DIR='cov-int'
+
+eval set -- "$TEMP"
+if [ $? != 0 ] ; then exit 1 ; fi
+
+# extract options and their arguments into variables.
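+# For example, `coverity.sh build --out-dir cov-int --tar` reaches the build loop
+# below with RESULTS_DIR=cov-int and TAR=true; the `--` marker that getopt appends
+# is what triggers _build (or _upload) and ends the loop.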
+if [[ $CMD == 'build' ]]; then
+    TAR=false
+    while true ; do
+        case $1 in
+            -h|--help)
+                _help
+                exit 0
+                ;;
+            -o|--out-dir)
+                RESULTS_DIR="$2"
+                shift 2
+                ;;
+            -t|--tar)
+                TAR=true
+                shift
+                ;;
+            --) _build; shift ; break ;;
+            *) echo "Internal error" ; _help && exit 6 ;;
+        esac
+    done
+
+elif [[ $CMD == 'upload' ]]; then
+    while true ; do
+        case $1 in
+            -h|--help)
+                _help
+                exit 0
+                ;;
+            -d|--result-dir)
+                CHANGE_DEFAULT_DIR=true
+                RESULTS_DIR="$2"
+                shift 2
+                ;;
+            -t|--tar)
+                RESULTS_ARCHIVE="$2"
+                [ -z $CHANGE_DEFAULT_DIR ] || [ $CHANGE_DEFAULT_DIR = false ] && PACKED=true
+                shift 2
+                ;;
+            --) _upload; shift ; break ;;
+            *) echo "Internal error" ; _help && exit 6 ;;
+        esac
+    done
+
+fi
diff --git a/tools/find-build-dir.sh b/tools/find-build-dir.sh
new file mode 100755
index 0000000..06b6297
--- /dev/null
+++ b/tools/find-build-dir.sh
@@ -0,0 +1,32 @@
+#!/bin/sh
+set -e
+
+# Try to guess the build directory:
+# we look for subdirectories of the parent directory that look like ninja build dirs.
+
+if [ -n "$BUILD_DIR" ]; then
+    echo "$(realpath "$BUILD_DIR")"
+    exit 0
+fi
+
+root="$(dirname "$(realpath "$0")")"
+
+found=
+for i in "$root"/../*/build.ninja; do
+    c="$(dirname $i)"
+    [ -d "$c" ] || continue
+    [ "$(basename "$c")" != mkosi.builddir ] || continue
+
+    if [ -n "$found" ]; then
+        echo 'Found multiple candidates, specify build directory with $BUILD_DIR' >&2
+        exit 2
+    fi
+    found="$c"
+done
+
+if [ -z "$found" ]; then
+    echo 'Specify build directory with $BUILD_DIR' >&2
+    exit 1
+fi
+
+echo "$(realpath $found)"
diff --git a/tools/find-double-newline.sh b/tools/find-double-newline.sh
new file mode 100755
index 0000000..6a6790b
--- /dev/null
+++ b/tools/find-double-newline.sh
@@ -0,0 +1,42 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1+
+
+TOP=`git rev-parse --show-toplevel`
+
+case "$1" in
+
+    recdiff)
+        if [ "$2" = "" ] ; then
+            DIR="$TOP"
+        else
+            DIR="$2"
+        fi
+
+        find $DIR -type f \( -name '*.[ch]' -o -name '*.xml' \) -exec $0 diff \{\} \;
+        ;;
+
+    recpatch)
+        if [ "$2" = "" ] ; then
+            DIR="$TOP"
+        else
+            DIR="$2"
+        fi
+
+        find $DIR -type f \( -name '*.[ch]' -o -name '*.xml' \) -exec $0 patch \{\} \;
+        ;;
+
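+    # The sed program '/^$/N;/^\n$/D' used below appends the next line to each
+    # empty line (N) and deletes the first of two consecutive newlines (D),
+    # i.e. it squeezes every run of blank lines down to a single one.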
+    diff)
+        T=`mktemp`
+        sed '/^$/N;/^\n$/D' < "$2" > "$T"
+        diff -u "$2" "$T"
+        rm -f "$T"
+        ;;
+
+    patch)
+        sed -i '/^$/N;/^\n$/D' "$2"
+        ;;
+
+    *)
+        echo "Expected recdiff|recpatch|diff|patch as verb." >&2
+        ;;
+esac
diff --git a/tools/find-tabs.sh b/tools/find-tabs.sh
new file mode 100755
index 0000000..e32eac8
--- /dev/null
+++ b/tools/find-tabs.sh
@@ -0,0 +1,42 @@
+#!/bin/sh
+# SPDX-License-Identifier: LGPL-2.1+
+
+TOP=`git rev-parse --show-toplevel`
+
+case "$1" in
+
+    recdiff)
+        if [ "$2" = "" ] ; then
+            DIR="$TOP"
+        else
+            DIR="$2"
+        fi
+
+        find $DIR -type f \( -name '*.[ch]' -o -name '*.xml' \) -exec $0 diff \{\} \;
+        ;;
+
+    recpatch)
+        if [ "$2" = "" ] ; then
+            DIR="$TOP"
+        else
+            DIR="$2"
+        fi
+
+        find $DIR -type f \( -name '*.[ch]' -o -name '*.xml' \) -exec $0 patch \{\} \;
+        ;;
+
+    diff)
+        T=`mktemp`
+        sed 's/\t/        /g' < "$2" > "$T"
+        diff -u "$2" "$T"
+        rm -f "$T"
+        ;;
+
+    patch)
+        sed -i 's/\t/        /g' "$2"
+        ;;
+
+    *)
+        echo "Expected recdiff|recpatch|diff|patch as verb." >&2
+        ;;
+esac
diff --git a/tools/gdb-sd_dump_hashmaps.py b/tools/gdb-sd_dump_hashmaps.py
new file mode 100644
index 0000000..4e8593f
--- /dev/null
+++ b/tools/gdb-sd_dump_hashmaps.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1+
+
+from __future__ import print_function
+
+import gdb
+
+class sd_dump_hashmaps(gdb.Command):
+    "dump systemd's hashmaps"
+
+    def __init__(self):
+        super(sd_dump_hashmaps, self).__init__("sd_dump_hashmaps", gdb.COMMAND_DATA, gdb.COMPLETE_NONE)
+
+    def invoke(self, arg, from_tty):
+        d = gdb.parse_and_eval("hashmap_debug_list")
+        all_entry_sizes = gdb.parse_and_eval("all_entry_sizes")
+        all_direct_buckets = gdb.parse_and_eval("all_direct_buckets")
+        uchar_t = gdb.lookup_type("unsigned char")
+        ulong_t = gdb.lookup_type("unsigned long")
+        debug_offset = gdb.parse_and_eval("(unsigned long)&((HashmapBase*)0)->debug")
+
+        print("type, hash, indirect, entries, max_entries, buckets, creator")
+        while d:
+            h = gdb.parse_and_eval("(HashmapBase*)((char*)%d - %d)" % (int(d.cast(ulong_t)), debug_offset))
+
+            if h["has_indirect"]:
+                storage_ptr = h["indirect"]["storage"].cast(uchar_t.pointer())
+                n_entries = h["indirect"]["n_entries"]
+                n_buckets = h["indirect"]["n_buckets"]
+            else:
+                storage_ptr = h["direct"]["storage"].cast(uchar_t.pointer())
+                n_entries = h["n_direct_entries"]
+                n_buckets = all_direct_buckets[int(h["type"])]
+
+            t = ["plain", "ordered", "set"][int(h["type"])]
+
+            print("{}, {}, {}, {}, {}, {}, {} ({}:{})".format(t, h["hash_ops"], bool(h["has_indirect"]), n_entries, d["max_entries"], n_buckets, d["func"], d["file"], d["line"]))
+
+            if arg != "" and n_entries > 0:
+                dib_raw_addr = storage_ptr + (all_entry_sizes[h["type"]] * n_buckets)
+
+                histogram = {}
+                for i in range(0, n_buckets):
+                    dib = int(dib_raw_addr[i])
+                    histogram[dib] = histogram.get(dib, 0) + 1
+
+                for dib in sorted(iter(histogram)):
+                    if dib != 255:
+                        print("{:>3} {:>8} {} of entries".format(dib, histogram[dib], 100.0*histogram[dib]/n_entries))
+                    else:
+                        print("{:>3} {:>8} {} of slots".format(dib, histogram[dib], 100.0*histogram[dib]/n_buckets))
+                print("mean DIB of entries: {}".format(sum([dib*histogram[dib] for dib in iter(histogram) if dib != 255])*1.0/n_entries))
+
+                blocks = []
+                current_len = 1
+                prev = int(dib_raw_addr[0])
+                for i in range(1, n_buckets):
+                    dib = int(dib_raw_addr[i])
+                    if (dib == 255) != (prev == 255):
+                        if prev != 255:
+                            blocks += [[i, current_len]]
+                        current_len = 1
+                    else:
+                        current_len += 1
+
+                    prev = dib
+                if prev != 255:
+                    blocks += [[i, current_len]]
+                # a block may be wrapped around
+                if len(blocks) > 1 and blocks[0][0] == blocks[0][1] and blocks[-1][0] == n_buckets - 1:
+                    blocks[0][1] += blocks[-1][1]
+                    blocks = blocks[0:-1]
+                print("max block: {}".format(max(blocks, key=lambda a: a[1])))
+                print("sum block lens: {}".format(sum(b[1] for b in blocks)))
+                print("mean block len: {}".format((1.0 * sum(b[1] for b in blocks) / len(blocks))))
+
+            d = d["debug_list_next"]
+
+sd_dump_hashmaps()
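+
+# Usage: load this file from a gdb session attached to systemd, e.g.
+#   (gdb) source tools/gdb-sd_dump_hashmaps.py
+#   (gdb) sd_dump_hashmaps
+# Passing any argument (e.g. `sd_dump_hashmaps verbose`) additionally prints the
+# DIB histograms; this assumes a build where the hashmap debug list is compiled in.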
diff --git a/tools/generate-gperfs.py b/tools/generate-gperfs.py
new file mode 100755
index 0000000..5392df0
--- /dev/null
+++ b/tools/generate-gperfs.py
@@ -0,0 +1,24 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1+
+
+"""
+Generate %-from-name.gperf from %-list.txt
+"""
+
+import sys
+
+name, prefix, input = sys.argv[1:]
+
+print("""\
+%{
+#if __GNUC__ >= 7
+_Pragma("GCC diagnostic ignored \\"-Wimplicit-fallthrough\\"")
+#endif
+%}""")
+print("""\
+struct {}_name {{ const char* name; int id; }};
+%null-strings
+%%""".format(name))
+
+for line in open(input):
+    print("{0}, {1}{0}".format(line.rstrip(), prefix))
diff --git a/tools/make-directive-index.py b/tools/make-directive-index.py
new file mode 100755
index 0000000..9d94487
--- /dev/null
+++ b/tools/make-directive-index.py
@@ -0,0 +1,307 @@
+#!/usr/bin/env python3
+# SPDX-License-Identifier: LGPL-2.1+
+
+import sys
+import collections
+import re
+from xml_helper import xml_parse, xml_print, tree
+from copy import deepcopy
+
+TEMPLATE = '''\
+<refentry id="systemd.directives">
+
+        <refentryinfo>
+                <title>systemd.directives</title>
+                <productname>systemd</productname>
+        </refentryinfo>
+
+        <refmeta>
+                <refentrytitle>systemd.directives</refentrytitle>
+                <manvolnum>7</manvolnum>
+        </refmeta>
+
+        <refnamediv>
+                <refname>systemd.directives</refname>
+                <refpurpose>Index of configuration directives</refpurpose>
+        </refnamediv>
+
+        <refsect1>
+                <title>Unit directives</title>
+
+                <para>Directives for configuring units, used in unit
+                files.</para>
+
+                <variablelist id='unit-directives' />
+        </refsect1>
+
+        <refsect1>
+                <title>Options on the kernel command line</title>
+
+                <para>Kernel boot options for configuring the behaviour of the
+                systemd process.</para>
+
+                <variablelist id='kernel-commandline-options' />
+        </refsect1>
+
+        <refsect1>
+                <title>Environment variables</title>
+
+                <para>Environment variables understood by the systemd manager
+                and other programs and environment variable-compatible settings.</para>
+
+                <variablelist id='environment-variables' />
+        </refsect1>
+
+        <refsect1>
+                <title>EFI variables</title>
+
+                <para>EFI variables understood by
+                <citerefentry><refentrytitle>systemd-boot</refentrytitle><manvolnum>7</manvolnum></citerefentry>
+                and other programs.</para>
+
+                <variablelist id='efi-variables' />
+        </refsect1>
+
+        <refsect1>
+                <title>UDEV directives</title>
+
+                <para>Directives for configuring systemd units through the
+                udev database.</para>
+
+                <variablelist id='udev-directives' />
+        </refsect1>
+
+        <refsect1>
+                <title>Network directives</title>
+
+                <para>Directives for configuring network links through the
+                net-setup-link udev builtin and networks through
+                systemd-networkd.</para>
+
+                <variablelist id='network-directives' />
+        </refsect1>
+
+        <refsect1>
+                <title>Journal fields</title>
+
+                <para>Fields in the journal events with a well known meaning.</para>
+
+                <variablelist id='journal-directives' />
+        </refsect1>
+
+        <refsect1>
+                <title>PAM configuration directives</title>
+
+                <para>Directives for configuring PAM behaviour.</para>
+
+                <variablelist id='pam-directives' />
+        </refsect1>
+
+        <refsect1>
+                <title><filename>/etc/crypttab</filename> and
+                <filename>/etc/fstab</filename> options</title>
+
+                <para>Options which influence mounted filesystems and
+                encrypted volumes.</para>
+
+                <variablelist id='fstab-options' />
+        </refsect1>
+
+        <refsect1>
+                <title><citerefentry><refentrytitle>systemd.nspawn</refentrytitle><manvolnum>5</manvolnum></citerefentry>
+                directives</title>
+
+                <para>Directives for configuring systemd-nspawn containers.</para>
+
+                <variablelist id='nspawn-directives' />
+        </refsect1>
+
+        <refsect1>
+                <title>Program configuration options</title>
+
+                <para>Directives for configuring the behaviour of the
+                systemd process and other tools through configuration files.</para>
+
+                <variablelist id='config-directives' />
+        </refsect1>
+
+        <refsect1>
+                <title>Command line options</title>
+
+                <para>Command-line options accepted by programs in the
+                systemd suite.</para>
+
+                <variablelist id='options' />
+        </refsect1>
+
+        <refsect1>
+                <title>Constants</title>
+
+                <para>Various constants used and/or defined by systemd.</para>
+
+                <variablelist id='constants' />
+        </refsect1>
+
+        <refsect1>
+                <title>Miscellaneous options and directives</title>
+
+                <para>Other configuration elements which don't fit in
+                any of the above groups.</para>
+
+                <variablelist id='miscellaneous' />
+        </refsect1>
+
+        <refsect1>
+                <title>Files and directories</title>
+
+                <para>Paths and file names referred to in the
+                documentation.</para>
+
+                <variablelist id='filenames' />
+        </refsect1>
+
+        <refsect1>
+                <title>Colophon</title>
+                <para id='colophon' />
+        </refsect1>
+</refentry>
+'''
+
+COLOPHON = '''\
+This index contains {count} entries in {sections} sections,
+referring to {pages} individual manual pages.
+'''
+
+def _extract_directives(directive_groups, formatting, page):
+    t = xml_parse(page)
+    section = t.find('./refmeta/manvolnum').text
+    pagename = t.find('./refmeta/refentrytitle').text
+
+    storopt = directive_groups['options']
+    for variablelist in t.iterfind('.//variablelist'):
+        klass = variablelist.attrib.get('class')
+        storvar = directive_groups[klass or 'miscellaneous']
+        #