summaryrefslogtreecommitdiffstats
path: root/scripts
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 15:26:00 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-07 15:26:00 +0000
commit830407e88f9d40d954356c3754f2647f91d5c06a (patch)
treed6a0ece6feea91f3c656166dbaa884ef8a29740e /scripts
parentInitial commit. (diff)
downloadknot-resolver-830407e88f9d40d954356c3754f2647f91d5c06a.tar.xz
knot-resolver-830407e88f9d40d954356c3754f2647f91d5c06a.zip
Adding upstream version 5.6.0.upstream/5.6.0upstream
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to '')
-rwxr-xr-xscripts/bench.sh12
-rwxr-xr-xscripts/bugreport-journals.py194
-rwxr-xr-xscripts/build-in-obs.sh32
-rwxr-xr-xscripts/coverage_c_combine.sh26
-rwxr-xr-xscripts/coverage_env.sh42
-rwxr-xr-xscripts/doh_b64encode_query.py26
-rwxr-xr-xscripts/gen-cdefs.sh82
-rwxr-xr-xscripts/gen-pgp-keyblock.sh38
-rwxr-xr-xscripts/get-date.sh14
-rwxr-xr-xscripts/kresd-host.lua115
-rwxr-xr-xscripts/kresd-query.lua63
-rw-r--r--scripts/kresd.apparmor29
-rwxr-xr-xscripts/luacov_gen_empty.sh18
-rwxr-xr-xscripts/luacov_to_info.lua57
-rwxr-xr-xscripts/make-archive.sh38
-rwxr-xr-xscripts/make-doc.sh42
-rwxr-xr-xscripts/make-obs.sh59
-rwxr-xr-xscripts/map_install_src.lua168
-rwxr-xr-xscripts/run-pylint.sh12
-rwxr-xr-xscripts/run-scanbuild-with-args.sh51
-rwxr-xr-xscripts/test-config.sh32
-rwxr-xr-xscripts/test-integration-prepare.sh8
-rwxr-xr-xscripts/update-authors.sh41
-rwxr-xr-xscripts/update-root-hints.sh28
24 files changed, 1227 insertions, 0 deletions
diff --git a/scripts/bench.sh b/scripts/bench.sh
new file mode 100755
index 0000000..232c523
--- /dev/null
+++ b/scripts/bench.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+set -o errexit -o nounset
+
+# Run LRU benchmark; MESON_SOURCE_ROOT/MESON_BUILD_ROOT/MESON_SUBDIR are
+# provided by meson when this is invoked as a run target.
+cd "${MESON_SOURCE_ROOT}"
+
+echo "Test LRU with increasing overfill, misses should increase ~ linearly"
+
+# shrink the LRU size while keeping the same input set (bench_lru_set1.tsv)
+for num in 65536 32768 16384 8192 4096; do
+    "${MESON_BUILD_ROOT}/${MESON_SUBDIR}/bench_lru" 23 "${MESON_SOURCE_ROOT}/${MESON_SUBDIR}/bench_lru_set1.tsv" - "${num}"
+done
diff --git a/scripts/bugreport-journals.py b/scripts/bugreport-journals.py
new file mode 100755
index 0000000..d66ddfb
--- /dev/null
+++ b/scripts/bugreport-journals.py
@@ -0,0 +1,194 @@
+#!/usr/bin/python3
+"""
+Collect systemd-journal log entries around time of daemon exit and coredumps.
+"""
+
+import datetime
+import json
+import logging
+import pathlib
+import shutil
+import subprocess
+import sys
+
+
+TIMESPAN_BEFORE = 600  # seconds of journal context gathered before an event
+TIMESPAN_AFTER = TIMESPAN_BEFORE  # symmetric window after the event
+CURSOR_DIR = pathlib.Path('/var/lib/knot-resolver')
+CURSOR_PATH = CURSOR_DIR / 'coredump_watcher.cursor'  # persisted journal cursor
+
+
+class Timestamp:
+    """Journald __REALTIME_TIMESTAMP value (microseconds since the epoch).
+
+    Orderable (__lt__/__eq__) and printable as a UTC filename-safe string.
+    """
+
+    def __init__(self, usec):
+        # journald exports the field as a string; normalize to int microseconds
+        self.usec = int(usec)
+
+    @property
+    def unix(self):
+        # whole seconds since the epoch; microsecond remainder truncated
+        return self.usec // 10**6
+
+    def __str__(self):
+        # used as the output file name stem, hence ':' -- safe on Linux fs
+        # NOTE(review): utcfromtimestamp() is deprecated since Python 3.12;
+        # datetime.fromtimestamp(self.unix, tz=datetime.timezone.utc) is the
+        # modern equivalent.
+        return datetime.datetime.utcfromtimestamp(self.unix).strftime('%Y-%m-%d_%H:%M:%S')
+
+    def __lt__(self, other):
+        return self.usec < other.usec
+
+    def __eq__(self, other):
+        # NOTE(review): __eq__ without __hash__ makes instances unhashable;
+        # fine here because Timestamps are only sorted/compared, never dict keys.
+        return self.usec == other.usec
+
+
+class Entry(dict):
+    """One journald log entry: a dict of journal fields with typed helpers."""
+
+    @property
+    def timestamp(self):
+        """Timestamp of the entry, or None when the field is absent."""
+        usec = self.get('__REALTIME_TIMESTAMP')
+        if usec is None:
+            return None
+        return Timestamp(usec)
+
+    @property
+    def core_path(self):
+        """Path to the stored coredump file, or None for non-coredump entries."""
+        filename = self.get('COREDUMP_FILENAME')
+        if filename is None:
+            return None
+        return pathlib.Path(filename)
+
+    def get_first(self, *keys):
+        """Return the value for the first key present in the entry, else None."""
+        for key in keys:
+            try:
+                return self[key]
+            except KeyError:
+                continue
+        return None
+
+    @property
+    def program(self):
+        # prefer coredump/unit metadata, fall back to the syslog identifier
+        return self.get_first('COREDUMP_UNIT', 'UNIT', '_SYSTEMD_UNIT', 'SYSLOG_IDENTIFIER')
+
+    @property
+    def pid(self):
+        # coredump entries carry the crashed PID; plain log lines carry _PID
+        return self.get_first('COREDUMP_PID', '_PID')
+
+
+def save_cursor(cursor):
+    """Persist the journal cursor so the next run skips already-seen logs.
+
+    No-op when cursor is None (journal was empty / cursor unavailable).
+    """
+    if cursor is None:
+        return
+    CURSOR_DIR.mkdir(parents=True, exist_ok=True)
+    with CURSOR_PATH.open('w') as curfile:
+        curfile.write(cursor)
+    logging.info('log cursor saved into %s, next run will skip old logs',
+                 CURSOR_PATH)
+
+
+def load_cursor():
+    """Return the cursor saved by a previous run, or None on first run."""
+    try:
+        with CURSOR_PATH.open('r') as curfile:
+            logging.info('log cursor read from %s, skipping old logs',
+                         CURSOR_PATH)
+            return curfile.read().strip()
+    except FileNotFoundError:
+        logging.info('log cursor file %s does not exist, parsing all logs',
+                     CURSOR_PATH)
+        return None
+
+
+def get_cursor():
+    """Return the journal cursor of the newest entry (any unit), or None.
+
+    Queried before processing so logs arriving during this run are not
+    skipped by the cursor saved at the end.
+    """
+    journal_args = ['journalctl', '-o', 'json', '-n', '1']
+    with subprocess.Popen(
+            journal_args,
+            bufsize=1,  # line buffering
+            universal_newlines=True,
+            stdout=subprocess.PIPE) as jproc:
+        stdout, _ = jproc.communicate()
+    data = json.loads(stdout)
+    entry = Entry(**data)
+    return entry.get('__CURSOR')
+
+
+def read_journal(*args):
+    """Yield Entry objects for kres* and systemd-coredump* journal units.
+
+    Extra journalctl arguments (e.g. --since/--until/--after-cursor) are
+    passed through verbatim.  Entries are streamed line-by-line, one JSON
+    object per line.
+    """
+    journal_args = [
+        'journalctl',
+        '-o', 'json',
+        '-u', 'kres*',
+        '-u', 'systemd-coredump*']
+    journal_args += args
+    with subprocess.Popen(
+            journal_args,
+            bufsize=1,  # line buffering
+            universal_newlines=True,
+            stdout=subprocess.PIPE) as jproc:
+        for line in jproc.stdout:
+            data = json.loads(line)
+            yield Entry(**data)
+
+
+def extract_logs(around_time, log_name):
+    """Write journal entries around `around_time` to log_name.{json,log}.
+
+    The window is [around_time - TIMESPAN_BEFORE, around_time + TIMESPAN_AFTER].
+    The .json file keeps raw entries; the .log file is human-readable with a
+    '##### HERE #####' marker at the triggering timestamp.
+    """
+    start_time = Timestamp(around_time.usec - TIMESPAN_BEFORE * 10**6)
+    end_time = Timestamp(around_time.usec + TIMESPAN_AFTER * 10**6)
+    log_window = list(read_journal(
+        '--since', '@{}'.format(start_time.unix),
+        '--until', '@{}'.format(end_time.unix)))
+    with log_name.with_suffix('.json').open('w') as jsonf:
+        json.dump(log_window, jsonf, indent=4)
+    with log_name.with_suffix('.log').open('w') as logf:
+        logf.write('##### logs since {}\n'.format(start_time))
+        for entry in log_window:
+            if entry.timestamp == around_time:
+                logf.write('##### HERE #####\n')
+            logf.write('{t} {h} {prg}[{pid}]: {m}\n'.format(
+                t=entry.timestamp,
+                h=entry.get('_HOSTNAME'),
+                prg=entry.program,
+                pid=entry.pid,
+                m=entry.get('MESSAGE')))
+        logf.write('##### logs until {}\n'.format(end_time))
+
+
+def main():
+    """Scan the journal for daemon exits and coredumps; dump context logs.
+
+    Usage: bugreport-journals.py <output log directory>.  Only entries after
+    the previously saved cursor are examined; the cursor taken at start is
+    saved at the end so the next run continues from there.
+    """
+    logging.basicConfig(level=logging.INFO)
+
+    if len(sys.argv) != 2:
+        sys.exit('Usage: {} <output log directory>'.format(sys.argv[0]))
+    outdir = pathlib.Path(sys.argv[1])
+    outdir.mkdir(parents=True, exist_ok=True)
+
+    cursor_previous = load_cursor()
+    cursor_at_start = get_cursor()
+
+    exit_times = []
+    coredumps = {}
+    filter_args = []
+    if cursor_previous is not None:
+        filter_args = ['--after-cursor', cursor_previous]
+    for entry in read_journal(*filter_args):
+        # EXIT_CODE marks a service exit; COREDUMP_FILENAME a stored core
+        if 'EXIT_CODE' in entry:
+            logging.debug('exit@%s: %s', entry.timestamp, entry)
+            exit_times.append(entry.timestamp)
+        if 'COREDUMP_FILENAME' in entry:
+            logging.debug('coredump @ %s: %s', entry.timestamp, entry.core_path)
+            coredumps[entry.core_path] = entry.timestamp
+
+    exit_times.sort()
+    logging.debug('detected exits: %s', exit_times)
+    for exit_time in exit_times:
+        extract_logs(exit_time, outdir / str(exit_time))
+
+    coredumps_missing = 0
+    logging.debug('detected coredumps: %s', coredumps)
+    for core_path, core_time in coredumps.items():
+        core_name = core_path.name
+        out_path_prefix = (outdir / str(core_time))
+        extract_logs(core_time, out_path_prefix.with_suffix('.logs'))
+        try:
+            # copy the coredump next to its log snippet for the bug report
+            shutil.copy(
+                str(core_path),
+                str(out_path_prefix.with_suffix('.{}'.format(core_name))))
+        except FileNotFoundError as ex:
+            # core may have been cleaned up (e.g. systemd-tmpfiles) meanwhile
+            logging.error('coredump file %s cannot be copied: %s', core_path, ex)
+            coredumps_missing += 1
+    logging.info('wrote %d coredumps and %d logs snippets (%s coredumps missing)',
+                 len(coredumps) - coredumps_missing, len(exit_times), coredumps_missing)
+
+    save_cursor(cursor_at_start)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/build-in-obs.sh b/scripts/build-in-obs.sh
new file mode 100755
index 0000000..3256dde
--- /dev/null
+++ b/scripts/build-in-obs.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# Push packaging files to OBS
+#
+# Example usage:
+# 1. ./scripts/make-obs.sh
+# 2. ./scripts/build-in-obs.sh knot-resolver-latest
+set -o errexit -o nounset -o xtrace
+
+pkgdir='pkg/obs'
+
+project=home:CZ-NIC:$1
+package=knot-resolver
+
+# Safety prompt: only *-devel / *-testing projects are pushed unattended.
+if ! [[ "$1" == *-devel || "$1" == *-testing ]]; then
+    # NOTE(review): read without -r mangles backslashes; harmless for y/N input
+    read -p "Pushing to '$project', are you sure? [y/N]: " yn
+    case $yn in
+        [Yy]* )
+            ;;
+        * )
+            exit 1
+    esac
+fi
+
+osc co "${project}" "${package}"
+pushd "${project}/${package}"
+# drop all currently tracked files; '||:' tolerates an already-empty package
+osc del * ||:
+# NOTE(review): ${pkgdir} is unquoted here; safe only while it has no spaces
+cp -r ../../${pkgdir}/* ./
+osc addremove
+osc ci -n
+popd
diff --git a/scripts/coverage_c_combine.sh b/scripts/coverage_c_combine.sh
new file mode 100755
index 0000000..a891ded
--- /dev/null
+++ b/scripts/coverage_c_combine.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# Combine C (gcov) coverage data from test runs into lcov *.info files.
+# $1 = top source directory
+# $2 = coverage data directory path
+# $3 = output directory for *.info files
+#
+# NOTE(review): ${LCOV} is expanded under 'set -o nounset' but never assigned
+# in this script, so it must come from the caller's environment (build
+# system) or the script aborts -- verify against the meson integration.
+
+set -o errexit -o nounset
+shopt -s nullglob
+IFS=$'\n'
+
+TOPSRCDIR="$1"
+DATAROOT="$2"
+OUTDIR="$3"
+
+cd "${TOPSRCDIR}"
+# each test run leaves a .topdir_kresd_coverage marker in its data dir
+for COVNAME in $(find "${DATAROOT}" -name .topdir_kresd_coverage)
+do
+    # remove stale counters from a previous iteration before importing
+    find "${DATAROOT}" -name '*.gcda' -not -path "${DATAROOT}/*" -delete
+    COVDIR="$(dirname "${COVNAME}")"
+    COVDATA_FILENAMES=("${COVDIR}"/*) # filenames in BASH array
+    (( ${#COVDATA_FILENAMES[*]} )) || continue # skip empty dirs
+
+    cp -r -t ${TOPSRCDIR} "${COVDIR}"/*
+    ${LCOV} -q --no-external --capture -d lib -d daemon -d modules -o "$(mktemp -p "${OUTDIR}" -t XXXXXXXX.c.info)" > /dev/null
+done
diff --git a/scripts/coverage_env.sh b/scripts/coverage_env.sh
new file mode 100755
index 0000000..0f6810f
--- /dev/null
+++ b/scripts/coverage_env.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+
+# generate variables for coverage testing
+# $1 = top source directory
+# $2 = coverage data directory path
+# $3 = name of test/new subdirectory name
+# $4 = [optional] --export to generate export commands
+#
+# Prints a single line of VAR=... assignments (optionally 'export '-prefixed)
+# that the caller is expected to eval before running an instrumented binary.
+
+set -o errexit -o nounset
+shopt -s nullglob
+
+test -z "${COVERAGE:-}" && exit 0 # not enabled, do nothing
+test ! -z "${V:-}" && set -o xtrace # verbose mode
+
+EXPORT=""
+test "${4:-}" == "--export" && EXPORT="export "
+TOPSRCDIR="$1"
+DATAROOT="$2"
+OUTPATH="$2/$3"
+
+# check that output directory is empty
+# beware: Makefile will always call coverage_env.sh for all targets
+# so directories get created but not populated
+# i.e. test -d is not sufficient check
+OUTPATH_FILENAMES=("${OUTPATH}"/*) # filenames in BASH array
+(( ${#OUTPATH_FILENAMES[*]} )) && echo "false" && >&2 echo "fatal: output directory ${OUTPATH} must be empty (or non-existent)" && exit 1
+
+mkdir -p "${OUTPATH}"
+# convert paths to absolute
+pushd "${OUTPATH}" &> /dev/null
+# marker file later located by coverage_c_combine.sh
+touch .topdir_kresd_coverage
+OUTPATH="$(pwd -P)"
+popd &> /dev/null
+
+# determine GCOV_PREFIX_STRIP value for current source directory
+TOPSRCDIR_SLASHES="${TOPSRCDIR//[^\/]/}" # remove everything except /
+GCOV_PREFIX_STRIP="${#TOPSRCDIR_SLASHES}" # number of / == number of components
+
+# KRESD_COVERAGE_STATS is consumed by the Lua side (luacov); GCOV_* by gcc
+KRESD_COVERAGE_STATS="${OUTPATH}/luacov.stats.out"
+GCOV_PREFIX="${OUTPATH}"
+echo "${EXPORT}KRESD_COVERAGE_STATS=\"${KRESD_COVERAGE_STATS}\" ${EXPORT}GCOV_PREFIX=\"${GCOV_PREFIX}\" ${EXPORT}GCOV_PREFIX_STRIP=\"${GCOV_PREFIX_STRIP}\""
diff --git a/scripts/doh_b64encode_query.py b/scripts/doh_b64encode_query.py
new file mode 100755
index 0000000..59569b8
--- /dev/null
+++ b/scripts/doh_b64encode_query.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+"""Print the base64url-encoded wire form of a DNS query, as used by DoH GET."""
+import argparse
+import base64
+
+import dns
+import dns.message
+
+
+def main():
+    """Parse qname/qtype from argv and print the encoded query to stdout."""
+    parser = argparse.ArgumentParser(
+        description='Convert query name and type to base64 URL-encoded form')
+    parser.add_argument('qname', type=str, help='query name')
+    parser.add_argument('qtype', type=str, help='query type')
+    args = parser.parse_args()
+
+    # NOTE(review): dns.rdataclass is only reachable because dns.message pulls
+    # it in; an explicit 'import dns.rdataclass' would be more robust -- verify.
+    msg = dns.message.make_query(args.qname, args.qtype, dns.rdataclass.IN)
+    # fixed message ID keeps the encoded form deterministic/cache-friendly
+    msg.id = 0
+    wire = msg.to_wire()
+    encoded = base64.urlsafe_b64encode(wire)
+    printable = encoded.decode('utf-8')
+
+    print(printable)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/scripts/gen-cdefs.sh b/scripts/gen-cdefs.sh
new file mode 100755
index 0000000..ddb0aa7
--- /dev/null
+++ b/scripts/gen-cdefs.sh
@@ -0,0 +1,82 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+# Extract C declarations from a library's debug symbols (via gdb) and emit
+# them as LuaJIT-FFI-compatible cdef text.  Identifiers are read from stdin.
+set -o pipefail -o errexit
+
+if [ "$2" != types ] && [ "$2" != functions ]; then
+    echo "Usage: $0 libkres (types|functions)" >&2
+    echo " and input identifiers, one per line." >&2
+    echo " You need debug symbols in the library." >&2
+    echo
+    echo " If you call this on a type that's a typedef, it gets expanded." >&2
+    echo " To avoid that, prefix the identifier with 'typedef '." >&2
+    exit 1
+fi
+
+if ! command -v gdb >/dev/null; then
+    echo "Failed to find gdb" >&2
+    exit 1
+fi
+
+if ! command -v sed >/dev/null; then
+    echo "Failed to find GNU sed" >&2
+    exit 1
+fi
+
+# NOTE(review): this branch only warns; unlike the checks above it does not
+# 'exit 1', so non-GNU sed continues and likely mis-parses -- confirm intent.
+if ! sed --version | head -1 | grep -q "GNU sed"; then
+    echo "GNU sed required to run this script" >&2
+fi
+
+# be very precise with the directories for libraries to not pick wrong library
+case "$1" in
+    libknot) library="$(PATH="$(pkg-config libknot --variable=libdir)" command -v "$1.so")" ;;
+    libzscanner) library="$(PATH="$(pkg-config libzscanner --variable=libdir)" command -v "$1.so")" ;;
+    *) library="$(command -v "$1")" # use absolute path to library
+esac
+
+if [ -z "$library" ]; then
+    echo "$1 not found. Note: only .so platforms work currently." >&2
+    exit 1
+fi
+
+# Let's use an array to hold command-line arguments, to simplify quoting.
+GDB=(gdb)
+GDB+=(-n -quiet -batch "-symbols=$library")
+GDB+=(-iex "set width unlimited" -iex "set max-value-size unlimited")
+
+# stdin: one identifier per line; '#'-comments and blank lines are skipped
+grep -v '^#\|^$' | while read -r ident; do
+    if [ "$2" = functions ]; then
+        output="$("${GDB[@]}" --ex "info functions ^$ident\$" \
+            | sed '0,/^All functions/ d; /^File .*:$/ d')"
+    else # types
+        case "$ident" in
+            struct\ *|union\ *|enum\ *)
+                output="$("${GDB[@]}" --ex "ptype $ident" \
+                    | sed '0,/^type = /s/^type = /\n/; $ s/$/;/')"
+                ;;
+            typedef\ *) # typedef that shouldn't be expanded
+                output="$("${GDB[@]}" --ex "info types ^"$(echo "$ident" | sed 's/^typedef //')"\$" \
+                    | sed -e '0,/^File .*:$/ d' -e '/^File .*:$/,$ d')"
+                # we need to stop early to remove ^^ multiple matches
+                ;;
+            *) # we assume it's a typedef that should be expanded
+                output="$("${GDB[@]}" --ex "ptype $ident" \
+                    | sed "0,/^type = /s/^type = /typedef /; $ s/$/ $ident;/")"
+                ;;
+        esac
+    fi
+    # LuaJIT FFI blows up on "uint" type
+    output="$(echo "$output" | sed 's/\buint\b/unsigned int/g')"
+    # GDB 8.2+ added source line prefix to output
+    output="$(echo "$output" | sed 's/^[0-9]\+:[[:space:]]*//g')"
+    # use tabs instead of spaces
+    output="$(echo "$output" | sed 's/ /\t/g')"
+
+    # abort on empty output
+    if [ -z "$(echo "$output" | tr -d "\n;")" ]; then
+        echo "Failed to find cdef of $ident" >&2
+        exit 1
+    fi
+    echo "$output" | grep -v '^$'
+done
+
+exit 0
diff --git a/scripts/gen-pgp-keyblock.sh b/scripts/gen-pgp-keyblock.sh
new file mode 100755
index 0000000..2985531
--- /dev/null
+++ b/scripts/gen-pgp-keyblock.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Script to create/update Knot Resolver PGP keyring
+# Fetches maintainer keys from keys.openpgp.org, exports a minimal keyblock,
+# re-imports it for display, and compares it against the published copy.
+set -o errexit -o nounset
+
+keys=(
+    'B6006460B60A80E782062449E747DF1F9575A3AA' # vladimir.cunat@nic.cz
+    '3057EE9A448F362D74205A779AB120DA0A76F6DE' # ales.mrazek@nic.cz
+    # '4A8BA48C2AED933BD495C509A1FBA5F7EF8C4869' # tomas.krizek@nic.cz expired 2022-03-31
+)
+outfile="kresd-keyblock.asc"
+url="https://secure.nic.cz/files/knot-resolver/kresd-keyblock.asc"
+
+# throwaway GNUPGHOME dirs so the user's real keyring is never touched
+keyring="$(mktemp -d)"
+keyring_import="$(mktemp -d)"
+published="$(mktemp)"
+
+cleanup() {
+    rm -rf "${keyring}"
+    rm -rf "${keyring_import}"
+    rm -rf "${published}"
+}
+trap cleanup EXIT
+
+# obtain keys from keys.openpgp.org
+gpg --homedir "${keyring}" -q --keyserver keys.openpgp.org --recv-keys "${keys[@]}"
+
+# export minimal size keys with just the necessary signatures
+rm -f "${outfile}"
+gpg --homedir "${keyring}" -q --export --export-options export-minimal --armor --output "${outfile}" "${keys[@]}"
+
+# display keys after import
+gpg --homedir "${keyring_import}" -q --import "${outfile}"
+gpg --homedir "${keyring_import}" -k
+echo "Created: ${outfile}"
+
+# check if update of secure.nic.cz keyblock might be needed
+curl -sfo "${published}" "${url}"
+diff -q "${outfile}" "${published}" &>/dev/null || echo "Generated keyblock differs from ${url}"
diff --git a/scripts/get-date.sh b/scripts/get-date.sh
new file mode 100755
index 0000000..3653155
--- /dev/null
+++ b/scripts/get-date.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+# Print the release date: parsed from NEWS for tagged releases, otherwise the
+# mtime of NEWS for development versions.
+set -o nounset
+cd "$(dirname $0)/.."
+
+# Get date from NEWS if possible (regular release)
+# NOTE(review): the trailing '$$' looks like a Makefile-style escape left over
+# in a plain shell script; with GNU grep -E it requires a literal '$' before
+# end-of-line, so a bare YYYY-MM-DD would not match and the mtime fallback
+# would always be taken -- verify, the intended pattern is probably a single '$'.
+DATE=$(head -n1 < NEWS | sed 's/.*(\(.*\)).*/\1/' | grep -E '^[0-9]{4}-[0-9]{2}-[0-9]{2}$$')
+
+# $? here is the exit status of the pipeline above (i.e. of grep)
+if [[ $? -ne 0 ]]; then
+    # or use last modification time of NEWS (dev versions)
+    DATE=$(date -u -r NEWS +%F)
+fi
+
+echo -n $DATE
diff --git a/scripts/kresd-host.lua b/scripts/kresd-host.lua
new file mode 100755
index 0000000..be6efd4
--- /dev/null
+++ b/scripts/kresd-host.lua
@@ -0,0 +1,115 @@
+#!/usr/bin/env luajit
+-- SPDX-License-Identifier: GPL-3.0-or-later
+-- host(1)-like lookup utility implemented on top of kresd-query.lua:
+-- it translates CLI options into kresd config snippets plus a result-printing
+-- Lua script and delegates execution to kresd.
+-- Work around OS X stripping dyld variables
+cli_bin = 'luajit scripts/kresd-query.lua'
+libdir = os.getenv('DYLD_LIBRARY_PATH')
+if libdir then
+    cli_bin = string.format('DYLD_LIBRARY_PATH="%s" %s', libdir, cli_bin)
+end
+-- Print usage text and return the given exit code
+local function help(rc)
+    print(string.format([[
+Usage: %s [-vdh46D] [-c class] [-t type]
+ [-f keyfile] hostname
+ Queries the DNS for information.
+ The hostname is looked up for IP4, IP6 and mail.
+ Use the -v option to see DNSSEC security information.
+ -t type what type to look for.
+ -c class what class to look for, if not class IN.
+ -C confstr additional kresd-style configuration.
+ -D DNSSEC enable with default root anchor
+ -f keyfile read trust anchors from file, with lines as -y.
+ -v be more verbose, shows nodata and security.
+ -d debug, traces the action, -d -d shows more.
+ -4 use ipv4 network, avoid ipv6.
+ -6 use ipv6 network, avoid ipv4.
+ -h show this usage help.]],
+        arg[0]))
+    return rc
+end
+
+-- Parse CLI arguments
+if #arg < 1 then
+    return help(1)
+end
+local qtypes, qclass, qname = {}, 'IN', nil
+local verbose, config = false, {}
+-- manual index loop because some options consume the following argument
+k = 1 while k <= #arg do
+    local v = arg[k]
+    if v == '-h' or v == '--help' then
+        return help(0)
+    elseif v == '-C' then
+        k = k + 1
+        table.insert(config, arg[k])
+    elseif v == '-D' then
+        table.insert(config, 'trust_anchors.add_file("root.keys")')
+    elseif v == '-f' then
+        k = k + 1
+        table.insert(config, string.format('trust_anchors.add_file("%s")', arg[k]))
+    elseif v == '-v' then
+        verbose = true
+    elseif v == '-d' then
+        verbose = true
+        table.insert(config, 'log_level("debug")')
+    elseif v == '-4' then
+        table.insert(config, 'net.ipv6 = false')
+    elseif v == '-6' then
+        table.insert(config, 'net.ipv4 = false')
+    elseif v == '-c' then
+        k = k + 1
+        qclass = arg[k]:upper()
+    elseif v == '-t' then
+        k = k + 1
+        table.insert(qtypes, arg[k]:upper())
+    elseif v:byte() == string.byte('-') then
+        -- any other dash-option is unknown
+        return help(1)
+    else
+        qname = v
+        -- Check if name is an IP addresses
+        -- @TODO: convert to domain name and make a PTR lookup
+    end
+    k = k + 1
+end
+if not qname then
+    return help(1)
+end
+-- default to the host(1) triple: address, IPv6 address, mail exchanger
+if #qtypes == 0 then
+    qtypes = {'A', 'AAAA', 'MX'}
+end
+-- Assemble config/query: run one kresd-query invocation per query type;
+-- 'capture' is the Lua snippet executed by kresd on the response packet.
+for _, qtype in ipairs(qtypes) do
+    query = string.format('-t %s -c %s %s', qtype, qclass, qname)
+    capture = string.format([[
+        local qname = "%s"
+        local qtype = "%s"
+        local qverbose = %s]], qname, qtype, tostring(verbose))..[[
+        local qry = req:resolved()
+        local section = pkt:rrsets(kres.section.ANSWER)
+        for i = 1, #section do
+            local rr = section[i]
+            for k = 1, rr.rrs.count do
+                local rdata = rr:tostring(k - 1)
+                local owner = kres.dname2str(rr:owner())
+                if qverbose then
+                    if not qry.flags.DNSSEC_WANT or qry.flags.DNSSEC_INSECURE then
+                        rdata = rdata .. " (insecure)"
+                    else
+                        rdata = rdata .. " (secure)"
+                    end
+                end
+                if rr.type == kres.type.A then
+                    print(string.format("%s has address %s", owner, rdata))
+                elseif rr.type == kres.type.AAAA then
+                    print(string.format("%s has IPv6 address %s", owner, rdata))
+                elseif rr.type == kres.type.MX then
+                    print(string.format("%s mail is handled by %s", owner, rdata))
+                elseif rr.type == kres.type.CNAME then
+                    print(string.format("%s is an alias for %s", owner, rdata))
+                else
+                    print(string.format("%s has %s record %s", owner, qtype, rdata))
+                end
+            end
+        end
+        ]]
+    os.execute(string.format('%s -C \'%s\' %s \'%s\'', cli_bin, table.concat(config, ' '), query, capture))
+end
diff --git a/scripts/kresd-query.lua b/scripts/kresd-query.lua
new file mode 100755
index 0000000..713b9c7
--- /dev/null
+++ b/scripts/kresd-query.lua
@@ -0,0 +1,63 @@
+#!/usr/bin/env luajit
+-- SPDX-License-Identifier: GPL-3.0-or-later
+-- Single-shot query tool: builds a throwaway kresd config on stdin that
+-- resolves one name and runs a user-supplied Lua script on the result.
+cli_bin = 'kresd -q -c -'
+-- Work around OS X stripping dyld variables
+libdir = os.getenv('DYLD_LIBRARY_PATH')
+if libdir then
+    cli_bin = string.format('DYLD_LIBRARY_PATH="%s" %s', libdir, cli_bin)
+end
+-- Config template piped into kresd: %s slots are (config, qname, qtype,
+-- qclass, user script); pcall keeps script errors from hanging the daemon.
+cli_cmd = [[echo '
+option("ALWAYS_CUT", true)
+%s
+return resolve("%s", kres.type.%s, kres.class.%s, 0,
+function (pkt, req)
+    local ok, err = pcall(function () %s end)
+    if not ok then
+        print(err)
+    end
+    quit()
+end)']]
+-- Print usage text
+local function help()
+    name = 'kresd-query.lua'
+    print(string.format('Usage: %s [-t type] [-c class] [-C config] <name> <script>', name))
+    print('Execute a single-shot query and run a script on the result.')
+    print('There are two variables available: pkt (kres.pkt_t), req (kres.request_t)')
+    print('See modules README to learn about their APIs.')
+    print('')
+    print('Options:')
+    print('\t-h,--help ... print this help')
+    print('\t-t TYPE ... query for given type (default: A)')
+    print('\t-c CLASS ... query in given class (default: IN)')
+    print('\t-C config_str ... kresd-style config (default: -)')
+    print('Examples:')
+    print('\t'..name..' -t SOA cz "print(pkt:qname())" ... print response QNAME')
+end
+-- Parse CLI arguments
+if #arg < 2 then help() return 1 end
+local qtype, qclass, qname = 'A', 'IN', nil
+local config, scripts = '', {}
+-- manual index loop because -C/-c/-t consume the following argument
+k = 1 while k <= #arg do
+    local v = arg[k]
+    if v == '-h' or v == '--help' then
+        return help()
+    elseif v == '-C' then
+        k = k + 1
+        config = arg[k]
+    elseif v == '-c' then
+        k = k + 1
+        qclass = arg[k]:upper()
+    elseif v == '-t' then
+        k = k + 1
+        qtype = arg[k]:upper()
+    elseif v:byte() == string.byte('-') then
+        return help()
+    elseif not qname then
+        qname = v
+    else
+        -- everything after the name is concatenated into the result script
+        table.insert(scripts, v)
+    end
+    k = k + 1
+end
+cli_cmd = string.format(cli_cmd, config, qname, qtype, qclass, table.concat(scripts, ' '))
+return os.execute(cli_cmd..' | '..cli_bin)
diff --git a/scripts/kresd.apparmor b/scripts/kresd.apparmor
new file mode 100644
index 0000000..ad6f911
--- /dev/null
+++ b/scripts/kresd.apparmor
@@ -0,0 +1,29 @@
+# AppArmor profile confining the kresd daemon binary.
+#include <tunables/global>
+
+/usr/sbin/kresd {
+    #include <abstractions/base>
+    #include <abstractions/p11-kit>
+    #include <abstractions/nameservice>
+    capability net_bind_service,
+    capability setgid,
+    capability setuid,
+    # seems to be needed during start to read /var/lib/knot-resolver
+    # while we still run as root.
+    capability dac_override,
+
+    network tcp,
+    network udp,
+
+    /proc/sys/net/core/somaxconn r,
+    /etc/knot-resolver/* r,
+    /var/lib/knot-resolver/ r,
+    /var/lib/knot-resolver/** rwlk,
+
+    # modules
+    /usr/lib{,64}/kdns_modules/*.lua r,
+    /usr/lib{,64}/kdns_modules/*.so rm,
+
+    # Site-specific additions and overrides. See local/README for details.
+    #include <local/usr.sbin.kresd>
+}
+
diff --git a/scripts/luacov_gen_empty.sh b/scripts/luacov_gen_empty.sh
new file mode 100755
index 0000000..127734d
--- /dev/null
+++ b/scripts/luacov_gen_empty.sh
@@ -0,0 +1,18 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+# Generate stats file in luacov format indicating that files named on stdin
+# were not processed.
+#
+# Normally luacov does not know about files which were not loaded so
+# without this manual addition the files are missing in coverage report.
+
+# Usage:
+# $ luacov_gen_empty.sh < list_of_lua_files > luacov.empty_stats.out
+
+set -o errexit -o nounset
+IFS=$'\n'
+
+# NOTE(review): read without -r mangles backslashes in filenames; unlikely
+# for Lua source paths but worth confirming.
+while read FILENAME
+do
+    # "0:<file>" followed by a blank-ish line = zero hits recorded for <file>
+    echo -e "0:${FILENAME}\n "
+done
diff --git a/scripts/luacov_to_info.lua b/scripts/luacov_to_info.lua
new file mode 100755
index 0000000..b27ba99
--- /dev/null
+++ b/scripts/luacov_to_info.lua
@@ -0,0 +1,57 @@
+#!/usr/bin/env luajit
+-- SPDX-License-Identifier: GPL-3.0-or-later
+-- Convert one or more luacov stats files (given as CLI arguments) into
+-- lcov "info" format on stdout, accumulating hits across all inputs.
+
+local luacov = require('luacov')
+local ReporterBase = require('luacov.reporter').ReporterBase
+-- Custom reporter that collects per-file, per-line hit counts in memory
+local LcovReporter = setmetatable({}, ReporterBase)
+LcovReporter.__index = LcovReporter
+
+-- Start (or resume) accumulating coverage for a source file
+function LcovReporter:on_new_file(filename)
+    self.finfo = self.current_files[filename] or {name=filename, coverage={}}
+end
+
+-- Line was instrumented but never hit: record it with 0 unless already seen
+function LcovReporter:on_mis_line(_, lineno, _)
+    self.finfo.coverage[lineno] = self.finfo.coverage[lineno] or 0
+end
+
+-- Line was hit: add the hit count (accumulates across stats files)
+function LcovReporter:on_hit_line(_, lineno, _, hits)
+    self.finfo.coverage[lineno] = (self.finfo.coverage[lineno] or 0) + hits
+end
+
+-- Store the finished per-file record back into the shared table
+function LcovReporter:on_end_file()
+    self.current_files[self.finfo.name] = self.finfo
+    self.finfo = nil
+end
+
+-- Write out results in lcov format
+local function write_lcov_info(files)
+    for fname, finfo in pairs(files) do
+        local instrumented, nonzero = 0, 0
+        print('TN:')
+        print(string.format('SF:%s', fname))
+        for i, hits in pairs(finfo.coverage) do
+            print(string.format('DA:%d,%d', i, hits))
+            instrumented = instrumented + 1
+            if hits > 0 then
+                nonzero = nonzero + 1
+            end
+        end
+        -- LH = lines hit, LF = lines found (instrumented)
+        print(string.format('LH:%d', nonzero))
+        print(string.format('LF:%d', instrumented))
+        print('end_of_record')
+    end
+end
+
+-- Accumulate total coverage
+local all_files = {}
+for _, fname in ipairs(arg) do
+    local conf = luacov.load_config()
+    conf.statsfile = fname
+    local reporter = assert(LcovReporter:new(conf))
+    -- share one table across all inputs so counts are merged
+    reporter.current_files = all_files
+    reporter:run()
+    reporter:close()
+end
+
+-- Write results
+write_lcov_info(all_files)
diff --git a/scripts/make-archive.sh b/scripts/make-archive.sh
new file mode 100755
index 0000000..8270367
--- /dev/null
+++ b/scripts/make-archive.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+# Create a development tarball
+set -o errexit -o nounset -o xtrace
+
+cd "$(dirname ${0})/.."
+
+# make sure we don't accidentally add / overwrite forgotten changes in git
+(git diff-index --quiet HEAD && git diff-index --cached --quiet HEAD) || \
+    (echo 'git index has uncommitted changes!'; exit 1)
+
+# not on a release tag -> embed timestamp + git hash into the version
+if ! git describe --tags --exact-match; then
+    # devel version
+    GIT_HASH=$(git rev-parse --short HEAD )
+    TIMESTAMP=$(date -u +'%s' 2>/dev/null)
+
+    # modify and commit meson.build
+    sed -i "s/^\(\s*version\s*:\s*'\)\([^']\+\)\('.*\)/\1\2.$TIMESTAMP.$GIT_HASH\3/" meson.build
+
+    : changed version in meson.build, changes must be committed to git
+    git add meson.build
+    git commit -m 'DROP: devel version archive'
+
+    # temporary commit is always undone on exit, success or failure
+    cleanup() {
+        # undo commit
+        git reset --hard HEAD^ >/dev/null
+    }
+    trap cleanup EXIT
+fi
+
+# create tarball
+rm -rf build_dist ||:
+meson build_dist
+ninja -C build_dist dist
+
+# print path to generated tarball
+set +o xtrace
+find "${PWD}/build_dist/meson-dist/" -name "knot-resolver-*.tar.xz"
diff --git a/scripts/make-doc.sh b/scripts/make-doc.sh
new file mode 100755
index 0000000..d41e234
--- /dev/null
+++ b/scripts/make-doc.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+# Build the documentation: doxygen API docs, then Sphinx HTML and
+# (when makeinfo is available) Texinfo/info output.
+cd "$(dirname ${0})/.."
+
+pushd doc
+doxygen
+popd
+
+# prefer the Fedora/RHEL 'sphinx-build-3' name, fall back to 'sphinx-build';
+# the $? check works because errexit is only enabled afterwards
+SPHINX=$(command -v sphinx-build-3)
+if [ $? -ne 0 ]; then
+    SPHINX=$(command -v sphinx-build)
+fi
+
+set -o errexit -o nounset
+
+rm -rf doc/html
+${SPHINX} ${@} -b html -d doc/.doctrees doc doc/html
+
+if command -v makeinfo &>/dev/null; then
+    rm -rf doc/texinfo
+    ${SPHINX} ${@} -b texinfo -d doc/.doctrees doc doc/texinfo
+
+    # Sphinx < 2 doesn't create a separate directory for figures, so if
+    # necessary move them to the correct location and update the references in
+    # the generated Texinfo file
+    if [ ! -d doc/texinfo/knot-resolver-figures ]; then
+        cd doc/texinfo
+        mkdir knot-resolver-figures
+        mv *.png *.svg knot-resolver-figures/
+        sed -e 's/\(@image{\)/\1knot-resolver-figures\//' \
+            knot-resolver.texi > knot-resolver.texi.tmp
+        mv knot-resolver.texi.tmp knot-resolver.texi
+        cd ../..
+    fi
+
+    make -C doc/texinfo info
+
+    # stage only the files that get installed
+    mkdir doc/texinfo/.install
+    mv doc/texinfo/knot-resolver.info \
+        doc/texinfo/knot-resolver-figures \
+        doc/texinfo/.install/
+fi
diff --git a/scripts/make-obs.sh b/scripts/make-obs.sh
new file mode 100755
index 0000000..abe9670
--- /dev/null
+++ b/scripts/make-obs.sh
@@ -0,0 +1,59 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+#
+# create OpenSUSE Build System (OBS) source package
+#
+# this needs to be run on a system with:
+#
+# * apkg
+# * dpkg-buildpackage
+#
+# usage:
+# ./scripts/make-obs.sh [path.to.archive.xz] [1]
+#
+# supply archives as optional arguments to build from,
+# otherwise archive will be built from sources by apkg
+# second argument is optional release number (defaults to 1)
+#
+# output at pkg/obs/ (removed on each run)
+set -o errexit -o nounset
+
+pushd "$(dirname ${0})/.."
+
+OUTDIR="pkg/obs"
+APKG_OPTS="-O $OUTDIR"
+
+# NOTE(review): '[ -z $@ ]' is fragile -- unquoted $@ breaks with 2+ args or
+# paths containing spaces; '[ $# -eq 0 ]' is the robust test. Works by
+# accident for 0/1 argument.
+if [ -z $@ ]; then
+    echo "building OBS srcpkg from project files"
+else
+    AR=$1
+    echo "building OBS srcpkg from specified archive(s)"
+    APKG_OPTS="-a $AR $APKG_OPTS"
+
+    RELEASE=${2:-}
+    if [ ! -z "$RELEASE" ]; then
+        echo "custom release: $RELEASE"
+        APKG_OPTS="-r $RELEASE $APKG_OPTS"
+    fi
+fi
+
+set -o xtrace
+
+# ':' no-op lines double as progress messages under xtrace
+: removing existing output files at output dir: $OUTDIR
+rm -rf "$OUTDIR"
+: making debian source package from archive
+apkg srcpkg $APKG_OPTS -d debian
+: removing extra debian source package files
+rm -f $OUTDIR/*_source.*
+: rendering RPM template
+apkg srcpkg $APKG_OPTS -d fedora --render-template
+: fixing RPM .spec to use debian source archive
+sed -i 's/^\(Source0:\s\+\).*/\1knot-resolver_%{version}.orig.tar.xz/' $OUTDIR/*.spec
+: rendering PKGBUILD template
+apkg srcpkg $APKG_OPTS -d arch --render-template
+: fixing PKGBUILD to use debian source archive
+sed -i 's/^source=.*/source=("knot-resolver_${pkgver}.orig.tar.xz")/' $OUTDIR/PKGBUILD
+popd >/dev/null
+
+echo "OBS srcpkg ready at: $OUTDIR"
+
diff --git a/scripts/map_install_src.lua b/scripts/map_install_src.lua
new file mode 100755
index 0000000..ffc9a30
--- /dev/null
+++ b/scripts/map_install_src.lua
@@ -0,0 +1,168 @@
+#!/usr/bin/env luajit
+-- SPDX-License-Identifier: GPL-3.0-or-later
+
+-- parse install commands from stdin
+-- input: PREFIX=... make install --dry-run --always-make
+-- output: <install path> <source path>
+-- (or sed commands if --sed was specified)
+
+-- output format selector (global; read later by process_cmd): 'list' or 'sed'
+output = 'list'
+-- more than one argument, or -h/--help: print usage and bail out
+if #arg > 1 or arg[1] == '-h' or arg[1] == '--help' then
+    print(string.format([[
+Read install commands and map install paths to paths in source directory.
+
+Usage:
+$ PREFIX=... make install --dry-run --always-make | %s
+
+Example output:
+/kresd/git/.local/lib/kdns_modules/policy.lua modules/policy/policy.lua
+
+Option --sed will produce output suitable as input suitable for sed.]],
+    arg[0]))
+    os.exit(1)
+elseif #arg == 0 then
+    -- no arguments: keep the default plain-list output
+    output = 'list'
+elseif arg[1] == '--sed' then
+    output = 'sed'
+else
+    print('Invalid arguments. See --help.')
+    os.exit(2)
+end
+
+-- remove double // from paths and remove trailing /
+-- returns exactly one value: the normalized path string
+function normalize_path(path)
+    assert(path)
+    local changes  -- keep gsub's substitution count out of the global namespace
+    repeat
+        path, changes = path:gsub('//', '/')
+    until changes == 0
+    -- parentheses truncate gsub's (string, count) pair to just the string,
+    -- so callers never accidentally receive the match count
+    return (path:gsub('/$', ''))
+end
+
+-- truthy when the word looks like a command-line option (leading dash)
+function is_opt(word)
+    return string.match(word, '^%-')
+end
+
+-- opts requiring additional argument to be skipped
+-- (value-taking install(1) options whose value does not affect path mapping)
+local ignored_opts_with_arg = {
+    ['--backup'] = true,
+    ['-g'] = true,
+    ['--group'] = true,
+    ['-m'] = true,
+    ['--mode'] = true,
+    ['-o'] = true,
+    ['--owner'] = true,
+    ['--strip-program'] = true,
+    ['--suffix'] = true,
+}
+
+-- state machine junctions caused by --opts
+-- returns: new state (expect, mode) and target name if any
+function parse_opts(word, expect, mode)
+    if word == '--' then
+        -- end-of-options marker: everything that follows is a name
+        return 'names', mode, nil
+    end
+    if word == '-d' or word == '--directory' then
+        return 'opt_or_name', 'newdir', nil
+    end
+    if word == '-t' or word == '--target-directory' then
+        return 'targetdir', mode, nil
+    end
+    -- capture the value of the inline --target-directory=DIR form
+    local dir = word:match('^%-%-target%-directory=(.*)$')
+    if dir then
+        return 'opt_or_name', mode, dir
+    end
+    if ignored_opts_with_arg[word] then
+        return 'ignore', mode, nil  -- skip this option's value word
+    end
+    return expect, mode, nil  -- unhandled opt: state unchanged
+end
+
+
+-- cmd: complete install command line: install -m 0644 -t dest src1 src2
+-- dirs: names known to be directories: name => true
+-- returns: updated dirs
+-- side effect: prints one "<install path> <source path>" mapping (or a sed
+-- command, per the global 'output') for each installed file
+function process_cmd(cmd, dirs)
+    sanity_check(cmd)
+    local expect = 'install'  -- parser state
+    local mode = 'copy'       -- copy or newdir
+    local target              -- last argument or argument for install -t
+    local names = {}          -- non-option arguments
+    local newtarget           -- was an accidental global; keep it function-local
+
+    for word in cmd:gmatch('%S+') do
+        if expect == 'install' then -- parsing 'install'
+            assert(word == 'install')
+            expect = 'opt_or_name'
+        elseif expect == 'opt_or_name' then
+            if is_opt(word) then
+                expect, mode, newtarget = parse_opts(word, expect, mode)
+                target = newtarget or target
+            else
+                if mode == 'copy' then
+                    table.insert(names, word)
+                elseif mode == 'newdir' then
+                    local path = normalize_path(word)
+                    dirs[path] = true
+                else
+                    assert(false, 'bad mode')
+                end
+            end
+        elseif expect == 'targetdir' then
+            -- word is the argument of -t/--target-directory
+            local path = normalize_path(word)
+            dirs[path] = true
+            target = word
+            expect = 'opt_or_name'
+        elseif expect == 'names' then
+            -- after '--' every word is a name, even if it starts with '-'
+            table.insert(names, word)
+        elseif expect == 'ignore' then
+            -- this word is the value of an option we do not care about
+            expect = 'opt_or_name'
+        else
+            assert(false, 'bad expect')
+        end
+    end
+    if mode == 'newdir' then
+        -- no mapping to print, this cmd just created directory
+        return dirs
+    end
+
+    if not target then -- last argument is the target
+        target = table.remove(names)
+    end
+    assert(target, 'fatal: no target in install cmd')
+    target = normalize_path(target)
+
+    for _, name in pairs(names) do
+        -- strip leading directories; 'basename' was an accidental global
+        local basename = string.gsub(name, "(.*/)(.*)", "%2")
+        if not dirs[target] then
+            print('fatal: target directory "' .. target .. '" was not created yet!')
+            os.exit(2)
+        end
+        -- mapping installed name -> source name
+        if output == 'list' then
+            print(target .. '/' .. basename, name)
+        elseif output == 'sed' then
+            print(string.format([[s`%s`%s`g]],
+                target .. '/' .. basename, name))
+        else
+            assert(false, 'unsupported output')
+        end
+    end
+    return dirs
+end
+
+-- abort on install command lines the parser cannot handle
+function sanity_check(cmd)
+    -- shell quoting/escaping is not supported; plain-text find avoids
+    -- any pattern interpretation of the searched characters
+    assert(not string.find(cmd, '"', 1, true), 'quotes " are not supported')
+    assert(not string.find(cmd, "'", 1, true), "quotes ' are not supported")
+    assert(not string.find(cmd, '\\', 1, true), "escapes like \\ are not supported")
+    assert(string.match(cmd, '^install%s'), 'not an install command')
+end
+
+-- remember directories created by install -d so we can expand relative paths
+local dirs = {}
+-- io.lines() iterates stdin line by line until EOF, equivalent to the
+-- io.read('*line') loop it replaces
+for cmd in io.lines() do
+    -- only install commands are interesting; ignore any other make output
+    if cmd:match('^install%s') then
+        dirs = process_cmd(cmd, dirs)
+    end
+end
diff --git a/scripts/run-pylint.sh b/scripts/run-pylint.sh
new file mode 100755
index 0000000..9241382
--- /dev/null
+++ b/scripts/run-pylint.sh
@@ -0,0 +1,12 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+set -o errexit -o nounset
+
+cd "$(dirname ${0})/.."
+
+# Find Python modules and standalone Python scripts
+FILES=$(find ./tests/pytests \
+ -type d -exec test -e '{}/__init__.py' \; -print -prune -o \
+ -name '*.py' -print)
+
+python3 -m pylint -j 0 --rcfile ./tests/pytests/pylintrc ${FILES}
diff --git a/scripts/run-scanbuild-with-args.sh b/scripts/run-scanbuild-with-args.sh
new file mode 100755
index 0000000..b295453
--- /dev/null
+++ b/scripts/run-scanbuild-with-args.sh
@@ -0,0 +1,51 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+# Run clang's static analyzer with the project's curated checker set;
+# pass the build command (e.g. 'ninja -C build') as arguments.
+set -o errexit -o nounset
+
+# following checkers are disabled on purpose:
+# Clang does not support attribute cleanup and this is causing false positives in following checkers:
+# unix.Malloc
+# alpha.unix.SimpleStream
+# alpha.unix.Stream
+# https://bugs.llvm.org/show_bug.cgi?id=3888
+
+# These are disabled for other reasons:
+# alpha.clone.CloneChecker # way too many false positives
+# alpha.core.CastToStruct # we use this pattern too much, hard to avoid in many cases
+# alpha.deadcode.UnreachableCode # false positives/flags sanity checks depending on implementation details
+# alpha.security.MallocOverflow # not smart enough to infer max values from data types
+
+# exec replaces this shell, so scan-build's exit status becomes the script's;
+# --status-bugs makes that status non-zero whenever any bug is reported
+exec scan-build --status-bugs -no-failure-reports \
+-analyzer-config aggressive-binary-operation-simplification=true \
+-disable-checker unix.Malloc \
+-enable-checker alpha.core.BoolAssignment \
+-enable-checker alpha.core.CastSize \
+-enable-checker alpha.core.Conversion \
+-enable-checker alpha.core.DynamicTypeChecker \
+-enable-checker alpha.core.FixedAddr \
+-enable-checker alpha.core.IdenticalExpr \
+-enable-checker alpha.core.PointerArithm \
+-enable-checker alpha.core.PointerSub \
+-enable-checker alpha.core.SizeofPtr \
+-enable-checker alpha.core.TestAfterDivZero \
+-enable-checker alpha.cplusplus.IteratorRange \
+-enable-checker alpha.security.ArrayBound \
+-enable-checker alpha.security.ArrayBoundV2 \
+-enable-checker alpha.security.ReturnPtrRange \
+-enable-checker alpha.security.taint.TaintPropagation \
+-enable-checker alpha.unix.BlockInCriticalSection \
+-enable-checker alpha.unix.Chroot \
+-enable-checker alpha.unix.PthreadLock \
+-enable-checker alpha.unix.cstring.BufferOverlap \
+-enable-checker alpha.unix.cstring.NotNullTerminated \
+-enable-checker alpha.unix.cstring.OutOfBounds \
+-enable-checker nullability.NullableDereferenced \
+-enable-checker nullability.NullablePassedToNonnull \
+-enable-checker nullability.NullableReturnedFromNonnull \
+-enable-checker optin.performance.Padding \
+-enable-checker optin.portability.UnixAPI \
+-enable-checker security.FloatLoopCounter \
+-enable-checker valist.CopyToSelf \
+-enable-checker valist.Uninitialized \
+-enable-checker valist.Unterminated \
+"$@"
diff --git a/scripts/test-config.sh b/scripts/test-config.sh
new file mode 100755
index 0000000..695e518
--- /dev/null
+++ b/scripts/test-config.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+# Utility script used by meson to run config tests post installation
+set -o nounset -o errexit
+
+# if anything fails during test setup, use exit code 77 to mark it as skipped
+function skip {
+    exit 77
+}
+trap skip ERR
+
+# TEST_FILE must be provided in the environment; its directory holds the
+# config and data files the test needs
+TEST_DIR="$(dirname ${TEST_FILE})"
+TMP_RUNDIR="$(mktemp -d)"
+
+# on exit: SIGKILL any background jobs the test left running, then remove
+# the scratch directory
+function finish {
+    if [[ "$(jobs -p)" != "" ]]
+    then
+        echo "SIGKILLing leftover processes:"
+        jobs -l
+        kill -s SIGKILL $(jobs -p)
+    fi
+    rm -rf "${TMP_RUNDIR}"
+}
+trap finish EXIT
+
+# run from a scratch copy so the test may modify files freely
+cp -a "${TEST_DIR}/"* "${TMP_RUNDIR}/"
+cd "${TMP_RUNDIR}"
+
+# NOTE(review): 'exit 77' runs in a subshell, so it only makes the compound
+# command fail; it is errexit plus the skip ERR trap above that actually
+# turn a missing kresd into exit code 77
+which kresd || (echo "kresd not executable!"; exit 77)
+# bash: 'trap' with a lone sigspec resets it, dropping the skip handler
+trap ERR # get actual kresd error code from now on
+
+kresd "$@"
diff --git a/scripts/test-integration-prepare.sh b/scripts/test-integration-prepare.sh
new file mode 100755
index 0000000..13db143
--- /dev/null
+++ b/scripts/test-integration-prepare.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+# Prepare a source tree for integration tests: fetch submodules, build deps.
+set -o errexit -o nounset
+
+# first argument: path to the source tree to prepare
+cd "$1"
+
+git submodule update --init --recursive
+# build dependencies quietly; errexit still aborts the script on failure
+make depend >/dev/null 2>&1
diff --git a/scripts/update-authors.sh b/scripts/update-authors.sh
new file mode 100755
index 0000000..fe1d857
--- /dev/null
+++ b/scripts/update-authors.sh
@@ -0,0 +1,41 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+set -o nounset -o xtrace
+
+function spdx_originator_to_authors {
+ # $1 = Person/Organization
+ find -name '*.spdx' | xargs grep --no-filename "^PackageOriginator: $1: " \
+ | cut -d : -f 3 | sed -e 's/^ *//' -e 's/(/</' -e 's/)/>/' | sort -u
+}
+
+cd "$(git rev-parse --show-toplevel)"
+AUTHORS_FILE=AUTHORS
+TEMP_FILE="$(mktemp AUTHORS.XXXXXXXXXX)"
+
+# drop all names from the current file
+sed '/^People who contributed commits to our Git repo are/q' "${AUTHORS_FILE}" > "${TEMP_FILE}"
+# append to the new file
+git log --format="%aN <%aE>" | sort -u | git check-mailmap --stdin | sort -u >> "${TEMP_FILE}"
+
+echo '' >> "${TEMP_FILE}"
+echo 'Knot Resolver source tree also bundles code and content published by:' >> "${TEMP_FILE}"
+spdx_originator_to_authors "Person" >> "${TEMP_FILE}"
+spdx_originator_to_authors "Organization" >> "${TEMP_FILE}"
+
+echo '' >> "${TEMP_FILE}"
+echo 'Thanks to everyone who knowingly or unknowingly contributed!' >> "${TEMP_FILE}"
+
+# check for changes
+diff "${AUTHORS_FILE}" "${TEMP_FILE}"
+CHANGED=$?
+
+if [ $CHANGED -ne 0 ]; then
+ # update
+ mv "${TEMP_FILE}" "${AUTHORS_FILE}"
+fi
+
+# cleanup
+rm -f "${TEMP_FILE}"
+
+# signal change with exit code
+exit $CHANGED
diff --git a/scripts/update-root-hints.sh b/scripts/update-root-hints.sh
new file mode 100755
index 0000000..5f7a564
--- /dev/null
+++ b/scripts/update-root-hints.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# SPDX-License-Identifier: GPL-3.0-or-later
+set -o nounset -o xtrace
+
+TEMP_FILE=/tmp/root.hints
+HINTS_FILE=etc/root.hints
+
+# download latest root hints
+wget -O ${TEMP_FILE} https://www.internic.net/domain/named.root
+
+# strip comments for diff
+sed '/^;/d' ${TEMP_FILE} > ${TEMP_FILE}.clean
+sed '/^;/d' ${HINTS_FILE} > ${HINTS_FILE}.clean
+
+# check for changes
+diff ${TEMP_FILE}.clean ${HINTS_FILE}.clean >/dev/null
+CHANGED=$?
+
+if [ $CHANGED -ne 0 ]; then
+ # update root.hints
+ mv ${TEMP_FILE} ${HINTS_FILE}
+fi
+
+# cleanup
+rm -f ${TEMP_FILE} ${TEMP_FILE}.clean ${HINTS_FILE}.clean
+
+# signal change with exit code
+exit $CHANGED