summaryrefslogtreecommitdiffstats
path: root/packaging
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 16:14:31 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-04-17 16:14:31 +0000
commit2d5707c7479eacb3b1ad98e01b53f56a88f8fb78 (patch)
treed9c334e83692851c02e3e1b8e65570c97bc82481 /packaging
parentInitial commit. (diff)
downloadrsync-2d5707c7479eacb3b1ad98e01b53f56a88f8fb78.tar.xz
rsync-2d5707c7479eacb3b1ad98e01b53f56a88f8fb78.zip
Adding upstream version 3.2.7.upstream/3.2.7
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'packaging')
-rw-r--r--packaging/auto-Makefile12
-rwxr-xr-xpackaging/branch-from-patch174
-rwxr-xr-xpackaging/cull-options148
-rw-r--r--packaging/lsb/rsync.spec87
-rw-r--r--packaging/lsb/rsync.xinetd13
-rw-r--r--packaging/openssl-rsync.cnf18
-rwxr-xr-xpackaging/patch-update244
-rw-r--r--packaging/pkglib.py266
-rwxr-xr-xpackaging/pre-push16
-rwxr-xr-xpackaging/prep-auto-dir43
-rwxr-xr-xpackaging/release-rsync399
-rwxr-xr-xpackaging/smart-make45
-rw-r--r--packaging/solaris/build_pkg.sh94
-rw-r--r--packaging/systemd/rsync.service32
-rw-r--r--packaging/systemd/rsync.socket10
-rw-r--r--packaging/systemd/rsync@.service28
-rwxr-xr-xpackaging/var-checker94
-rwxr-xr-xpackaging/year-tweak94
18 files changed, 1817 insertions, 0 deletions
diff --git a/packaging/auto-Makefile b/packaging/auto-Makefile
new file mode 100644
index 0000000..7f2e258
--- /dev/null
+++ b/packaging/auto-Makefile
@@ -0,0 +1,12 @@
# Proxy Makefile: forwards every build target into the auto-build-save
# "build" directory (see packaging/prep-auto-dir).
TARGETS := all install install-ssl-daemon install-all install-strip conf gen gensend reconfigure restatus \
	proto man clean cleantests distclean test check check29 check30 installcheck splint \
	doxygen doxygen-upload finddead rrsync

.PHONY: $(TARGETS) auto-prep

# Every target first makes sure the build dir is set up, then re-runs make there.
$(TARGETS): auto-prep
	make -C build $@

# Fail early when prep-auto-dir reports no auto-build-save dir is configured.
auto-prep:
	@if test x`packaging/prep-auto-dir` = x; then echo "auto-build-save is not setup"; exit 1; fi
	@echo 'Build branch: '`readlink build/.branch | tr % /`
diff --git a/packaging/branch-from-patch b/packaging/branch-from-patch
new file mode 100755
index 0000000..440b583
--- /dev/null
+++ b/packaging/branch-from-patch
@@ -0,0 +1,174 @@
+#!/usr/bin/env -S python3 -B
+
+# This script turns one or more diff files in the patches dir (which is
+# expected to be a checkout of the rsync-patches git repo) into a branch
+# in the main rsync git checkout. This allows the applied patch to be
+# merged with the latest rsync changes and tested. To update the diff
+# with the resulting changes, see the patch-update script.
+
+import os, sys, re, argparse, glob
+
+sys.path = ['packaging'] + sys.path
+
+from pkglib import *
+
def main():
    """Turn each named .diff file into a patch/BASE/NAME git branch."""
    global created, info, local_branch

    # Require clean checkouts of both this repo and the patches dir.
    cur_branch, args.base_branch = check_git_state(args.base_branch, not args.skip_check, args.patches_dir)

    local_branch = get_patch_branches(args.base_branch)

    if args.delete_local_branches:
        for name in sorted(local_branch):
            branch = f"patch/{args.base_branch}/{name}"
            cmd_chk(['git', 'branch', '-D', branch])
        local_branch = set()

    if args.add_missing:
        # Queue every patches/*.diff that doesn't yet have a local branch.
        for fn in sorted(glob.glob(f"{args.patches_dir}/*.diff")):
            name = re.sub(r'\.diff$', '', re.sub(r'.+/', '', fn))
            if name not in local_branch and fn not in args.patch_files:
                args.patch_files.append(fn)

    if not args.patch_files:
        return

    for fn in args.patch_files:
        if not fn.endswith('.diff'):
            die(f"Filename is not a .diff file: {fn}")
        if not os.path.isfile(fn):
            die(f"File not found: {fn}")

    scanned = set()
    info = { }

    patch_list = [ ]
    for fn in args.patch_files:
        m = re.match(r'^(?P<dir>.*?)(?P<name>[^/]+)\.diff$', fn)
        patch = argparse.Namespace(**m.groupdict())
        if patch.name in scanned:
            continue
        # BUGFIX: nothing ever populated "scanned", so the duplicate check
        # above could never fire and parent diffs were re-queued repeatedly.
        scanned.add(patch.name)
        patch.fn = fn

        # Gather the description text: everything up to a "based-on:" marker
        # or the first diff-ish line.
        lines = [ ]
        commit_hash = None
        with open(patch.fn, 'r', encoding='utf-8') as fh:
            for line in fh:
                m = re.match(r'^based-on: (\S+)', line)
                if m:
                    commit_hash = m[1]
                    break
                if (re.match(r'^index .*\.\..* \d', line)
                 or re.match(r'^diff --git ', line)
                 or re.match(r'^--- (old|a)/', line)):
                    break
                lines.append(re.sub(r'\s*\Z', "\n", line, 1))
        info_txt = ''.join(lines).strip() + "\n"
        lines = None

        # The "patch -p1 <patches/NAME.diff" lines name this patch (last)
        # preceded by its ancestors; the nearest ancestor is the parent.
        parent = args.base_branch
        patches = re.findall(r'patch -p1 <%s/(\S+)\.diff' % args.patches_dir, info_txt)
        if patches:
            last = patches.pop()
            if last != patch.name:
                warn(f"No identity patch line in {patch.fn}")
                # BUGFIX: only push "last" back when it is NOT the identity
                # patch. This append used to run unconditionally, which made
                # every patch look like its own parent.
                patches.append(last)
            if patches:
                parent = patches.pop()
                if parent not in scanned:
                    diff_fn = patch.dir + parent + '.diff'
                    if not os.path.isfile(diff_fn):
                        die(f"Failed to find parent of {patch.fn}: {parent}")
                    # Add parent to args.patch_files so that we will look for the
                    # parent's parent. Any duplicates will be ignored.
                    args.patch_files.append(diff_fn)
        else:
            warn(f"No patch lines found in {patch.fn}")

        info[patch.name] = [ parent, info_txt, commit_hash ]

        patch_list.append(patch)

    created = set()
    for patch in patch_list:
        create_branch(patch)

    cmd_chk(['git', 'checkout', args.base_branch])
+
+
def create_branch(patch):
    """Create branch patch/BASE/<patch.name>, recursing to create its parent first."""
    if patch.name in created:
        return
    created.add(patch.name)

    parent, info_txt, commit_hash = info[patch.name]
    parent = argparse.Namespace(dir=patch.dir, name=parent, fn=patch.dir + parent + '.diff')

    if parent.name == args.base_branch:
        # A base-level patch: branch from the recorded based-on hash when known.
        parent_branch = commit_hash if commit_hash else args.base_branch
    else:
        create_branch(parent)
        parent_branch = '/'.join(['patch', args.base_branch, parent.name])

    branch = '/'.join(['patch', args.base_branch, patch.name])
    print("\n" + '=' * 64)
    print(f"Processing {branch} ({parent_branch})")

    # Recreate the branch from scratch if it already exists locally.
    if patch.name in local_branch:
        cmd_chk(['git', 'branch', '-D', branch])

    cmd_chk(['git', 'checkout', '-b', branch, parent_branch])

    # Record the patch's description as a PATCH.<name> file on the branch.
    info_fn = 'PATCH.' + patch.name
    with open(info_fn, 'w', encoding='utf-8') as fh:
        fh.write(info_txt)
    cmd_chk(['git', 'add', info_fn])

    with open(patch.fn, 'r', encoding='utf-8') as fh:
        patch_txt = fh.read()

    cmd_run('patch -p1'.split(), input=patch_txt)

    # Remove any *.orig backup files the patch program left behind.
    for fn in glob.glob('*.orig') + glob.glob('*/*.orig'):
        os.unlink(fn)

    # git-add every file the diff created, restoring its recorded mode bits.
    pos = 0
    new_file_re = re.compile(r'\nnew file mode (?P<mode>\d+)\s+--- /dev/null\s+\+\+\+ b/(?P<fn>.+)')
    while True:
        m = new_file_re.search(patch_txt, pos)
        if not m:
            break
        os.chmod(m['fn'], int(m['mode'], 8))
        cmd_chk(['git', 'add', m['fn']])
        pos = m.end()

    # Let the user review the status and add any other new files by hand.
    while True:
        cmd_chk('git status'.split())
        ans = input('Press Enter to commit, Ctrl-C to abort, or type a wild-name to add a new file: ')
        if ans == '':
            break
        cmd_chk("git add " + ans, shell=True)

    # Commit; on failure drop into a shell so the user can fix things, then retry.
    # NOTE(review): the fix-it shell is hard-coded to /bin/zsh -- confirm that's intended.
    while True:
        s = cmd_run(['git', 'commit', '-a', '-m', f"Creating branch from {patch.name}.diff."])
        if not s.returncode:
            break
        s = cmd_run(['/bin/zsh'])
        if s.returncode:
            die('Aborting due to shell error code')
+
+
if __name__ == '__main__':
    # Parse the command line into the module-global "args" and hand off to main().
    parser = argparse.ArgumentParser(description="Create a git patch branch from an rsync patch file.", add_help=False)
    parser.add_argument('--branch', '-b', dest='base_branch', metavar='BASE_BRANCH', default='master', help="The branch the patch is based on. Default: master.")
    parser.add_argument('--add-missing', '-a', action='store_true', help="Add a branch for every patches/*.diff that doesn't have a branch.")
    parser.add_argument('--skip-check', action='store_true', help="Skip the check that ensures starting with a clean branch.")
    parser.add_argument('--delete', dest='delete_local_branches', action='store_true', help="Delete all the local patch/BASE/* branches, not just the ones that are being recreated.")
    parser.add_argument('--patches-dir', '-p', metavar='DIR', default='patches', help="Override the location of the rsync-patches dir. Default: patches.")
    parser.add_argument('patch_files', metavar='patches/DIFF_FILE', nargs='*', help="Specify what patch diff files to process. Default: all of them.")
    parser.add_argument("--help", "-h", action="help", help="Output this help message and exit.")
    args = parser.parse_args()
    main()
+
+# vim: sw=4 et ft=python
diff --git a/packaging/cull-options b/packaging/cull-options
new file mode 100755
index 0000000..e71818c
--- /dev/null
+++ b/packaging/cull-options
@@ -0,0 +1,148 @@
+#!/usr/bin/env python3
+# This script outputs either perl or python code that parses all possible options
+# that the code in options.c might send to the server. The resulting code is then
+# included in the rrsync script.
+
+import re, argparse
+
# Tables of the options the scan of options.c will build on / add to.
# Long-opt values: -1 = disabled, 0 = no arg, 1 = arg needs no checking,
# 2 = only check the arg when receiving, 3 = always check the arg.
short_no_arg = { }           # short option letters that take no argument
short_with_num = { '@': 1 }  # short option letters that take a numeric argument
long_opts = { # These include some extra long-args that BackupPC uses:
    'block-size': 1,
    'daemon': -1,
    'debug': 1,
    'fake-super': 0,
    'fuzzy': 0,
    'group': 0,
    'hard-links': 0,
    'ignore-times': 0,
    'info': 1,
    'links': 0,
    'log-file': 3,
    'munge-links': 0,
    'no-munge-links': -1,
    'one-file-system': 0,
    'owner': 0,
    'perms': 0,
    'recursive': 0,
    'stderr': 1,
    'times': 0,
    'copy-devices': -1,
    'write-devices': -1,
    }
+
def main():
    """Scan ../options.c for options the client can send and print the tables."""
    last_long_opt = None

    with open('../options.c') as fh:
        for line in fh:
            # A short option char pushed onto argstr[] (the regex deliberately
            # excludes '.', 'i', and 'e').
            m = re.search(r"argstr\[x\+\+\] = '([^.ie])'", line)
            if m:
                short_no_arg[m.group(1)] = 1
                last_long_opt = None
                continue

            # A short option formatted with a trailing number (e.g. "-B%lu").
            m = re.search(r'asprintf\([^,]+, "-([a-zA-Z0-9])\%l?[ud]"', line)
            if m:
                short_with_num[m.group(1)] = 1
                last_long_opt = None
                continue

            # A long option sent with no "=arg" attached; remember it in case
            # a separately-pushed safe_arg() value follows.
            m = re.search(r'args\[ac\+\+\] = "--([^"=]+)"', line)
            if m:
                last_long_opt = m.group(1)
                if last_long_opt not in long_opts:
                    long_opts[last_long_opt] = 0
                else:
                    last_long_opt = None
                # BUGFIX: this "else" and "continue" were dedented one level,
                # which made the "continue" unconditional and left every check
                # below this point unreachable.
                continue

            if last_long_opt:
                # A separately-pushed arg for the preceding long option.
                m = re.search(r'args\[ac\+\+\] = safe_arg\("", ([^[("\s]+)\);', line)
                if m:
                    long_opts[last_long_opt] = 2
                    last_long_opt = None
                    continue
                if 'args[ac++] = ' in line:
                    last_long_opt = None

            m = re.search(r'return "--([^"]+-dest)";', line)
            if m:
                long_opts[m.group(1)] = 2
                last_long_opt = None
                continue

            # Long options sent with an "=arg" attached (several code styles).
            m = re.search(r'asprintf\([^,]+, "--([^"=]+)=', line)
            if not m:
                m = re.search(r'args\[ac\+\+\] = "--([^"=]+)=', line)
            if not m:
                m = re.search(r'args\[ac\+\+\] = safe_arg\("--([^"=]+)"', line)
            if not m:
                m = re.search(r'fmt = .*: "--([^"=]+)=', line)
            if m:
                long_opts[m.group(1)] = 1
                last_long_opt = None

    long_opts['files-from'] = 3

    txt = """\
### START of options data produced by the cull-options script. ###

# To disable a short-named option, add its letter to this string:
"""

    txt += str_assign('short_disabled', 's') + "\n"
    txt += '# These are also disabled when the restricted dir is not "/":\n'
    txt += str_assign('short_disabled_subdir', 'KLk') + "\n"
    txt += '# These are all possible short options that we will accept (when not disabled above):\n'
    txt += str_assign('short_no_arg', ''.join(sorted(short_no_arg)), 'DO NOT REMOVE ANY')
    txt += str_assign('short_with_num', ''.join(sorted(short_with_num)), 'DO NOT REMOVE ANY')

    txt += """
# To disable a long-named option, change its value to a -1. The values mean:
# 0 = the option has no arg; 1 = the arg doesn't need any checking; 2 = only
# check the arg when receiving; and 3 = always check the arg.
"""

    print(txt, end='')

    if args.python:
        print("long_opts = {")
        sep = ':'
    else:
        print("our %long_opt = (")
        sep = ' =>'

    for opt in sorted(long_opts):
        # All --min-* and --max-* options take a simple numeric arg.
        if opt.startswith(('min-', 'max-')):
            val = 1
        else:
            val = long_opts[opt]
        print(' ', repr(opt) + sep, str(val) + ',')

    if args.python:
        print("}")
    else:
        print(");")
    print("\n### END of options data produced by the cull-options script. ###")
+
+
def str_assign(name, val, comment=None):
    """Return an assignment line for the chosen output language (python or perl)."""
    tail = f" # {comment}" if comment else ''
    if args.python:
        return f"{name} = {val!r}{tail}\n"
    return f"our ${name} = {val!r};{tail}\n"
+
+
if __name__ == '__main__':
    # --perl and --python are mutually exclusive; python output is the default.
    parser = argparse.ArgumentParser(description="Output culled rsync options for rrsync.", add_help=False)
    out_group = parser.add_mutually_exclusive_group()
    out_group.add_argument('--perl', action='store_true', help="Output perl code.")
    out_group.add_argument('--python', action='store_true', help="Output python code (the default).")
    parser.add_argument('--help', '-h', action='help', help="Output this help message and exit.")
    args = parser.parse_args()
    if not args.perl:
        args.python = True
    main()
+
+# vim: sw=4 et
diff --git a/packaging/lsb/rsync.spec b/packaging/lsb/rsync.spec
new file mode 100644
index 0000000..f2d7aa4
--- /dev/null
+++ b/packaging/lsb/rsync.spec
@@ -0,0 +1,87 @@
+Summary: A fast, versatile, remote (and local) file-copying tool
+Name: rsync
+Version: 3.2.7
+%define fullversion %{version}
+Release: 1
+%define srcdir src
+Group: Applications/Internet
+License: GPL
+Source0: https://rsync.samba.org/ftp/rsync/%{srcdir}/rsync-%{fullversion}.tar.gz
+#Source1: https://rsync.samba.org/ftp/rsync/%{srcdir}/rsync-patches-%{fullversion}.tar.gz
+URL: https://rsync.samba.org/
+
+Prefix: %{_prefix}
+BuildRoot: /var/tmp/%{name}-root
+
+%package ssl-daemon
+Summary: An stunnel config file to support ssl rsync daemon connections.
+Group: Applications/Internet
+Requires: rsync, stunnel >= 4
+
+%description
+Rsync is a fast and extraordinarily versatile file copying tool. It can
+copy locally, to/from another host over any remote shell, or to/from a
+remote rsync daemon. It offers a large number of options that control
+every aspect of its behavior and permit very flexible specification of the
+set of files to be copied. It is famous for its delta-transfer algorithm,
+which reduces the amount of data sent over the network by sending only the
+differences between the source files and the existing files in the
+destination. Rsync is widely used for backups and mirroring and as an
+improved copy command for everyday use.
+
+%description ssl-daemon
+Provides a config file for stunnel that will (if you start your stunnel
+service) cause stunnel to listen for ssl rsync-daemon connections and run
+"rsync --daemon" to handle them.
+
+%prep
+# Choose one -- setup source only, or setup source + rsync-patches:
+%setup -q -n rsync-%{fullversion}
+#%setup -q -b1 -n rsync-%{fullversion}
+
+# If you used "%setup -q -b1 ...", choose the patches you wish to apply:
+#patch -p1 <patches/acls.diff
+#patch -p1 <patches/xattrs.diff
+#patch -p1 <patches/remote-option.diff
+#patch -p1 <patches/db.diff
+
+# Avoid extra perl dependencies for scripts going into doc dir.
+chmod -x support/*
+
+%build
+#./prepare-source
+%configure
+
+make
+
+%install
+rm -rf $RPM_BUILD_ROOT
+make install install-ssl-daemon DESTDIR=$RPM_BUILD_ROOT
+
+mkdir -p $RPM_BUILD_ROOT/etc/xinetd.d $RPM_BUILD_ROOT/etc/rsync-ssl/certs
+install -m 644 packaging/lsb/rsync.xinetd $RPM_BUILD_ROOT/etc/xinetd.d/rsync
+
+%clean
+rm -rf $RPM_BUILD_ROOT
+
+%files
+%defattr(-,root,root)
+%doc COPYING NEWS.md README.md support/ tech_report.tex
+%config(noreplace) /etc/xinetd.d/rsync
+%{_prefix}/bin/rsync
+%{_prefix}/bin/rsync-ssl
+%{_mandir}/man1/rsync.1*
+%{_mandir}/man1/rsync-ssl.1*
+%{_mandir}/man5/rsyncd.conf.5*
+
+%files ssl-daemon
+%config(noreplace) /etc/stunnel/rsyncd.conf
+%dir /etc/rsync-ssl/certs
+
+%changelog
+* Thu Oct 20 2022 Wayne Davison <wayne@opencoder.net>
+Released 3.2.7.
+
+* Fri Mar 21 2008 Wayne Davison <wayne@opencoder.net>
+Added installation of /etc/xinetd.d/rsync file and some commented-out
+lines that demonstrate how to use the rsync-patches tar file.
diff --git a/packaging/lsb/rsync.xinetd b/packaging/lsb/rsync.xinetd
new file mode 100644
index 0000000..63fc11c
--- /dev/null
+++ b/packaging/lsb/rsync.xinetd
@@ -0,0 +1,13 @@
+# default: off
+# description: The rsync server is a good addition to an ftp server, as it
+# allows crc checksumming etc.
+service rsync
+{
+ disable = yes
+ socket_type = stream
+ wait = no
+ user = root
+ server = /usr/bin/rsync
+ server_args = --daemon
+ log_on_failure += USERID
+}
diff --git a/packaging/openssl-rsync.cnf b/packaging/openssl-rsync.cnf
new file mode 100644
index 0000000..7432285
--- /dev/null
+++ b/packaging/openssl-rsync.cnf
@@ -0,0 +1,18 @@
+# This config file can be used with rsync to enable legacy digests
+# (such as MD4) by using the OPENSSL_CONF environment variable.
+# See rsync's configure --with-openssl-conf=/path/name option.
+
+openssl_conf = openssl_init
+
+[openssl_init]
+providers = provider_sect
+
+[provider_sect]
+default = default_sect
+legacy = legacy_sect
+
+[default_sect]
+activate = 1
+
+[legacy_sect]
+activate = 1
diff --git a/packaging/patch-update b/packaging/patch-update
new file mode 100755
index 0000000..fd56a9d
--- /dev/null
+++ b/packaging/patch-update
@@ -0,0 +1,244 @@
+#!/usr/bin/env -S python3 -B
+
+# This script is used to turn one or more of the "patch/BASE/*" branches
+# into one or more diffs in the "patches" directory. Pass the option
+# --gen if you want generated files in the diffs. Pass the name of
+# one or more diffs if you want to just update a subset of all the
+# diffs.
+
+import os, sys, re, argparse, time, shutil
+
+sys.path = ['packaging'] + sys.path
+
+from pkglib import *
+
# Commands run (in order) to rebuild the generated files for --gen mode.
MAKE_GEN_CMDS = [
    './prepare-source'.split(),
    'cd build && if test -f config.status ; then ./config.status ; else ../configure ; fi',
    'make -C build gen'.split(),
    ]
# Scratch dir that holds each branch's generated files for diffing.
TMP_DIR = "patches.gen"

# Keep "git merge" from popping up an editor for merge-commit messages.
os.environ['GIT_MERGE_AUTOEDIT'] = 'no'
+
def main():
    """Regenerate a patches/*.diff file for each patch/BASE/* branch processed."""
    global master_commit, parent_patch, description, completed, last_touch

    if not os.path.isdir(args.patches_dir):
        die(f'No "{args.patches_dir}" directory was found.')
    if not os.path.isdir('.git'):
        die('No ".git" directory present in the current dir.')

    starting_branch, args.base_branch = check_git_state(args.base_branch, not args.skip_check, args.patches_dir)

    master_commit = latest_git_hash(args.base_branch)

    if cmd_txt_chk(['packaging/prep-auto-dir']).out == '':
        die('You must setup an auto-build-save dir to use this script.')

    if args.gen:
        # Build the base branch's generated files into TMP_DIR/master so each
        # patch branch's generated files can be diffed against them later.
        if os.path.lexists(TMP_DIR):
            die(f'"{TMP_DIR}" must not exist in the current directory.')
        gen_files = get_gen_files()
        os.mkdir(TMP_DIR, 0o700)
        for cmd in MAKE_GEN_CMDS:
            cmd_chk(cmd)
        cmd_chk(['rsync', '-a', *gen_files, f'{TMP_DIR}/master/'])

    last_touch = int(time.time())

    # Start by finding all patches so that we can load all possible parents.
    patches = sorted(list(get_patch_branches(args.base_branch)))

    parent_patch = { }   # patch name -> parent patch name
    description = { }    # patch name -> text for the top of the .diff file

    for patch in patches:
        # Pull each patch's description out of its PATCH.<name> file diff.
        branch = f"patch/{args.base_branch}/{patch}"
        desc = ''
        proc = cmd_pipe(['git', 'diff', '-U1000', f"{args.base_branch}...{branch}", '--', f"PATCH.{patch}"])
        in_diff = False
        for line in proc.stdout:
            if in_diff:
                if not re.match(r'^[ +]', line):
                    continue
                line = line[1:]
                # A "patch -p1" line naming a different patch identifies our parent.
                m = re.search(r'patch -p1 <patches/(\S+)\.diff', line)
                if m and m[1] != patch:
                    parpat = parent_patch[patch] = m[1]
                    if not parpat in patches:
                        die(f"Parent of {patch} is not a local branch: {parpat}")
                desc += line
            elif re.match(r'^@@ ', line):
                in_diff = True
        description[patch] = desc
        proc.communicate()

    if args.patch_files: # Limit the list of patches to actually process
        valid_patches = patches
        patches = [ ]
        for fn in args.patch_files:
            name = re.sub(r'\.diff$', '', re.sub(r'.+/', '', fn))
            if name not in valid_patches:
                die(f"Local branch not available for patch: {name}")
            patches.append(name)

    completed = set()

    for patch in patches:
        if patch in completed:
            continue
        if not update_patch(patch):
            break

    if args.gen:
        shutil.rmtree(TMP_DIR)

    # Let the clock tick past the last generated-file touch before restoring
    # the starting branch and its build dir.
    while last_touch >= int(time.time()):
        time.sleep(1)
    cmd_chk(['git', 'checkout', starting_branch])
    cmd_chk(['packaging/prep-auto-dir'], discard='output')
+
+
def update_patch(patch):
    """Merge the patch's branch with its base and rewrite patches/<patch>.diff.

    Returns 1 on success and 0 if the user aborted (which makes the caller
    stop processing any remaining patches).
    """
    global last_touch

    completed.add(patch) # Mark it as completed early to short-circuit any (bogus) dependency loops.

    # A patch with a parent is based on its parent's branch; otherwise it is
    # based directly on the master commit.
    parent = parent_patch.get(patch, None)
    if parent:
        if parent not in completed:
            if not update_patch(parent):
                return 0
        based_on = parent = f"patch/{args.base_branch}/{parent}"
    else:
        parent = args.base_branch
        based_on = master_commit

    print(f"======== {patch} ========")

    # In --gen mode, wait for the clock to tick so regenerated files get
    # fresh mtimes relative to the last batch.
    while args.gen and last_touch >= int(time.time()):
        time.sleep(1)

    branch = f"patch/{args.base_branch}/{patch}"
    s = cmd_run(['git', 'checkout', branch])
    if s.returncode != 0:
        return 0

    s = cmd_run(['git', 'merge', based_on])
    ok = s.returncode == 0
    skip_shell = False
    if not ok or args.cmd or args.make or args.shell:
        cmd_chk(['packaging/prep-auto-dir'], discard='output')
        if not ok:
            print(f'"git merge {based_on}" incomplete -- please fix.')
            if not run_a_shell(parent, patch):
                return 0
            if not args.make and not args.cmd:
                skip_shell = True
        if args.make:
            if cmd_run(['packaging/smart-make']).returncode != 0:
                if not run_a_shell(parent, patch):
                    return 0
                if not args.cmd:
                    skip_shell = True
        if args.cmd:
            if cmd_run(args.cmd).returncode != 0:
                if not run_a_shell(parent, patch):
                    return 0
                skip_shell = True
        if args.shell and not skip_shell:
            if not run_a_shell(parent, patch):
                return 0

    # Write the description, the based-on marker, and then the diff itself.
    # BUGFIX: everything below must stay inside this "with" block -- the
    # later fh.write() calls used to run after the file had been closed,
    # which raises "ValueError: I/O operation on closed file".
    with open(f"{args.patches_dir}/{patch}.diff", 'w', encoding='utf-8') as fh:
        fh.write(description[patch])
        fh.write(f"\nbased-on: {based_on}\n")

        if args.gen:
            gen_files = get_gen_files()
            for cmd in MAKE_GEN_CMDS:
                cmd_chk(cmd)
            cmd_chk(['rsync', '-a', *gen_files, f"{TMP_DIR}/{patch}/"])
        else:
            gen_files = [ ]
        last_touch = int(time.time())

        # Copy the branch's diff, dropping the PATCH.* file and "index" lines.
        proc = cmd_pipe(['git', 'diff', based_on])
        skipping = False
        for line in proc.stdout:
            if skipping:
                if not re.match(r'^diff --git a/', line):
                    continue
                skipping = False
            elif re.match(r'^diff --git a/PATCH', line):
                skipping = True
                continue
            if not re.match(r'^index ', line):
                fh.write(line)
        proc.communicate()

        if args.gen:
            # Append a diff of the generated files (parent's vs this patch's),
            # rewriting the tmp-dir paths into a/ and b/ style names.
            e_tmp_dir = re.escape(TMP_DIR)
            diff_re = re.compile(r'^(diff -Nurp) %s/[^/]+/(.*?) %s/[^/]+/(.*)' % (e_tmp_dir, e_tmp_dir))
            minus_re = re.compile(r'^\-\-\- %s/[^/]+/([^\t]+)\t.*' % e_tmp_dir)
            plus_re = re.compile(r'^\+\+\+ %s/[^/]+/([^\t]+)\t.*' % e_tmp_dir)

            if parent == args.base_branch:
                parent_dir = 'master'
            else:
                m = re.search(r'([^/]+)$', parent)
                parent_dir = m[1]

            proc = cmd_pipe(['diff', '-Nurp', f"{TMP_DIR}/{parent_dir}", f"{TMP_DIR}/{patch}"])
            for line in proc.stdout:
                line = diff_re.sub(r'\1 a/\2 b/\3', line)
                line = minus_re.sub(r'--- a/\1', line)
                line = plus_re.sub(r'+++ b/\1', line)
                fh.write(line)
            proc.communicate()

    return 1
+
+
def run_a_shell(parent, patch):
    """Drop the user into an interactive shell; return False if they abort."""
    # Advertise the parent/patch in the prompt so the user knows the context.
    m = re.search(r'([^/]+)$', parent)
    parent_dir = m[1]
    os.environ['PS1'] = f"[{parent_dir}] {patch}: "

    while True:
        s = cmd_run([os.environ.get('SHELL', '/bin/sh')])
        if s.returncode != 0:
            # A non-zero shell exit offers the user a chance to abort the run.
            ans = input("Abort? [n/y] ")
            if re.match(r'^y', ans, flags=re.I):
                return False
            continue
        # Keep looping until the checkout has been left in a clean state.
        cur_branch, is_clean, status_txt = check_git_status(0)
        if is_clean:
            break
        print(status_txt, end='')

    # Force a fresh compile of the objects (string cmd => runs via the shell).
    cmd_run('rm -f build/*.o build/*/*.o')

    return True
+
+
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Turn a git branch back into a diff files in the patches dir.", add_help=False)
    parser.add_argument('--branch', '-b', dest='base_branch', metavar='BASE_BRANCH', default='master', help="The branch the patch is based on. Default: master.")
    parser.add_argument('--skip-check', action='store_true', help="Skip the check that ensures starting with a clean branch.")
    parser.add_argument('--make', '-m', action='store_true', help="Run the smart-make script in every patch branch.")
    parser.add_argument('--cmd', '-c', help="Run a command in every patch branch.")
    parser.add_argument('--shell', '-s', action='store_true', help="Launch a shell for every patch/BASE/* branch updated, not just when a conflict occurs.")
    parser.add_argument('--gen', metavar='DIR', nargs='?', const='', help='Include generated files. Optional DIR value overrides the default of using the "patches" dir.')
    parser.add_argument('--patches-dir', '-p', metavar='DIR', default='patches', help="Override the location of the rsync-patches dir. Default: patches.")
    parser.add_argument('patch_files', metavar='patches/DIFF_FILE', nargs='*', help="Specify what patch diff files to process. Default: all of them.")
    parser.add_argument("--help", "-h", action="help", help="Output this help message and exit.")
    args = parser.parse_args()
    # A bare --gen means "use the patches dir"; --gen=DIR redirects the output dir.
    if args.gen == '':
        args.gen = args.patches_dir
    elif args.gen is not None:
        args.patches_dir = args.gen
    main()
+
+# vim: sw=4 et ft=python
diff --git a/packaging/pkglib.py b/packaging/pkglib.py
new file mode 100644
index 0000000..c4c5741
--- /dev/null
+++ b/packaging/pkglib.py
@@ -0,0 +1,266 @@
+import os, sys, re, subprocess, argparse
+
# This python3 library provides a few helpful routines that are
# used by the latest packaging scripts.

# Default text encoding applied to subprocess I/O (None disables it); it can
# be changed via set_default_encoding().
default_encoding = 'utf-8'
+
# Output the msg args to stderr. Accepts all the args that print() accepts.
def warn(*msg):
    """Write the given values to stderr as one space-separated line."""
    sys.stderr.write(' '.join(str(part) for part in msg) + "\n")
+
+
# Output the msg args to stderr and die with a non-zero return-code.
# Accepts all the args that print() accepts.
def die(*msg):
    """Print *msg* to stderr (just as warn() would), then exit with status 1."""
    print(*msg, file=sys.stderr)
    sys.exit(1)
+
+
# Set this to an encoding name or set it to None to avoid the default encoding idiom.
def set_default_encoding(enc):
    """Change the module-wide subprocess text encoding (None = binary I/O)."""
    # BUGFIX: without the global statement the assignment below only created
    # a function-local variable, making this function a silent no-op.
    global default_encoding
    default_encoding = enc
+
+
# Set shell=True if the cmd is a string; sets a default encoding unless raw=True was specified.
def _tweak_opts(cmd, opts, **maybe_set_args):
    """Return a copy of *opts* with our convenience defaults folded in.

    Pseudo-options recognized (and removed from the returned dict):
      raw=True     -- skip the default text encoding (keep binary output).
      capture=...  -- 'stdout', 'stderr', 'output' (both piped separately),
                      or 'combined' (stderr folded into stdout).
      discard=...  -- 'stdout', 'stderr', or 'output' (sent to /dev/null).
    """
    def _maybe_set(o, **msa): # Only set a value if the user didn't already set it.
        for var, val in msa.items():
            if var not in o:
                o[var] = val

    opts = opts.copy()  # never mutate the caller's dict
    _maybe_set(opts, **maybe_set_args)

    if isinstance(cmd, str):
        _maybe_set(opts, shell=True)  # a string cmd implies shell parsing

    want_raw = opts.pop('raw', False)
    if default_encoding and not want_raw:
        _maybe_set(opts, encoding=default_encoding)

    capture = opts.pop('capture', None)
    if capture:
        if capture == 'stdout':
            _maybe_set(opts, stdout=subprocess.PIPE)
        elif capture == 'stderr':
            _maybe_set(opts, stderr=subprocess.PIPE)
        elif capture == 'output':
            _maybe_set(opts, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        elif capture == 'combined':
            _maybe_set(opts, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    discard = opts.pop('discard', None)
    if discard:
        # We DO want to override any already set stdout|stderr values (unlike above).
        if discard == 'stdout' or discard == 'output':
            opts['stdout'] = subprocess.DEVNULL
        if discard == 'stderr' or discard == 'output':
            opts['stderr'] = subprocess.DEVNULL

    return opts
+
+
# This does a normal subprocess.run() with some auto-args added to make life easier.
def cmd_run(cmd, **opts):
    """Run *cmd* via subprocess.run() with our convenience defaults applied."""
    run_opts = _tweak_opts(cmd, opts)
    return subprocess.run(cmd, **run_opts)
+
+
# Like cmd_run() with a default check=True specified.
def cmd_chk(cmd, **opts):
    """Run *cmd*, raising on a non-zero exit unless check= was overridden."""
    checked_opts = _tweak_opts(cmd, opts, check=True)
    return subprocess.run(cmd, **checked_opts)
+
+
# Capture stdout in a string and return an object with out, err, and rc (return code).
# It defaults to capture='stdout' (so err is empty) but can be overridden using
# capture='combined' or capture='output' (the latter populates the err value).
def cmd_txt(cmd, **opts):
    """Run *cmd* and return a Namespace holding .out, .err, and .rc."""
    stdin_data = opts.pop('input', None)
    if stdin_data is not None:
        opts['stdin'] = subprocess.PIPE
    proc = subprocess.Popen(cmd, **_tweak_opts(cmd, opts, capture='stdout'))
    out, err = proc.communicate(input=stdin_data)
    return argparse.Namespace(out=out, err=err, rc=proc.returncode)
+
+
# Just like calling cmd_txt() except that it raises an error if the command has a non-0 return code.
# The raised error includes the cmd, the return code, and the captured output.
def cmd_txt_chk(cmd, **opts):
    """Run cmd_txt() and raise an Exception on a non-zero return code."""
    result = cmd_txt(cmd, **opts)
    if result.rc == 0:
        return result
    raise Exception(f'Command "{cmd}" returned non-0 exit status "{result.rc}" and output:\n{result.out}{result.err}')
+
+
# Starts a piped-output command of stdout (by default) and leaves it up to you to read
# the output and call communicate() on the returned object.
def cmd_pipe(cmd, **opts):
    """Start *cmd* with stdout piped; the caller reads and calls communicate()."""
    popen_opts = _tweak_opts(cmd, opts, capture='stdout')
    return subprocess.Popen(cmd, **popen_opts)
+
+
# Runs a "git status" command and dies if the checkout is not clean (the
# arg fatal_unless_clean can be used to make that non-fatal).  Returns a
# tuple of the current branch, the is_clean flag, and the status text.
def check_git_status(fatal_unless_clean=True, subdir='.'):
    status_txt = cmd_txt_chk(f"cd '{subdir}' && git status").out
    # Matches both older ("working directory") and newer ("working tree") git wording.
    is_clean = re.search(r'\nnothing to commit.+working (directory|tree) clean', status_txt) != None

    if not is_clean and fatal_unless_clean:
        if subdir == '.':
            subdir = ''
        else:
            subdir = f" *{subdir}*"
        die(f"The{subdir} checkout is not clean:\n" + status_txt)

    # Older git prefixed the branch line with "# ".
    m = re.match(r'^(?:# )?On branch (.+)\n', status_txt)
    cur_branch = m[1] if m else None

    return (cur_branch, is_clean, status_txt)
+
+
# Calls check_git_status() on the current git checkout and (optionally) a subdir path's
# checkout. Use fatal_unless_clean to indicate if an unclean checkout is fatal or not.
# The master_branch arg indicates what branch we want both checkouts to be using, and
# if the branch is wrong the user is given the option of either switching to the right
# branch or aborting.
def check_git_state(master_branch, fatal_unless_clean=True, check_extra_dir=None):
    """Return (current_branch, master_branch) after validating both checkouts."""
    cur_branch = check_git_status(fatal_unless_clean)[0]
    branch = re.sub(r'^patch/([^/]+)/[^/]+$', r'\1', cur_branch) # change patch/BRANCH/PATCH_NAME into BRANCH
    if branch != master_branch:
        print(f"The checkout is not on the {master_branch} branch.")
        if master_branch != 'master':
            sys.exit(1)
        ans = input(f"Do you want me to continue with --branch={branch}? [n] ")
        if not ans or not re.match(r'^y', ans, flags=re.I):
            sys.exit(1)
        master_branch = branch

    if check_extra_dir and os.path.isdir(os.path.join(check_extra_dir, '.git')):
        branch = check_git_status(fatal_unless_clean, check_extra_dir)[0]
        if branch != master_branch:
            print(f"The *{check_extra_dir}* checkout is on branch {branch}, not branch {master_branch}.")
            ans = input(f"Do you want to change it to branch {master_branch}? [n] ")
            if not ans or not re.match(r'^y', ans, flags=re.I):
                sys.exit(1)
            # BUGFIX: this used to call the undefined name "subdir.check_call",
            # which raised a NameError whenever the branch switch was needed.
            subprocess.check_call(f"cd {check_extra_dir} && git checkout '{master_branch}'", shell=True)

    return (cur_branch, master_branch)
+
+
# Return the git hash of the most recent commit.
def latest_git_hash(branch):
    """Return the newest commit hash on *branch* (dies if none can be found)."""
    log_txt = cmd_txt_chk(['git', 'log', '-1', '--no-color', branch]).out
    m = re.search(r'^commit (\S+)', log_txt, flags=re.M)
    if m:
        return m[1]
    die(f"Unable to determine commit hash for master branch: {branch}")
+
+
# Return a set of all branch names that have the format "patch/BASE_BRANCH/NAME"
# for the given base_branch string. Just the NAME portion is put into the set.
def get_patch_branches(base_branch):
    """Collect the NAME parts of the local patch/<base_branch>/NAME branches."""
    found = set()
    proc = cmd_pipe(['git', 'branch', '-l'])
    for line in proc.stdout:
        m = re.search(r' patch/([^/]+)/(.+)', line)
        if m and m[1] == base_branch:
            found.add(m[2])
    proc.communicate()
    return found
+
+
def mandate_gensend_hook():
    """Ensure .git/hooks/pre-push exists and contains a "make gensend" line."""
    hook = '.git/hooks/pre-push'
    if os.path.exists(hook):
        ct = cmd_txt(['grep', 'make gensend', hook], discard='output')
        if ct.rc:
            die('Please add a "make gensend" into your', hook, 'script.')
    else:
        print('Creating hook file:', hook)
        cmd_chk(['./rsync', '-a', 'packaging/pre-push', hook])
+
+
# Snag the GENFILES values out of the Makefile file and return them as a list.
def get_gen_files(want_dir_plus_list=False):
    """Parse GENFILES out of the auto-build-save Makefile.

    Returns the filenames joined with the auto-build dir, or, when
    want_dir_plus_list is True, a (dir, names) tuple instead.
    """
    cont_re = re.compile(r'\\\n')  # NOTE(review): compiled but never used below

    gen_files = [ ]

    # The build dir lives under auto-build-save/BRANCH with '/' changed to '%'.
    auto_dir = os.path.join('auto-build-save', cmd_txt('git rev-parse --abbrev-ref HEAD').out.strip().replace('/', '%'))

    with open(auto_dir + '/Makefile', 'r', encoding='utf-8') as fh:
        for line in fh:
            if not gen_files:
                # Skip ahead until the GENFILES= line is found.
                chk = re.sub(r'^GENFILES=', '', line)
                if line == chk:
                    continue
                line = chk
            # Accumulate words until a line arrives with no trailing backslash.
            m = re.search(r'\\$', line)
            line = re.sub(r'^\s+|\s*\\\n?$|\s+$', '', line)
            gen_files += line.split()
            if not m:
                break

    if want_dir_plus_list:
        return (auto_dir, gen_files)

    return [ os.path.join(auto_dir, fn) for fn in gen_files ]
+
+
def get_rsync_version():
    """Return the version string from the RSYNC_VERSION define in version.h."""
    with open('version.h', 'r', encoding='utf-8') as fh:
        contents = fh.read()
    m = re.match(r'^#define\s+RSYNC_VERSION\s+"(\d.+?)"', contents)
    if not m:
        die("Unable to find RSYNC_VERSION define in version.h")
    return m[1]
+
+
def get_NEWS_version_info():
    """Scrape NEWS.md for release info.

    Returns (last_version, last_protocol_version, pdate) where pdate maps
    each version in the release table to its release-date string.
    """
    rel_re = re.compile(r'^\| \S{2} \w{3} \d{4}\s+\|\s+(?P<ver>\d+\.\d+\.\d+)\s+\|\s+(?P<pdate>\d{2} \w{3} \d{4})?\s+\|\s+(?P<pver>\d+)\s+\|')
    last_version = last_protocol_version = None
    pdate = { }

    with open('NEWS.md', 'r', encoding='utf-8') as fh:
        for line in fh:
            if not last_version: # Find the first non-dev|pre version with a release date.
                m = re.search(r'rsync (\d+\.\d+\.\d+) .*\d\d\d\d', line)
                if m:
                    last_version = m[1]
            # Rows of the release table supply dates and protocol versions.
            m = rel_re.match(line)
            if m:
                if m['pdate']:
                    pdate[m['ver']] = m['pdate']
                if m['ver'] == last_version:
                    last_protocol_version = m['pver']

    if not last_protocol_version:
        die(f"Unable to determine protocol_version for {last_version}.")

    return last_version, last_protocol_version, pdate
+
+
def get_protocol_versions():
    """Scan rsync.h and return (PROTOCOL_VERSION, SUBPROTOCOL_VERSION) as strings."""
    proto = subproto = None
    define_re = re.compile(r'^#define\s+(PROTOCOL_VERSION|SUBPROTOCOL_VERSION)\s+(\d+)')

    with open('rsync.h', 'r', encoding='utf-8') as fh:
        for line in fh:
            m = define_re.match(line)
            if not m:
                continue
            if m[1] == 'PROTOCOL_VERSION':
                proto = m[2]
            else:
                subproto = m[2]
                break  # SUBPROTOCOL_VERSION comes last, so stop scanning

    if not proto:
        die("Unable to determine the current PROTOCOL_VERSION.")

    if not subproto:
        die("Unable to determine the current SUBPROTOCOL_VERSION.")

    return proto, subproto
+
+# vim: sw=4 et
diff --git a/packaging/pre-push b/packaging/pre-push
new file mode 100755
index 0000000..8a71369
--- /dev/null
+++ b/packaging/pre-push
@@ -0,0 +1,16 @@
#!/bin/bash -e

# Git pre-push hook for the rsync repo: when the master branch is being
# pushed to a github remote, run "make gensend" first.  A tag-only push
# (git push --tags) is allowed through without that step.

cat >/dev/null # Just discard stdin data

# Inspect the parent "git push" command line for a --tags argument.
# /proc/$PPID/cmdline is Linux-specific, so this check is skipped elsewhere.
if [[ -f /proc/$PPID/cmdline ]]; then
    while read -d $'\0' arg ; do
        if [[ "$arg" == '--tags' ]] ; then
            exit 0
        fi
    done </proc/$PPID/cmdline
fi

branch=`git rev-parse --abbrev-ref HEAD`
# Only a master-branch push to a github remote triggers the gensend work.
if [[ "$branch" = master && "$*" == *github* ]]; then
    make gensend
fi
diff --git a/packaging/prep-auto-dir b/packaging/prep-auto-dir
new file mode 100755
index 0000000..b67f390
--- /dev/null
+++ b/packaging/prep-auto-dir
@@ -0,0 +1,43 @@
#!/bin/sh -e

# This script will setup the build dir based on the current git branch and the
# directory auto-build-save/$BRANCH. We don't use a symlink for the build dir
# because we want to maximize the ccache reuse, so all builds must happen in
# the same real dir. When a dir is moved out of auto-build-save/$BRANCH to the
# build dir, it is replaced with a symlink so that it can still be found under
# that dir. The build dir also gets a .branch -> $BRANCH symlink so that we
# can figure out the current build dir's branch.

# To get started, just clone the rsync git repo and create the auto-build-save
# dir. If you have an existing git checkout and it is not in a pristine state,
# run "make distclean" before creating the auto-build-save dir.

# On success, the current branch name (with / mapped to %) is echoed so
# callers (auto-Makefile, smart-make) can tell that the setup is active.

auto_top='auto-build-save'
if test -d $auto_top && test -d .git; then
    # Branch names use % in place of / so they are valid single file names.
    desired_branch=`git rev-parse --abbrev-ref HEAD | tr / %`
    if test "$desired_branch" = HEAD; then
        echo "ERROR: switch to the right build dir manually when in detached HEAD mode." 1>&2
        exit 1
    fi
    auto_dir="$auto_top/$desired_branch"
    if test -d build; then
        cur_branch=`readlink build/.branch`
    else
        cur_branch='/' # impossible branch name == "no build dir yet"
    fi
    if test "$desired_branch" != "$cur_branch"; then
        # Stash the current build dir back under its branch name ...
        if test "$cur_branch" != /; then
            rm -f "$auto_top/$cur_branch"
            mv build "$auto_top/$cur_branch"
        fi
        # ... and swap in (or create) the dir for the desired branch,
        # leaving a symlink behind so it can still be found by name.
        test -d "$auto_dir" || mkdir "$auto_dir"
        test -h "$auto_dir/.branch" || ln -s "$desired_branch" "$auto_dir/.branch"
        mv "$auto_dir" build
        ln -s ../build "$auto_dir"
    fi
    if test ! -h Makefile; then
        rm -f Makefile
        ln -s packaging/auto-Makefile Makefile
    fi
    echo $desired_branch
fi
diff --git a/packaging/release-rsync b/packaging/release-rsync
new file mode 100755
index 0000000..511e90f
--- /dev/null
+++ b/packaging/release-rsync
@@ -0,0 +1,399 @@
+#!/usr/bin/env -S python3 -B
+
+# This script expects the directory ~/samba-rsync-ftp to exist and to be a
+# copy of the /home/ftp/pub/rsync dir on samba.org. When the script is done,
+# the git repository in the current directory will be updated, and the local
+# ~/samba-rsync-ftp dir will be ready to be rsynced to samba.org.
+
+import os, sys, re, argparse, glob, shutil, signal
+from datetime import datetime
+from getpass import getpass
+
+sys.path = ['packaging'] + sys.path
+
+from pkglib import *
+
+os.environ['LESS'] = 'mqeiXR'; # Make sure that -F is turned off and -R is turned on.
+dest = os.environ['HOME'] + '/samba-rsync-ftp'
+ORIGINAL_PATH = os.environ['PATH']
+
def main():
    """Walk through an rsync release: update the versioned files, tag the
    repo, and build the release tars/diffs in the ~/samba-rsync-ftp dir.

    Must be run from the top of an rsync checkout that has an
    auto-build-save dir configured (see packaging/prep-auto-dir).
    Interactive: prompts for version/release values and confirmations.
    """
    if not os.path.isfile('packaging/release-rsync'):
        die('You must run this script from the top of your rsync checkout.')

    now = datetime.now()
    cl_today = now.strftime('* %a %b %d %Y')
    year = now.strftime('%Y')
    ztoday = now.strftime('%d %b %Y')
    today = ztoday.lstrip('0')

    mandate_gensend_hook()

    curdir = os.getcwd()

    signal.signal(signal.SIGINT, signal_handler)

    if cmd_txt_chk(['packaging/prep-auto-dir']).out == '':
        die('You must setup an auto-build-save dir to use this script.')

    auto_dir, gen_files = get_gen_files(True)
    gen_pathnames = [ os.path.join(auto_dir, fn) for fn in gen_files ]

    dash_line = '=' * 74

    print(f"""\
{dash_line}
== This will release a new version of rsync onto an unsuspecting world. ==
{dash_line}
""")

    # Make sure the manpages were built with the expected install prefix.
    with open('build/rsync.1') as fh:
        for line in fh:
            if line.startswith(r'.\" prefix='):
                doc_prefix = line.split('=')[1].strip()
                if doc_prefix != '/usr':
                    warn(f"*** The documentation was built with prefix {doc_prefix} instead of /usr ***")
                    die("*** Read the md2man script for a way to override this. ***")
                break
            if line.startswith('.P'):
                die("Failed to find the prefix comment at the start of the rsync.1 manpage.")

    if not os.path.isdir(dest):
        die(dest, "dest does not exist")
    if not os.path.isdir('.git'):
        die("There is no .git dir in the current directory.")
    if os.path.lexists('a'):
        die('"a" must not exist in the current directory.')
    if os.path.lexists('b'):
        die('"b" must not exist in the current directory.')
    if os.path.lexists('patches.gen'):
        die('"patches.gen" must not exist in the current directory.')

    check_git_state(args.master_branch, True, 'patches')

    curversion = get_rsync_version()

    # All version values are strings!
    lastversion, last_protocol_version, pdate = get_NEWS_version_info()
    protocol_version, subprotocol_version = get_protocol_versions()

    # Suggest the next pre-release number based on the current version.
    version = curversion
    m = re.search(r'pre(\d+)', version)
    if m:
        version = re.sub(r'pre\d+', 'pre' + str(int(m[1]) + 1), version)
    else:
        version = version.replace('dev', 'pre1')

    ans = input(f"Please enter the version number of this release: [{version}] ")
    if ans == '.':
        version = re.sub(r'pre\d+', '', version)
    elif ans != '':
        version = ans
    if not re.match(r'^[\d.]+(pre\d+)?$', version):
        die(f'Invalid version: "{version}"')

    v_ver = 'v' + version
    rsync_ver = 'rsync-' + version

    if os.path.lexists(rsync_ver):
        die(f'"{rsync_ver}" must not exist in the current directory.')

    out = cmd_txt_chk(['git', 'tag', '-l', v_ver]).out
    if out != '':
        print(f"Tag {v_ver} already exists.")
        ans = input("\nDelete tag or quit? [Q/del] ")
        if not re.match(r'^del', ans, flags=re.I):
            die("Aborted")
        cmd_chk(['git', 'tag', '-d', v_ver])
        if os.path.isdir('patches/.git'):
            cmd_chk(f"cd patches && git tag -d '{v_ver}'")

    # Normalize any "-pre." style separators to a plain "pre".
    version = re.sub(r'[-.]*pre[-.]*', 'pre', version)
    if 'pre' in version and not curversion.endswith('dev'):
        lastversion = curversion

    ans = input(f"Enter the previous version to produce a patch against: [{lastversion}] ")
    if ans != '':
        lastversion = ans
    lastversion = re.sub(r'[-.]*pre[-.]*', 'pre', lastversion)

    rsync_lastver = 'rsync-' + lastversion
    if os.path.lexists(rsync_lastver):
        die(f'"{rsync_lastver}" must not exist in the current directory.')

    m = re.search(r'(pre\d+)', version)
    pre = m[1] if m else ''

    release = '0.1' if pre else '1'
    ans = input(f"Please enter the RPM release number of this release: [{release}] ")
    if ans != '':
        release = ans
    if pre:
        release += '.' + pre

    finalversion = re.sub(r'pre\d+', '', version)
    proto_changed = protocol_version != last_protocol_version
    if proto_changed:
        if finalversion in pdate:
            proto_change_date = pdate[finalversion]
        else:
            while True:
                # BUGFIX: this prompt was missing its f-string prefix, so the
                # protocol number was never interpolated into the question.
                ans = input(f"On what date did the protocol change to {protocol_version} get checked in? (dd Mmm yyyy) ")
                if re.match(r'^\d\d \w\w\w \d\d\d\d$', ans):
                    break
            proto_change_date = ans
    else:
        proto_change_date = ' ' * 11

    # Pre-releases go into src-previews; final releases into src + src-diffs.
    if 'pre' in lastversion:
        if not pre:
            die("You should not diff a release version against a pre-release version.")
        srcdir = srcdiffdir = lastsrcdir = 'src-previews'
        skipping = ' ** SKIPPING **'
    elif pre:
        srcdir = srcdiffdir = 'src-previews'
        lastsrcdir = 'src'
        skipping = ' ** SKIPPING **'
    else:
        srcdir = lastsrcdir = 'src'
        srcdiffdir = 'src-diffs'
        skipping = ''

    print(f"""
{dash_line}
version is "{version}"
lastversion is "{lastversion}"
dest is "{dest}"
curdir is "{curdir}"
srcdir is "{srcdir}"
srcdiffdir is "{srcdiffdir}"
lastsrcdir is "{lastsrcdir}"
release is "{release}"

About to:
 - tweak SUBPROTOCOL_VERSION in rsync.h, if needed
 - tweak the version in version.h and the spec files
 - tweak NEWS.md to ensure header values are correct
 - generate configure.sh, config.h.in, and proto.h
 - page through the differences
""")
    ans = input("<Press Enter to continue> ")

    specvars = {
        'Version:': finalversion,
        'Release:': release,
        '%define fullversion': f'%{{version}}{pre}',
        'Released': version + '.',
        '%define srcdir': srcdir,
    }

    tweak_files = 'version.h rsync.h NEWS.md'.split()
    tweak_files += glob.glob('packaging/*.spec')
    tweak_files += glob.glob('packaging/*/*.spec')

    for fn in tweak_files:
        with open(fn, 'r', encoding='utf-8') as fh:
            old_txt = txt = fh.read()
        if fn == 'version.h':
            x_re = re.compile(r'^(#define RSYNC_VERSION).*', re.M)
            msg = f"Unable to update RSYNC_VERSION in {fn}"
            txt = replace_or_die(x_re, r'\1 "%s"' % version, txt, msg)
        elif '.spec' in fn:
            for var, val in specvars.items():
                x_re = re.compile(r'^%s .*' % re.escape(var), re.M)
                txt = replace_or_die(x_re, var + ' ' + val, txt, f"Unable to update {var} in {fn}")
            x_re = re.compile(r'^\* \w\w\w \w\w\w \d\d \d\d\d\d (.*)', re.M)
            txt = replace_or_die(x_re, r'%s \1' % cl_today, txt, f"Unable to update ChangeLog header in {fn}")
        elif fn == 'rsync.h':
            # Raw string avoids invalid-escape warnings for \s and \d.
            x_re = re.compile(r'(#define\s+SUBPROTOCOL_VERSION)\s+(\d+)')
            repl = lambda m: m[1] + ' ' + ('0' if not pre or not proto_changed else '1' if m[2] == '0' else m[2])
            txt = replace_or_die(x_re, repl, txt, f"Unable to find SUBPROTOCOL_VERSION define in {fn}")
        elif fn == 'NEWS.md':
            efv = re.escape(finalversion)
            x_re = re.compile(r'^# NEWS for rsync %s \(UNRELEASED\)\s+## Changes in this version:\n' % efv
                + r'(\n### PROTOCOL NUMBER:\s+- The protocol number was changed to \d+\.\n)?')
            rel_day = 'UNRELEASED' if pre else today
            repl = (f'# NEWS for rsync {finalversion} ({rel_day})\n\n'
                + '## Changes in this version:\n')
            if proto_changed:
                repl += f'\n### PROTOCOL NUMBER:\n\n - The protocol number was changed to {protocol_version}.\n'
            good_top = re.sub(r'\(.*?\)', '(UNRELEASED)', repl, count=1)
            msg = f"The top lines of {fn} are not in the right format. It should be:\n" + good_top
            txt = replace_or_die(x_re, repl, txt, msg)
            x_re = re.compile(r'^(\| )(\S{2} \S{3} \d{4})(\s+\|\s+%s\s+\| ).{11}(\s+\| )\S{2}(\s+\|+)$' % efv, re.M)
            repl = lambda m: m[1] + (m[2] if pre else ztoday) + m[3] + proto_change_date + m[4] + protocol_version + m[5]
            txt = replace_or_die(x_re, repl, txt, f'Unable to find "| ?? ??? {year} | {finalversion} | ... |" line in {fn}')
        else:
            die(f"Unrecognized file in tweak_files: {fn}")

        if txt != old_txt:
            print(f"Updating {fn}")
            with open(fn, 'w', encoding='utf-8') as fh:
                fh.write(txt)

    cmd_chk(['packaging/year-tweak'])

    print(dash_line)
    cmd_run("git diff".split())

    srctar_name = f"{rsync_ver}.tar.gz"
    pattar_name = f"rsync-patches-{version}.tar.gz"
    diff_name = f"{rsync_lastver}-{version}.diffs.gz"
    srctar_file = os.path.join(dest, srcdir, srctar_name)
    pattar_file = os.path.join(dest, srcdir, pattar_name)
    diff_file = os.path.join(dest, srcdiffdir, diff_name)
    lasttar_file = os.path.join(dest, lastsrcdir, rsync_lastver + '.tar.gz')

    print(f"""\
{dash_line}

About to:
 - git commit all changes
 - run a full build, ensuring that the manpages & configure.sh are up-to-date
 - merge the {args.master_branch} branch into the patch/{args.master_branch}/* branches
 - update the files in the "patches" dir and OPTIONALLY (if you type 'y') to
   run patch-update with the --make option (which opens a shell on error)
""")
    ans = input("<Press Enter OR 'y' to continue> ")

    s = cmd_run(['git', 'commit', '-a', '-m', f'Preparing for release of {version} [buildall]'])
    if s.returncode:
        die('Aborting')

    cmd_chk('touch configure.ac && packaging/smart-make && make gen')

    print('Creating any missing patch branches.')
    s = cmd_run(f'packaging/branch-from-patch --branch={args.master_branch} --add-missing')
    if s.returncode:
        die('Aborting')

    print('Updating files in "patches" dir ...')
    s = cmd_run(f'packaging/patch-update --branch={args.master_branch}')
    if s.returncode:
        die('Aborting')

    if re.match(r'^y', ans, re.I):
        print(f'\nRunning smart-make on all "patch/{args.master_branch}/*" branches ...')
        cmd_run(f"packaging/patch-update --branch={args.master_branch} --skip-check --make")

    if os.path.isdir('patches/.git'):
        s = cmd_run(f"cd patches && git commit -a -m 'The patches for {version}.'")
        if s.returncode:
            die('Aborting')

    print(f"""\
{dash_line}

About to:
 - create signed tag for this release: {v_ver}
 - create release diffs, "{diff_name}"
 - create release tar, "{srctar_name}"
 - generate {rsync_ver}/patches/* files
 - create patches tar, "{pattar_name}"
 - update top-level README.md, NEWS.md, TODO, and ChangeLog
 - update top-level rsync*.html manpages
 - gpg-sign the release files
 - update hard-linked top-level release files{skipping}
""")
    ans = input("<Press Enter to continue> ")

    # TODO: is there a better way to ensure that our passphrase is in the agent?
    cmd_run("touch TeMp; gpg --sign TeMp; rm TeMp*")

    out = cmd_txt(f"git tag -s -m 'Version {version}.' {v_ver}", capture='combined').out
    print(out, end='')
    if 'bad passphrase' in out or 'failed' in out:
        die('Aborting')

    if os.path.isdir('patches/.git'):
        out = cmd_txt(f"cd patches && git tag -s -m 'Version {version}.' {v_ver}", capture='combined').out
        print(out, end='')
        if 'bad passphrase' in out or 'failed' in out:
            die('Aborting')

    os.environ['PATH'] = ORIGINAL_PATH

    # Extract the generated files from the old tar.
    tweaked_gen_files = [ os.path.join(rsync_lastver, fn) for fn in gen_files ]
    cmd_run(['tar', 'xzf', lasttar_file, *tweaked_gen_files])
    os.rename(rsync_lastver, 'a')

    print(f"Creating {diff_file} ...")
    cmd_chk(['rsync', '-a', *gen_pathnames, 'b/'])

    # Strip the timestamps off the diff's ---/+++ lines so they're stable.
    sed_script = r's:^((---|\+\+\+) [ab]/[^\t]+)\t.*:\1:' # CAUTION: must not contain any single quotes!
    cmd_chk(f"(git diff v{lastversion} {v_ver} -- ':!.github'; diff -upN a b | sed -r '{sed_script}') | gzip -9 >{diff_file}")
    shutil.rmtree('a')
    os.rename('b', rsync_ver)

    print(f"Creating {srctar_file} ...")
    cmd_chk(f"git archive --format=tar --prefix={rsync_ver}/ {v_ver} | tar xf -")
    cmd_chk(f"support/git-set-file-times --quiet --prefix={rsync_ver}/")
    cmd_chk(['fakeroot', 'tar', 'czf', srctar_file, '--exclude=.github', rsync_ver])
    shutil.rmtree(rsync_ver)

    print(f'Updating files in "{rsync_ver}/patches" dir ...')
    os.mkdir(rsync_ver, 0o755)
    os.mkdir(f"{rsync_ver}/patches", 0o755)
    cmd_chk(f"packaging/patch-update --skip-check --branch={args.master_branch} --gen={rsync_ver}/patches".split())

    print(f"Creating {pattar_file} ...")
    cmd_chk(['fakeroot', 'tar', 'chzf', pattar_file, rsync_ver + '/patches'])
    shutil.rmtree(rsync_ver)

    print(f"Updating the other files in {dest} ...")
    md_files = 'README.md NEWS.md INSTALL.md'.split()
    html_files = [ fn for fn in gen_pathnames if fn.endswith('.html') ]
    cmd_chk(['rsync', '-a', *md_files, *html_files, dest])
    cmd_chk(["./md-convert", "--dest", dest, *md_files])

    cmd_chk(f"git log --name-status | gzip -9 >{dest}/ChangeLog.gz")

    for fn in (srctar_file, pattar_file, diff_file):
        asc_fn = fn + '.asc'
        if os.path.lexists(asc_fn):
            os.unlink(asc_fn)
        res = cmd_run(['gpg', '--batch', '-ba', fn])
        if res.returncode != 0 and res.returncode != 2:
            die("gpg signing failed")

    # For a final release, refresh the hard-linked top-level copies.
    if not pre:
        for find in f'{dest}/rsync-*.gz {dest}/rsync-*.asc {dest}/src-previews/rsync-*diffs.gz*'.split():
            for fn in glob.glob(find):
                os.unlink(fn)
        top_link = [
            srctar_file, f"{srctar_file}.asc",
            pattar_file, f"{pattar_file}.asc",
            diff_file, f"{diff_file}.asc",
        ]
        for fn in top_link:
            os.link(fn, re.sub(r'/src(-\w+)?/', '/', fn))

    print(f"""\
{dash_line}

Local changes are done. When you're satisfied, push the git repository
and rsync the release files. Remember to announce the release on *BOTH*
rsync-announce@lists.samba.org and rsync@lists.samba.org (and the web)!
""")
+
+
def replace_or_die(regex, repl, txt, die_msg):
    """Substitute the first match of *regex* in *txt* with *repl*, or die
    with *die_msg* if the pattern does not match at all."""
    if regex.search(txt) is None:
        die(die_msg)
    return regex.sub(repl, txt, count=1)
+
+
def signal_handler(signum, stack):
    # Installed for SIGINT in main(): abort with a tidy message instead of
    # dumping a KeyboardInterrupt traceback.
    die("\nAborting due to SIGINT.")
+
+
if __name__ == '__main__':
    # Parse the command line; args is a module global read by main().
    cli = argparse.ArgumentParser(
        description="Prepare a new release of rsync in the git repo & ftp dir.",
        add_help=False)
    cli.add_argument('--branch', '-b', dest='master_branch', default='master',
        help="The branch to release. Default: master.")
    cli.add_argument("--help", "-h", action="help", help="Output this help message and exit.")
    args = cli.parse_args()
    main()
+
+# vim: sw=4 et ft=python
diff --git a/packaging/smart-make b/packaging/smart-make
new file mode 100755
index 0000000..3826432
--- /dev/null
+++ b/packaging/smart-make
@@ -0,0 +1,45 @@
#!/bin/sh

# Rebuild rsync, regenerating the configure files via prepare-source and
# re-running configure only when configure.sh actually changed.  Works from
# the top-level dir, optionally via the auto-build-save "build" dir.

set -e

export LANG=C

# prep-auto-dir echoes the branch name only when auto-build-save is set up.
branch=`packaging/prep-auto-dir`
if test x"$branch" = x; then
    srcdir=.
else
    cd build
    srcdir=..
fi

# Save the old configure.sh so we can detect whether it was regenerated.
if test -f configure.sh; then
    cp -p configure.sh configure.sh.old
else
    touch configure.sh.old
fi

# A .fetch marker file makes prepare-source fetch the generated files
# instead of building them locally.
if test -f .fetch; then
    $srcdir/prepare-source fetch
else
    $srcdir/prepare-source
fi

if diff configure.sh configure.sh.old >/dev/null 2>&1; then
    echo "configure.sh is unchanged."
    rm configure.sh.old
else
    echo "configure.sh has CHANGED."
    # Re-run configure, reusing the prior options when config.status exists.
    if test -f config.status; then
        ./config.status --recheck
    else
        $srcdir/configure
    fi
fi

./config.status

make all

if test x"$1" = x"check"; then
    make check
fi
diff --git a/packaging/solaris/build_pkg.sh b/packaging/solaris/build_pkg.sh
new file mode 100644
index 0000000..29c035a
--- /dev/null
+++ b/packaging/solaris/build_pkg.sh
@@ -0,0 +1,94 @@
#!/bin/sh
# Shell script for building Solaris package of rsync
# Author: Jens Apel <jens.apel@web.de>
# License: GPL
#
# BASEDIR is /usr/local and should be the same as the
# --prefix parameter of configure
#
# this script should be copied under
# packaging/solaris/5.8/build_pkg.sh

# Definitions start here
# you can edit this, if you like

# The package name under which rsync will be installed
PKGNAME=SMBrsync

# Extract common info required for the 'info' part of the package.
# This should be made generic and generated by the configure script
# but for now it is hard coded
BASEDIR=/usr/local
VERSION="2.5.5"
ARCH=`uname -p`
NAME=rsync

# Definitions end here
# Please do not edit below this line unless you know what you are doing.

## Start by faking root install
echo "Creating install directory (fake $BASEDIR)..."
START=`pwd`
FAKE_ROOT=$START/${PKGNAME}
mkdir $FAKE_ROOT

# copy the binary and the man page to their places
# (the ../../../ paths assume this runs from packaging/solaris/5.8/)
mkdir $FAKE_ROOT/bin
mkdir -p $FAKE_ROOT/doc/rsync
mkdir -p $FAKE_ROOT/man/man1
mkdir -p $FAKE_ROOT/man/man5

cp ../../../rsync $FAKE_ROOT/bin/rsync
cp ../../../rsync.1 $FAKE_ROOT/man/man1/rsync.1
cp ../../../rsyncd.conf.5 $FAKE_ROOT/man/man5/rsyncd.conf.5
cp ../../../README.md $FAKE_ROOT/doc/rsync/README.md
cp ../../../COPYING $FAKE_ROOT/doc/rsync/COPYING
cp ../../../tech_report.pdf $FAKE_ROOT/doc/rsync/tech_report.pdf
cp ../../../COPYING $FAKE_ROOT/COPYING

## Build info file
echo "Building pkginfo file..."
cat > $FAKE_ROOT/pkginfo << EOF_INFO
PKG=$PKGNAME
NAME=$NAME
DESC="Program for efficient remote updates of files."
VENDOR="Samba Team URL: http://samba.anu.edu.au/rsync/"
BASEDIR=$BASEDIR
ARCH=$ARCH
VERSION=$VERSION
CATEGORY=application
CLASSES=none
EOF_INFO

## Build prototype file
# (format: i=info file, d=dir, f=file; then path, mode, owner, group)
cat > $FAKE_ROOT/prototype << EOFPROTO
i copyright=COPYING
i pkginfo=pkginfo
d none bin 0755 bin bin
f none bin/rsync 0755 bin bin
d none doc 0755 bin bin
d none doc/$NAME 0755 bin bin
f none doc/$NAME/README.md 0644 bin bin
f none doc/$NAME/COPYING 0644 bin bin
f none doc/$NAME/tech_report.pdf 0644 bin bin
d none man 0755 bin bin
d none man/man1 0755 bin bin
f none man/man1/rsync.1 0644 bin bin
d none man/man5 0755 bin bin
f none man/man5/rsyncd.conf.5 0644 bin bin
EOFPROTO

## And now build the package.
OUTPUTFILE=$PKGNAME-$VERSION-sol8-$ARCH-local.pkg
echo "Building package.."
echo FAKE_ROOT = $FAKE_ROOT
cd $FAKE_ROOT
pkgmk -d . -r . -f ./prototype -o
pkgtrans -os . $OUTPUTFILE $PKGNAME

mv $OUTPUTFILE ..
cd ..

# Comment this out if you want to see which file structure has been created
rm -rf $FAKE_ROOT
diff --git a/packaging/systemd/rsync.service b/packaging/systemd/rsync.service
new file mode 100644
index 0000000..8a867ca
--- /dev/null
+++ b/packaging/systemd/rsync.service
@@ -0,0 +1,32 @@
[Unit]
Description=fast remote file copy program daemon
# Only start when a daemon configuration is actually present.
ConditionPathExists=/etc/rsyncd.conf
After=network.target
Documentation=man:rsync(1) man:rsyncd.conf(5)

[Service]
# --no-detach keeps rsync in the foreground so systemd can supervise it.
ExecStart=/usr/bin/rsync --daemon --no-detach
RestartSec=1
Restart=on-failure

# Citing README.md:
#
#    [...] Using ssh is recommended for its security features.
#
#    Alternatively, rsync can run in `daemon' mode, listening on a socket.
#    This is generally used for public file distribution, [...]
#
# So let's assume some extra security is more than welcome here. We do full
# system protection (which makes /usr, /boot, & /etc read-only) and hide
# devices. To override these defaults, it's best to do so in the drop-in
# directory, often done via `systemctl edit rsync.service`. The file needs
# just the bare minimum of the right [heading] and override values.
# See systemd.unit(5) and search for "drop-in" for full details.

ProtectSystem=full
#ProtectHome=on|off|read-only
PrivateDevices=on
NoNewPrivileges=on

[Install]
WantedBy=multi-user.target
diff --git a/packaging/systemd/rsync.socket b/packaging/systemd/rsync.socket
new file mode 100644
index 0000000..5bceefe
--- /dev/null
+++ b/packaging/systemd/rsync.socket
@@ -0,0 +1,10 @@
[Unit]
Description=socket for fast remote file copy program daemon
# Socket activation conflicts with the always-running rsync.service daemon.
Conflicts=rsync.service

[Socket]
# Listen on the standard rsync port; Accept=true spawns one
# rsync@.service instance per incoming connection.
ListenStream=873
Accept=true

[Install]
WantedBy=sockets.target
diff --git a/packaging/systemd/rsync@.service b/packaging/systemd/rsync@.service
new file mode 100644
index 0000000..63ba0c7
--- /dev/null
+++ b/packaging/systemd/rsync@.service
@@ -0,0 +1,28 @@
# Per-connection instance launched by rsync.socket (socket activation).
[Unit]
Description=fast remote file copy program daemon
After=network.target

[Service]
# The leading "-" tells systemd not to treat a non-zero exit as a failure.
ExecStart=-/usr/bin/rsync --daemon
# The accepted connection socket becomes stdin; logs go to the journal.
StandardInput=socket
StandardOutput=inherit
StandardError=journal

# Citing README.md:
#
#    [...] Using ssh is recommended for its security features.
#
#    Alternatively, rsync can run in `daemon' mode, listening on a socket.
#    This is generally used for public file distribution, [...]
#
# So let's assume some extra security is more than welcome here. We do full
# system protection (which makes /usr, /boot, & /etc read-only) and hide
# devices. To override these defaults, it's best to do so in the drop-in
# directory, often done via `systemctl edit rsync@.service`. The file needs
# just the bare minimum of the right [heading] and override values.
# See systemd.unit(5) and search for "drop-in" for full details.

ProtectSystem=full
#ProtectHome=on|off|read-only
PrivateDevices=on
NoNewPrivileges=on
diff --git a/packaging/var-checker b/packaging/var-checker
new file mode 100755
index 0000000..f17c69a
--- /dev/null
+++ b/packaging/var-checker
@@ -0,0 +1,94 @@
+#!/usr/bin/env -S python3 -B
+
+# This script checks the *.c files for extraneous "extern" variables,
+# for vars that are defined but not used, and for inconsistent array
+# sizes. Run it from inside the main rsync directory.
+
+import os, sys, re, argparse, glob
+
# Matches a top-level C variable definition line (skips extern decls & enums).
VARS_RE = re.compile(r'^(?!(?:extern|enum)\s)([a-zA-Z]\S*\s+.*);', re.M)
# Matches an extern variable declaration line.
EXTERNS_RE = re.compile(r'^extern\s+(.*);', re.M)

# Maps var name -> its "[N]" array-size text, to catch inconsistent sizes.
sizes = { }
+
def main():
    """Report extraneous externs/vars and inconsistent array sizes in *.c files."""
    # These test programs are linked with syscall.c/util1.c, so their uses of
    # those files' vars should count — append that source text below.
    add_syscall_c = set('t_stub.c t_unsafe.c tls.c trimslash.c'.split())
    add_util_c = set('t_stub.c t_unsafe.c'.split())

    # Allow running from either the top-level source dir or packaging/.
    if not os.path.exists('syscall.c'):
        if os.path.exists('var-checker'):
            os.chdir('..')
        else:
            print("Couldn't find the source dir.")
            sys.exit(1)

    syscall_c = slurp_file('syscall.c', True)
    util_c = slurp_file('util1.c', True)

    for fn in sorted(glob.glob('*.c')):
        txt = slurp_file(fn)

        var_list = parse_vars(fn, VARS_RE.findall(txt))
        extern_list = parse_vars(fn, EXTERNS_RE.findall(txt))
        if not var_list and not extern_list:
            continue

        if fn in add_syscall_c:
            txt += syscall_c
        if fn in add_util_c:
            txt += util_c

        # Rewrite these macros into the variable names they reference so the
        # usage counting below can see through them.
        txt = re.sub(r'INFO_GTE', 'info_levels ', txt)
        txt = re.sub(r'DEBUG_GTE', 'debug_levels ', txt)
        txt = re.sub(r'SIGACTION\(', 'sigact (', txt)

        # Count every appearance of each var (skipping "struct NAME" uses).
        find = '|'.join([ re.escape(x) for x in var_list + extern_list ])
        var_re = re.compile(r'(?<!\sstruct )\b(%s)(?!\w)' % find)

        found = { x: 0 for x in var_list + extern_list }
        for var in var_re.findall(txt):
            found[var] += 1

        for var in sorted(var_list + extern_list):
            if found[var] == 1:  # only its own definition/declaration
                vtype = 'var' if var in var_list else 'extern'
                print(fn, f'has extraneous {vtype}: "{var}"')
+
+
def slurp_file(fn, drop_externs=False):
    """Return the full text of *fn*; optionally strip extern declarations."""
    with open(fn, 'r', encoding='utf-8') as handle:
        contents = handle.read()
    return EXTERNS_RE.sub('', contents) if drop_externs else contents
+
+
def parse_vars(fn, lines):
    """Reduce C declaration lines to bare variable names.

    Also records each var's "[N]" array size in the module-level *sizes*
    dict and reports any size that disagrees with an earlier file.
    """
    names = [ ]
    for decl in lines:
        # Drop initializer braces and function-argument parens.
        decl = re.sub(r'\s*\{.*\}', '', decl)
        decl = re.sub(r'\s*\(.*\)', '', decl)
        for piece in re.split(r'\s*,\s*', decl):
            piece = re.sub(r'\s*=.*', '', piece)  # strip "= value"
            m = re.search(r'(?P<var>\w+)(?P<sz>\[.*?\])?$', piece)
            if not m:
                print(f"Bogus match? ({piece})")
                continue
            if m['sz']:
                if m['var'] not in sizes:
                    sizes[m['var']] = m['sz']
                elif sizes[m['var']] != m['sz']:
                    var = m['var']
                    print(fn, f'has inconsistent size for "{var}":', m['sz'], 'vs', sizes[var])
            names.append(m['var'])
    return names
+
+
if __name__ == '__main__':
    # Only --help is accepted; the scan itself takes no options.
    cli = argparse.ArgumentParser(
        description='Check the *.c files for extraneous extern vars.', add_help=False)
    cli.add_argument("--help", "-h", action="help", help="Output this help message and exit.")
    args = cli.parse_args()
    main()
+
+# vim: sw=4 et ft=python
diff --git a/packaging/year-tweak b/packaging/year-tweak
new file mode 100755
index 0000000..69d2f2f
--- /dev/null
+++ b/packaging/year-tweak
@@ -0,0 +1,94 @@
+#!/usr/bin/env python3
+
+# This uses the output from "support/git-set-file-times --list" to discern
+# the last-modified year of each *.c & *.h file and updates the copyright
+# year if it isn't set right.
+
+import sys, os, re, argparse, subprocess
+from datetime import datetime
+
+MAINTAINER_NAME = 'Wayne Davison'
+MAINTAINER_SUF = ' ' + MAINTAINER_NAME + "\n"
+
def main():
    """Update per-file copyright years from git mod-times & refresh latest-year.h."""
    latest_year = '2000'  # floor value; bumped to the newest year seen below

    proc = subprocess.Popen('support/git-set-file-times --list'.split(), stdout=subprocess.PIPE, encoding='utf-8')
    for line in proc.stdout:
        # Parse a --list line into its year and filename (see the regex);
        # assumes the 2nd field starts with a yyyy date — TODO confirm format.
        m = re.match(r'^\S\s+(?P<year>\d\d\d\d)\S+\s+\S+\s+(?P<fn>.+)', line)
        if not m:
            print("Failed to parse line from git-set-file-times:", line)
            sys.exit(1)
        m = argparse.Namespace(**m.groupdict())  # attribute-style access to the groups
        if m.year > latest_year:  # string compare is safe for same-width years
            latest_year = m.year
        if m.fn.startswith('zlib/') or m.fn.startswith('popt/'):
            continue  # bundled third-party code keeps its own copyrights
        if re.search(r'\.(c|h|sh|test)$', m.fn):
            maybe_edit_copyright_year(m.fn, m.year)
    proc.communicate()  # wait for the child to finish and close the pipe

    # Keep latest-year.h in sync with the newest modification year we saw.
    fn = 'latest-year.h'
    with open(fn, 'r', encoding='utf-8') as fh:
        old_txt = fh.read()

    txt = f'#define LATEST_YEAR "{latest_year}"\n'
    if txt != old_txt:
        print(f"Updating {fn} with year {latest_year}")
        with open(fn, 'w', encoding='utf-8') as fh:
            fh.write(txt)
+
+
def maybe_edit_copyright_year(fn, year):
    """Rewrite *fn*'s Copyright line (if any) to end at *year*, then save it.

    Scans the opening lines for a Copyright comment; the maintainer's own
    line gets its year-range extended, while other files (outside lib/ and
    testsuite/) get a maintainer line appended after the existing one.
    """
    opening_lines = [ ]
    copyright_line = None

    with open(fn, 'r', encoding='utf-8') as fh:
        for lineno, line in enumerate(fh):
            opening_lines.append(line)
            if lineno > 3 and not re.search(r'\S', line):
                break  # stop at the first blank line past the header
            m = re.match(r'^(?P<pre>.*Copyright\s+\S+\s+)(?P<year>\d\d\d\d(?:-\d\d\d\d)?(,\s+\d\d\d\d)*)(?P<suf>.+)', line)
            if not m:
                continue
            copyright_line = argparse.Namespace(**m.groupdict())
            copyright_line.lineno = len(opening_lines)
            copyright_line.is_maintainer_line = MAINTAINER_NAME in copyright_line.suf
            copyright_line.txt = line
            if copyright_line.is_maintainer_line:
                break

        if not copyright_line:
            return

        if copyright_line.is_maintainer_line:
            # Extend (or collapse) the maintainer line's year range.
            cyears = copyright_line.year.split('-')
            if year == cyears[0]:
                cyears = [ year ]
            else:
                cyears = [ cyears[0], year ]
            txt = copyright_line.pre + '-'.join(cyears) + MAINTAINER_SUF
            if txt == copyright_line.txt:
                return
            opening_lines[copyright_line.lineno - 1] = txt
        else:
            if fn.startswith('lib/') or fn.startswith('testsuite/'):
                return
            # Append a maintainer copyright line right after the existing one.
            txt = copyright_line.pre + year + MAINTAINER_SUF
            opening_lines[copyright_line.lineno - 1] += txt

        # BUGFIX: this read must happen while fh is still open — it was
        # previously dedented outside the "with" block, so it raised
        # "I/O operation on closed file" whenever an update was needed.
        remaining_txt = fh.read()

    print(f"Updating {fn} with year {year}")

    with open(fn, 'w', encoding='utf-8') as fh:
        fh.write(''.join(opening_lines))
        fh.write(remaining_txt)
+
+
if __name__ == '__main__':
    # No options beyond the default --help; parse_args just validates argv.
    description = "Grab the year of last mod for our c & h files and make sure the Copyright comment is up-to-date."
    args = argparse.ArgumentParser(description=description).parse_args()
    main()
+
+# vim: sw=4 et