From a86c5f7cae7ec9a3398300555a0b644689d946a1 Mon Sep 17 00:00:00 2001 From: Daniel Baumann Date: Thu, 19 Sep 2024 06:14:53 +0200 Subject: Merging upstream version 4.4.0. Signed-off-by: Daniel Baumann --- tools/.clang-tidy | 10 + tools/Get-HardenFlags.ps1 | 2 +- tools/alpine-setup.sh | 15 +- tools/arch-setup.sh | 83 +- tools/asn2wrs.py | 161 +- tools/asterix/README.md | 2 +- tools/asterix/convertspec.py | 339 ++++ tools/asterix/packet-asterix-template.c | 113 +- tools/asterix/update-specs.py | 189 +- tools/bsd-setup.sh | 21 +- tools/checkAPIs.pl | 12 +- tools/check_col_apis.py | 310 +++ tools/check_dissector.py | 25 +- tools/check_dissector_urls.py | 65 +- tools/check_help_urls.py | 2 +- tools/check_spelling.py | 179 +- tools/check_static.py | 85 +- tools/check_tfs.py | 179 +- tools/check_typed_item_calls.py | 594 ++++-- tools/check_val_to_str.py | 35 +- tools/checkfiltername.pl | 1 + tools/checkhf.pl | 2 +- tools/checklicenses.py | 12 +- tools/convert-glib-types.py | 62 +- tools/convert-proto-init.py | 73 + tools/convert_expert_add_info_format.pl | 5 +- tools/convert_proto_tree_add_text.pl | 21 +- tools/debian-setup.sh | 265 ++- tools/delete_includes.py | 3 - tools/detect_bad_alloc_patterns.py | 8 +- tools/eti2wireshark.py | 80 +- tools/fuzz-test.sh | 2 +- tools/generate-bacnet-vendors.py | 21 +- tools/generate-dissector.py | 2 +- tools/generate-nl80211-fields.py | 28 +- tools/generate-sysdig-event.py | 6 +- tools/generate_authors.py | 6 +- tools/indexcap.py | 8 +- tools/json2pcap/json2pcap.py | 2 +- tools/lemon/CMakeLists.txt | 2 + tools/lemon/lemon.c | 3 +- tools/lemon/patches/01-lemon-dashdash.patch | 14 + tools/macos-setup-brew.sh | 45 +- tools/macos-setup-patches/falco-include-dirs.patch | 15 + .../falco-uthash_h-install.patch | 9 + tools/macos-setup-patches/glib-pkgconfig.patch | 10 + tools/macos-setup-patches/gnutls-pkgconfig.patch | 8 + tools/macos-setup-patches/qt-fix-pc-file | 24 + tools/macos-setup-patches/qt-fix-pc-files | 21 + 
tools/macos-setup-patches/snappy-signed.patch | 11 + tools/macos-setup-patches/spandsp-configure-patch | 53 + tools/macos-setup.sh | 2118 +++++++++++--------- tools/make-bluetooth.py | 368 ++++ tools/make-enterprises.py | 29 +- tools/make-enums.py | 2 +- tools/make-iana-ip.py | 209 ++ tools/make-isobus.py | 15 +- tools/make-manuf.py | 10 +- tools/make-no-reassembly-profile.py | 4 +- tools/make-packet-dcm.py | 24 +- tools/make-pci-ids.py | 39 +- tools/make-plugin-reg.py | 18 +- tools/make-regs.py | 10 +- tools/make-services.py | 33 +- tools/make-usb.py | 2 +- tools/make-version.py | 45 +- tools/make-wsluarm.py | 458 +++++ tools/ncp2222.py | 1132 +++++------ tools/netscreen2dump.py | 137 -- tools/parse_xml2skinny_dissector.py | 32 +- tools/pidl/idl.yp | 29 +- tools/pidl/lib/Parse/Pidl.pm | 2 +- tools/pidl/lib/Parse/Pidl/CUtil.pm | 1 + tools/pidl/lib/Parse/Pidl/Compat.pm | 1 + tools/pidl/lib/Parse/Pidl/Dump.pm | 1 + tools/pidl/lib/Parse/Pidl/Expr.pm | 1 + tools/pidl/lib/Parse/Pidl/IDL.pm | 30 +- tools/pidl/lib/Parse/Pidl/NDR.pm | 187 +- tools/pidl/lib/Parse/Pidl/ODL.pm | 3 +- tools/pidl/lib/Parse/Pidl/Samba3/ClientNDR.pm | 11 +- tools/pidl/lib/Parse/Pidl/Samba3/ServerNDR.pm | 7 +- tools/pidl/lib/Parse/Pidl/Samba4.pm | 1 + tools/pidl/lib/Parse/Pidl/Samba4/COM/Header.pm | 1 + tools/pidl/lib/Parse/Pidl/Samba4/COM/Proxy.pm | 3 +- tools/pidl/lib/Parse/Pidl/Samba4/COM/Stub.pm | 7 +- tools/pidl/lib/Parse/Pidl/Samba4/Header.pm | 5 +- tools/pidl/lib/Parse/Pidl/Samba4/NDR/Client.pm | 23 +- tools/pidl/lib/Parse/Pidl/Samba4/NDR/Parser.pm | 522 +++-- tools/pidl/lib/Parse/Pidl/Samba4/NDR/Server.pm | 40 +- .../pidl/lib/Parse/Pidl/Samba4/NDR/ServerCompat.pm | 624 ++++++ tools/pidl/lib/Parse/Pidl/Samba4/Python.pm | 403 ++-- tools/pidl/lib/Parse/Pidl/Samba4/TDR.pm | 13 +- tools/pidl/lib/Parse/Pidl/Samba4/Template.pm | 1 + tools/pidl/lib/Parse/Pidl/Typelist.pm | 62 +- tools/pidl/lib/Parse/Pidl/Util.pm | 38 +- tools/pidl/lib/Parse/Pidl/Wireshark/Conformance.pm | 1 + 
tools/pidl/lib/Parse/Pidl/Wireshark/NDR.pm | 127 +- tools/pidl/lib/Parse/Yapp/Driver.pm | 471 ----- tools/pidl/lib/wscript_build | 37 - tools/pidl/pidl | 24 +- tools/pidl/tests/Util.pm | 1 + tools/pidl/tests/header.pl | 21 +- tools/pidl/tests/ndr.pl | 3 +- tools/pidl/tests/ndr_align.pl | 11 +- tools/pidl/tests/ndr_alloc.pl | 1 + tools/pidl/tests/ndr_array.pl | 1 + tools/pidl/tests/ndr_compat.pl | 1 + tools/pidl/tests/ndr_fullptr.pl | 3 +- tools/pidl/tests/ndr_refptr.pl | 37 +- tools/pidl/tests/ndr_represent.pl | 1 + tools/pidl/tests/ndr_simple.pl | 1 + tools/pidl/tests/ndr_string.pl | 1 + tools/pidl/tests/ndr_tagtype.pl | 8 +- tools/pidl/tests/parse_idl.pl | 2 +- tools/pidl/tests/samba-ndr.pl | 1 - tools/pidl/tests/samba3-cli.pl | 2 +- tools/pidl/tests/tdr.pl | 12 +- tools/pidl/tests/test_util.pl | 1 + tools/pidl/tests/typelist.pl | 4 +- tools/pidl/tests/wireshark-ndr.pl | 48 +- tools/pidl/wscript | 62 +- tools/pre-commit-ignore.py | 5 +- tools/process-x11-fields.pl | 2 +- tools/process-x11-xcb.pl | 133 +- tools/radiotap-gen/radiotap-gen.c | 2 +- tools/rpm-setup.sh | 126 +- tools/update-appdata.py | 7 +- tools/update-tools-help.py | 12 +- tools/validate-commit.py | 13 +- tools/win-setup.ps1 | 64 +- tools/wireshark_gen.py | 151 +- tools/wireshark_words.txt | 1001 ++++++++- tools/ws-coding-style.cfg | 2 +- tools/yacc.py | 159 +- 134 files changed, 8363 insertions(+), 4140 deletions(-) create mode 100644 tools/.clang-tidy create mode 100755 tools/asterix/convertspec.py create mode 100755 tools/check_col_apis.py create mode 100755 tools/convert-proto-init.py create mode 100644 tools/lemon/patches/01-lemon-dashdash.patch create mode 100644 tools/macos-setup-patches/falco-include-dirs.patch create mode 100644 tools/macos-setup-patches/falco-uthash_h-install.patch create mode 100644 tools/macos-setup-patches/glib-pkgconfig.patch create mode 100644 tools/macos-setup-patches/gnutls-pkgconfig.patch create mode 100755 tools/macos-setup-patches/qt-fix-pc-file create mode 
100755 tools/macos-setup-patches/qt-fix-pc-files create mode 100644 tools/macos-setup-patches/snappy-signed.patch create mode 100644 tools/macos-setup-patches/spandsp-configure-patch create mode 100755 tools/make-bluetooth.py create mode 100755 tools/make-iana-ip.py mode change 100644 => 100755 tools/make-isobus.py create mode 100755 tools/make-wsluarm.py delete mode 100755 tools/netscreen2dump.py create mode 100644 tools/pidl/lib/Parse/Pidl/Samba4/NDR/ServerCompat.pm delete mode 100644 tools/pidl/lib/Parse/Yapp/Driver.pm delete mode 100644 tools/pidl/lib/wscript_build (limited to 'tools') diff --git a/tools/.clang-tidy b/tools/.clang-tidy new file mode 100644 index 00000000..84ea0537 --- /dev/null +++ b/tools/.clang-tidy @@ -0,0 +1,10 @@ +InheritParentConfig: true + +# We don't want to do any checks in this directory yet so hack around +# the fact that Clang-Tidy won't let us disable all checks. +# https://stackoverflow.com/a/58379342/82195 +Checks: + - '-*' + - 'misc-definitions-in-headers' +CheckOptions: + - { key: 'HeaderFileExtensions', value: 'DISABLED' } diff --git a/tools/Get-HardenFlags.ps1 b/tools/Get-HardenFlags.ps1 index c0785659..eb73b40f 100644 --- a/tools/Get-HardenFlags.ps1 +++ b/tools/Get-HardenFlags.ps1 @@ -16,7 +16,7 @@ # on all the binaries in the distribution, and then filters # for the NXCOMPAT and DYNAMICBASE flags. -# This script will probably fail for the forseeable future. +# This script will probably fail for the foreseeable future. # # Many of our third-party libraries are compiled using MinGW-w64. 
Its version # of `ld` doesn't enable the dynamicbase, nxcompat, or high-entropy-va flags diff --git a/tools/alpine-setup.sh b/tools/alpine-setup.sh index b5cd5a17..0cdb7799 100755 --- a/tools/alpine-setup.sh +++ b/tools/alpine-setup.sh @@ -13,7 +13,7 @@ set -e -u -o pipefail -function print_usage() { +print_usage() { printf "\\nUtility to setup a alpine system for Wireshark Development.\\n" printf "The basic usage installs the needed software\\n\\n" printf "Usage: %s [--install-optional] [...other options...]\\n" "$0" @@ -86,7 +86,7 @@ ADDITIONAL_LIST=" snappy-dev nghttp2-dev nghttp3-dev - lua5.2-dev + lua5.4-dev libnl3-dev sbc-dev minizip-dev @@ -98,10 +98,10 @@ ADDITIONAL_LIST=" " # Uncomment to add PNG compression utilities used by compress-pngs: -# ADDITIONAL_LIST="$ADDITIONAL_LIST \ -# advancecomp \ -# optipng \ -# oxipng \ +# ADDITIONAL_LIST="$ADDITIONAL_LIST +# advancecomp +# optipng +# oxipng # pngcrush" # Adds package $2 to list variable $1 if the package is found. @@ -110,7 +110,7 @@ add_package() { local list="$1" pkgname="$2" # fail if the package is not known - apk list $pkgname &> /dev/null || return 1 + apk list "$pkgname" &> /dev/null || return 1 # package is found, append it to list eval "${list}=\"\${${list}} \${pkgname}\"" @@ -125,6 +125,7 @@ then fi apk update || exit 2 +# shellcheck disable=SC2086 apk add $ACTUAL_LIST $OPTIONS || exit 2 if [ $ADDITIONAL -eq 0 ] diff --git a/tools/arch-setup.sh b/tools/arch-setup.sh index 1443c528..8c8e9f1a 100755 --- a/tools/arch-setup.sh +++ b/tools/arch-setup.sh @@ -58,51 +58,51 @@ then exit 1 fi -BASIC_LIST="base-devel \ - bcg729 \ - brotli \ - c-ares \ - cmake \ - git \ - glib2 \ - gnutls \ - krb5 \ - libcap \ - libgcrypt \ - libilbc \ - libmaxminddb \ - libnghttp2 \ - libnghttp3 \ - libnl \ - libpcap \ - libssh \ - libxml2 \ - lua52 \ - lz4 \ - minizip \ - ninja \ - pcre2 \ - python \ - qt6-base \ - qt6-multimedia \ - qt6-tools \ - qt6-5compat \ - sbc \ - snappy \ - spandsp \ - speexdsp \ - zlib \ 
+BASIC_LIST="base-devel + bcg729 + brotli + c-ares + cmake + git + glib2 + gnutls + krb5 + libcap + libgcrypt + libilbc + libmaxminddb + libnghttp2 + libnghttp3 + libnl + libpcap + libssh + libxml2 + lua + lz4 + minizip + ninja + pcre2 + python + qt6-base + qt6-multimedia + qt6-tools + qt6-5compat + sbc + snappy + spandsp + speexdsp + zlib zstd" -ADDITIONAL_LIST="asciidoctor \ - ccache \ - docbook-xml \ - docbook-xsl \ - doxygen \ - libxslt \ +ADDITIONAL_LIST="asciidoctor + ccache + docbook-xml + docbook-xsl + doxygen + libxslt perl" -TESTDEPS_LIST="python-pytest \ +TESTDEPS_LIST="python-pytest python-pytest-xdist" ACTUAL_LIST=$BASIC_LIST @@ -118,6 +118,7 @@ then fi # Partial upgrades are unsupported. +# shellcheck disable=SC2086 pacman --sync --refresh --sysupgrade --needed $ACTUAL_LIST $OPTIONS || exit 2 if [ $ADDITIONAL -eq 0 ] diff --git a/tools/asn2wrs.py b/tools/asn2wrs.py index 7f72443b..6d03a229 100755 --- a/tools/asn2wrs.py +++ b/tools/asn2wrs.py @@ -45,7 +45,7 @@ import os import os.path import time import getopt -import traceback +#import traceback try: from ply import lex @@ -160,6 +160,7 @@ input_file = None g_conform = None lexer = None in_oid = False +quiet = False class LexError(Exception): def __init__(self, tok, filename=None): @@ -644,9 +645,9 @@ class EthCtx: return False def value_max(self, a, b): - if (a == 'MAX') or (b == 'MAX'): return 'MAX'; - if a == 'MIN': return b; - if b == 'MIN': return a; + if (a == 'MAX') or (b == 'MAX'): return 'MAX' + if a == 'MIN': return b + if b == 'MIN': return a try: if (int(a) > int(b)): return a @@ -657,9 +658,9 @@ class EthCtx: return "MAX((%s),(%s))" % (a, b) def value_min(self, a, b): - if (a == 'MIN') or (b == 'MIN'): return 'MIN'; - if a == 'MAX': return b; - if b == 'MAX': return a; + if (a == 'MIN') or (b == 'MIN'): return 'MIN' + if a == 'MAX': return b + if b == 'MAX': return a try: if (int(a) < int(b)): return a @@ -723,7 +724,7 @@ class EthCtx: val = self.type[t]['val'] (ftype, display) = 
val.eth_ftype(self) attr.update({ 'TYPE' : ftype, 'DISPLAY' : display, - 'STRINGS' : val.eth_strings(), 'BITMASK' : '0' }); + 'STRINGS' : val.eth_strings(), 'BITMASK' : '0' }) else: attr.update(self.type[t]['attr']) attr.update(self.eth_type[self.type[t]['ethname']]['attr']) @@ -1060,7 +1061,7 @@ class EthCtx: #--- eth_clean -------------------------------------------------------------- def eth_clean(self): - self.proto = self.proto_opt; + self.proto = self.proto_opt #--- ASN.1 tables ---------------- self.assign = {} self.assign_ord = [] @@ -1242,14 +1243,12 @@ class EthCtx: for t in self.eth_type_ord: bits = self.eth_type[t]['val'].eth_named_bits() if (bits): - old_val = 0 for (val, id) in bits: self.named_bit.append({'name' : id, 'val' : val, 'ethname' : 'hf_%s_%s_%s' % (self.eproto, t, asn2c(id)), 'ftype' : 'FT_BOOLEAN', 'display' : '8', 'strings' : 'NULL', 'bitmask' : '0x'+('80','40','20','10','08','04','02','01')[val%8]}) - old_val = val + 1 if self.eth_type[t]['val'].eth_need_tree(): self.eth_type[t]['tree'] = "ett_%s_%s" % (self.eth_type[t]['proto'], t) else: @@ -1560,7 +1559,7 @@ class EthCtx: if len(cycle_funcs) > 1: out += f'''\ - // {' → '.join(cycle_funcs)} + // {' -> '.join(cycle_funcs)} actx->pinfo->dissection_depth += {len(cycle_funcs) - 1}; increment_dissection_depth(actx->pinfo); ''' @@ -1613,7 +1612,7 @@ class EthCtx: #--- eth_out_pdu_decl ---------------------------------------------------------- def eth_out_pdu_decl(self, f): - t = self.eth_hf[f]['ethtype'] + #t = self.eth_hf[f]['ethtype'] out = '' if (not self.eth_hf[f]['pdu']['export']): out += 'static ' @@ -1626,13 +1625,13 @@ class EthCtx: if not len(self.eth_hf_ord) and not len(self.eth_hfpdu_ord) and not len(self.named_bit): return fx = self.output.file_open('hf') for f in (self.eth_hfpdu_ord + self.eth_hf_ord): - fx.write("%-50s/* %s */\n" % ("static int %s = -1; " % (self.eth_hf[f]['fullname']), self.eth_hf[f]['ethtype'])) + fx.write("%-50s/* %s */\n" % ("static int %s; " % 
(self.eth_hf[f]['fullname']), self.eth_hf[f]['ethtype'])) if (self.named_bit): fx.write('/* named bits */\n') for nb in self.named_bit: - fx.write("static int %s = -1;\n" % (nb['ethname'])) + fx.write("static int %s;\n" % (nb['ethname'])) if (self.dummy_eag_field): - fx.write("static int %s = -1; /* never registered */\n" % (self.dummy_eag_field)) + fx.write("static int %s; /* never registered */\n" % (self.dummy_eag_field)) self.output.file_close(fx) #--- eth_output_hf_arr ------------------------------------------------------ @@ -1685,10 +1684,10 @@ class EthCtx: def eth_output_ett (self): fx = self.output.file_open('ett') fempty = True - #fx.write("static gint ett_%s = -1;\n" % (self.eproto)) + #fx.write("static int ett_%s;\n" % (self.eproto)) for t in self.eth_type_ord: if self.eth_type[t]['tree']: - fx.write("static gint %s = -1;\n" % (self.eth_type[t]['tree'])) + fx.write("static int %s;\n" % (self.eth_type[t]['tree'])) fempty = False self.output.file_close(fx, discard=fempty) @@ -1808,7 +1807,7 @@ class EthCtx: def eth_output_types(self): def out_pdu(f): t = self.eth_hf[f]['ethtype'] - impl = 'FALSE' + impl = 'false' out = '' if (not self.eth_hf[f]['pdu']['export']): out += 'static ' @@ -1821,20 +1820,20 @@ class EthCtx: ret_par = 'offset' if (self.Per()): if (self.Aligned()): - aligned = 'TRUE' + aligned = 'true' else: - aligned = 'FALSE' + aligned = 'false' out += " asn1_ctx_t asn1_ctx;\n" out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_PER', aligned, 'pinfo'),)) if (self.Ber()): out += " asn1_ctx_t asn1_ctx;\n" - out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_BER', 'TRUE', 'pinfo'),)) + out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_BER', 'true', 'pinfo'),)) par=((impl, 'tvb', off_par,'&asn1_ctx', 'tree', self.eth_hf[f]['fullname']),) elif (self.Per()): par=(('tvb', off_par, '&asn1_ctx', 'tree', self.eth_hf[f]['fullname']),) elif (self.Oer()): out += " asn1_ctx_t asn1_ctx;\n" - out += 
self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_OER', 'TRUE', 'pinfo'),)) + out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_OER', 'true', 'pinfo'),)) par=(('tvb', off_par,'&asn1_ctx', 'tree', self.eth_hf[f]['fullname']),) else: par=((),) @@ -1993,7 +1992,7 @@ class EthCtx: if first_decl: fx.write(' /*--- Syntax registrations ---*/\n') first_decl = False - fx.write(' %sregister_ber_syntax_dissector(%s, proto_%s, dissect_%s_PDU);\n' % (new_prefix, k, self.eproto, reg['pdu'])); + fx.write(' %sregister_ber_syntax_dissector(%s, proto_%s, dissect_%s_PDU);\n' % (new_prefix, k, self.eproto, reg['pdu'])) fempty=False self.output.file_close(fx, discard=fempty) @@ -2068,6 +2067,8 @@ class EthCtx: #--- dupl_report ----------------------------------------------------- def dupl_report(self): + if quiet: + return # types tmplist = sorted(self.eth_type_dupl.keys()) for t in tmplist: @@ -2211,7 +2212,7 @@ class EthCtx: print(', '.join(dep)) # end of print_mod() (mod_ord, mod_cyc) = dependency_compute(self.module_ord, self.module, ignore_fn = lambda t: t not in self.module) - print("\n# ASN.1 Moudules") + print("\n# ASN.1 Modules") print("Module name Dependency") print("-" * 100) new_ord = False @@ -2219,7 +2220,7 @@ class EthCtx: print_mod(m) new_ord = new_ord or (self.module_ord.index(m) != mod_ord.index(m)) if new_ord: - print("\n# ASN.1 Moudules - in dependency order") + print("\n# ASN.1 Modules - in dependency order") print("Module name Dependency") print("-" * 100) for m in (mod_ord): @@ -2335,13 +2336,13 @@ class EthCnf: return name in self.fn and self.fn[name]['FN_BODY'] def get_fn_text(self, name, ctx): if (name not in self.fn): - return ''; + return '' if (not self.fn[name][ctx]): - return ''; + return '' self.fn[name][ctx]['used'] = True out = self.fn[name][ctx]['text'] if (not self.suppress_line): - out = '#line %u "%s"\n%s\n' % (self.fn[name][ctx]['lineno'], rel_dissector_path(self.fn[name][ctx]['fn']), out); + out = '#line %u 
"%s"\n%s\n' % (self.fn[name][ctx]['lineno'], rel_dissector_path(self.fn[name][ctx]['fn']), out) return out def add_pdu(self, par, fn, lineno): @@ -2963,7 +2964,7 @@ class EthOut: #--- output_fname ------------------------------------------------------- def output_fname(self, ftype, ext='c'): fn = '' - if not ext in ('cnf',): + if ext not in ('cnf',): fn += 'packet-' fn += self.outnm if (ftype): @@ -3060,10 +3061,10 @@ class EthOut: include = re.compile(r'^\s*#\s*include\s+[<"](?P[^>"]+)[>"]', re.IGNORECASE) - cont_linenum = 0; + cont_linenum = 0 while (True): - cont_linenum = cont_linenum + 1; + cont_linenum = cont_linenum + 1 line = fin.readline() if (line == ''): break ifile = None @@ -3372,8 +3373,8 @@ class Type (Node): (minv, maxv, ext) = self.constr.subtype[1].GetSize(ectx) if minv == 'MIN': minv = 'NO_BOUND' if maxv == 'MAX': maxv = 'NO_BOUND' - if (ext): ext = 'TRUE' - else: ext = 'FALSE' + if (ext): ext = 'true' + else: ext = 'false' return (minv, maxv, ext) def eth_get_value_constr(self, ectx): @@ -3386,16 +3387,16 @@ class Type (Node): minv += 'U' elif (str(minv)[0] == "-") and str(minv)[1:].isdigit(): if (int(minv) == -(2**31)): - minv = "G_MININT32" + minv = "INT32_MIN" elif (int(minv) < -(2**31)): - minv = "G_GINT64_CONSTANT(%s)" % (str(minv)) + minv = "INT64_C(%s)" % (str(minv)) if str(maxv).isdigit(): if (int(maxv) >= 2**32): - maxv = "G_GUINT64_CONSTANT(%s)" % (str(maxv)) + maxv = "UINT64_C(%s)" % (str(maxv)) else: maxv += 'U' - if (ext): ext = 'TRUE' - else: ext = 'FALSE' + if (ext): ext = 'true' + else: ext = 'false' return (minv, maxv, ext) def eth_get_alphabet_constr(self, ectx): @@ -3745,9 +3746,9 @@ class Module (Node): class Module_Body (Node): def to_python (self, ctx): # XXX handle exports, imports. 
- l = [x.to_python (ctx) for x in self.assign_list] - l = [a for a in l if a != ''] - return "\n".join (l) + list = [x.to_python (ctx) for x in self.assign_list] + list = [a for a in list if a != ''] + return "\n".join(list) def to_eth(self, ectx): # Exports @@ -3983,9 +3984,9 @@ class TaggedType (Type): pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s' (pars['TAG_CLS'], pars['TAG_TAG']) = self.GetTag(ectx) if self.HasImplicitTag(ectx): - pars['TAG_IMPL'] = 'TRUE' + pars['TAG_IMPL'] = 'true' else: - pars['TAG_IMPL'] = 'FALSE' + pars['TAG_IMPL'] = 'false' return pars def eth_type_default_body(self, ectx, tname): @@ -4090,7 +4091,8 @@ class SeqType (SqType): autotag = True lst = self.all_components() for e in (self.elt_list): - if e.val.HasOwnTag(): autotag = False; break; + if e.val.HasOwnTag(): autotag = False + break # expand COMPONENTS OF if self.need_components(): if components_available: @@ -4110,7 +4112,7 @@ class SeqType (SqType): e.val.SetName("eag_v%s" % (e.val.ver)) else: e.val.SetName("eag_%d" % (eag_num)) - eag_num += 1; + eag_num += 1 else: # expand new_ext_list = [] for e in (self.ext_list): @@ -4495,10 +4497,10 @@ class ChoiceType (Type): if (ectx.NeedTags() and (ectx.tag_def == 'AUTOMATIC')): autotag = True for e in (self.elt_list): - if e.HasOwnTag(): autotag = False; break; + if e.HasOwnTag(): autotag = False; break if autotag and hasattr(self, 'ext_list'): for e in (self.ext_list): - if e.HasOwnTag(): autotag = False; break; + if e.HasOwnTag(): autotag = False; break # do autotag if autotag: atag = 0 @@ -4811,9 +4813,9 @@ class EnumeratedType (Type): pars = Type.eth_type_default_pars(self, ectx, tname) (root_num, ext_num, map_table) = self.get_vals_etc(ectx)[1:] if self.ext is not None: - ext = 'TRUE' + ext = 'true' else: - ext = 'FALSE' + ext = 'false' pars['ROOT_NUM'] = str(root_num) pars['EXT'] = ext pars['EXT_NUM'] = str(ext_num) @@ -5610,7 +5612,7 @@ class BitStringType (Type): if (self.named_list): sorted_list = 
self.named_list sorted_list.sort() - expected_bit_no = 0; + expected_bit_no = 0 for e in (sorted_list): # Fill the table with "spare_bit" for "un named bits" if (int(e.val) != 0) and (expected_bit_no != int(e.val)): @@ -5973,7 +5975,7 @@ def p_Reference_1 (t): def p_Reference_2 (t): '''Reference : LCASE_IDENT_ASSIGNED - | identifier ''' # instead of valuereference wich causes reduce/reduce conflict + | identifier ''' # instead of valuereference which causes reduce/reduce conflict t[0] = Value_Ref(val=t[1]) def p_AssignmentList_1 (t): @@ -6014,7 +6016,7 @@ def p_DefinedValue_1(t): t[0] = t[1] def p_DefinedValue_2(t): - '''DefinedValue : identifier ''' # instead of valuereference wich causes reduce/reduce conflict + '''DefinedValue : identifier ''' # instead of valuereference which causes reduce/reduce conflict t[0] = Value_Ref(val=t[1]) # 13.6 @@ -6040,7 +6042,7 @@ def p_ValueAssignment (t): 'ValueAssignment : LCASE_IDENT ValueType ASSIGNMENT Value' t[0] = ValueAssignment(ident = t[1], typ = t[2], val = t[4]) -# only "simple" types are supported to simplify grammer +# only "simple" types are supported to simplify grammar def p_ValueType (t): '''ValueType : type_ref | BooleanType @@ -7320,11 +7322,11 @@ def p_cls_syntax_list_2 (t): # X.681 def p_cls_syntax_1 (t): 'cls_syntax : Type IDENTIFIED BY Value' - t[0] = { get_class_fieled(' ') : t[1], get_class_fieled(' '.join((t[2], t[3]))) : t[4] } + t[0] = { get_class_field(' ') : t[1], get_class_field(' '.join((t[2], t[3]))) : t[4] } def p_cls_syntax_2 (t): 'cls_syntax : HAS PROPERTY Value' - t[0] = { get_class_fieled(' '.join(t[1:-1])) : t[-1:][0] } + t[0] = { get_class_field(' '.join(t[1:-1])) : t[-1:][0] } # X.880 def p_cls_syntax_3 (t): @@ -7337,17 +7339,17 @@ def p_cls_syntax_3 (t): | PRIORITY Value | ALWAYS RESPONDS BooleanValue | IDEMPOTENT BooleanValue ''' - t[0] = { get_class_fieled(' '.join(t[1:-1])) : t[-1:][0] } + t[0] = { get_class_field(' '.join(t[1:-1])) : t[-1:][0] } def p_cls_syntax_4 (t): '''cls_syntax : 
ARGUMENT Type | RESULT Type | PARAMETER Type ''' - t[0] = { get_class_fieled(t[1]) : t[2] } + t[0] = { get_class_field(t[1]) : t[2] } def p_cls_syntax_5 (t): 'cls_syntax : CODE Value' - fld = get_class_fieled(t[1]); + fld = get_class_field(t[1]) t[0] = { fld : t[2] } if isinstance(t[2], ChoiceValue): fldt = fld + '.' + t[2].choice @@ -7357,7 +7359,7 @@ def p_cls_syntax_6 (t): '''cls_syntax : ARGUMENT Type OPTIONAL BooleanValue | RESULT Type OPTIONAL BooleanValue | PARAMETER Type OPTIONAL BooleanValue ''' - t[0] = { get_class_fieled(t[1]) : t[2], get_class_fieled(' '.join((t[1], t[3]))) : t[4] } + t[0] = { get_class_field(t[1]) : t[2], get_class_field(' '.join((t[1], t[3]))) : t[4] } # 12 Information object set definition and assignment @@ -7503,7 +7505,7 @@ def is_class_syntax(name): return False return name in class_syntaxes[class_current_syntax] -def get_class_fieled(name): +def get_class_field(name): if not class_current_syntax: return None return class_syntaxes[class_current_syntax][name] @@ -8064,13 +8066,14 @@ def ignore_comments(string): return ''.join(chunks) -def eth_main(): +def asn2wrs_main(): global input_file global g_conform global lexer - print("ASN.1 to Wireshark dissector compiler"); + global quiet + try: - opts, args = getopt.getopt(sys.argv[1:], "h?d:D:buXp:FTo:O:c:I:eESs:kLCr:"); + opts, args = getopt.getopt(sys.argv[1:], "h?d:D:buXp:qFTo:O:c:I:eESs:kLCr:") except getopt.GetoptError: eth_usage(); sys.exit(2) if len(args) < 1: @@ -8093,10 +8096,10 @@ def eth_main(): ectx.merge_modules = False ectx.group_by_prot = False ectx.conform.last_group = 0 - ectx.conform.suppress_line = False; + ectx.conform.suppress_line = False ectx.output.outnm = None ectx.output.single_file = None - ectx.constraints_check = False; + ectx.constraints_check = False for o, a in opts: if o in ("-h", "-?"): eth_usage(); sys.exit(2) @@ -8112,24 +8115,29 @@ def eth_main(): if o in ("-C",): ectx.constraints_check = True if o in ("-L",): - ectx.suppress_line = True + 
ectx.conform.suppress_line = True + if o in ("-q",): + quiet = True if o in ("-X",): warnings.warn("Command line option -X is obsolete and can be removed") if o in ("-T",): warnings.warn("Command line option -T is obsolete and can be removed") + if not quiet: + print("ASN.1 to Wireshark dissector compiler") + if conf_to_read: ectx.conform.read(conf_to_read) for o, a in opts: - if o in ("-h", "-?", "-c", "-I", "-E", "-D", "-C", "-X", "-T"): + if o in ("-h", "-?", "-c", "-I", "-E", "-D", "-C", "-q", "-X", "-T"): pass # already processed else: par = [] if a: par.append(a) ectx.conform.set_opt(o, par, "commandline", 0) - (ld, yd, pd) = (0, 0, 0); + (ld, yd, pd) = (0, 0, 0) if ectx.dbg('l'): ld = 1 if ectx.dbg('y'): yd = 1 if ectx.dbg('p'): pd = 2 @@ -8144,12 +8152,11 @@ def eth_main(): if (ectx.srcdir): fn = ectx.srcdir + '/' + fn # Read ASN.1 definition, trying one of the common encodings. data = open(fn, "rb").read() - for encoding in ('utf-8', 'windows-1252'): - try: - data = data.decode(encoding) - break - except Exception: - warnings.warn_explicit("Decoding %s as %s failed, trying next." 
% (fn, encoding), UserWarning, '', 0) + try: + data = data.decode('utf-8') + except UnicodeDecodeError: + warnings.warn_explicit(f"Decoding {fn} as UTF-8 failed.", UnicodeWarning, '', 0) + sys.exit(3) # Py2 compat, name.translate in eth_output_hf_arr fails with unicode if not isinstance(data, str): data = data.encode('utf-8') @@ -8197,7 +8204,7 @@ def eth_main(): if ectx.dbg('o'): ectx.output.dbg_print() - ectx.output.make_single_file(ectx.suppress_line) + ectx.output.make_single_file(ectx.conform.suppress_line) # Python compiler @@ -8226,7 +8233,7 @@ def main(): if __name__ == '__main__': if (os.path.splitext(os.path.basename(sys.argv[0]))[0].lower() in ('asn2wrs', 'asn2eth')): - eth_main() + asn2wrs_main() else: main() diff --git a/tools/asterix/README.md b/tools/asterix/README.md index d7b2101f..e936930d 100644 --- a/tools/asterix/README.md +++ b/tools/asterix/README.md @@ -3,7 +3,7 @@ *Asterix* is a set of standards, where each standard is defined as so called *asterix category*. In addition, each *asterix category* is potentially released -in number of editions. There is no guarantie about backward +in number of editions. There is no guarantee about backward compatibility between the editions. 
The structured version of asterix specifications is maintained diff --git a/tools/asterix/convertspec.py b/tools/asterix/convertspec.py new file mode 100755 index 00000000..18f81798 --- /dev/null +++ b/tools/asterix/convertspec.py @@ -0,0 +1,339 @@ +#!/usr/bin/env python3 +# +# By Zoran Bošnjak +# +# Convert json from new to old format +# +# SPDX-License-Identifier: GPL-2.0-or-later +# + +import sys +import argparse +import json + +def split(obj): + return (obj['tag'], obj['contents']) + +def handle_uap(obj): + t, cont = split(obj) + def f(i): + t, name = split(i) + if t == 'UapItem': + return name + elif t == 'UapItemRFS': + return 'RFS' + else: + return None + if t == 'Uap': + return { + 'type': 'uap', + 'items': [f(i) for i in cont], + } + elif t == 'Uaps': + def var(i): + name, lst = i + return { + 'name': name, + 'items': [f(i) for i in lst], + } + return { + 'type': 'uaps', + 'selector': { + 'name': cont['selector']['item'], + 'rules': cont['selector']['cases'], + }, + 'variations': [var(i) for i in cont['cases']], + } + else: + raise Exception('unexpected', t) + +def handle_number(obj): + t, cont = split(obj) + if t == 'NumInt': + return { + 'type': 'Integer', + 'value': cont, + } + elif t == 'NumDiv': + return { + 'type': 'Div', + 'numerator': handle_number(cont['numerator']), + 'denominator': handle_number(cont['denominator']), + } + elif t == 'NumPow': + return { + 'type': 'Pow', + 'base': cont['base'], + 'exponent': cont['exponent'], + } + else: + raise Exception('unexpected', t) + +def handle_signedness(obj): + t, cont = split(obj) + if t == 'Signed': + return True + elif t == 'Unsigned': + return False + else: + raise Exception('unexpected', t) + +def handle_constrain(obj): + t, cont = split(obj) + if t == 'EqualTo': s = '==' + elif t == 'NotEqualTo': s = '/=' + elif t == 'GreaterThan': s = '>' + elif t == 'GreaterThanOrEqualTo': s = '>=' + elif t == 'LessThan': s = '<' + elif t == 'LessThanOrEqualTo': s = '<=' + else: + raise Exception('unexpected', 
t) + return { + 'type': s, + 'value': handle_number(cont), + } + +def handle_content(obj): + t, cont = split(obj) + if t == 'ContentRaw': + return { + 'type': 'Raw', + } + elif t == 'ContentTable': + return { + 'type': 'Table', + 'values': cont, + } + elif t == 'ContentString': + return { + 'type': 'String', + 'variation': cont['tag'], + } + elif t == 'ContentInteger': + return { + 'type': 'Integer', + 'signed': handle_signedness(cont['signedness']), + 'constraints': [handle_constrain(i) for i in cont['constraints']], + } + elif t == 'ContentQuantity': + return { + 'type': 'Quantity', + 'constraints': [handle_constrain(i) for i in cont['constraints']], + 'lsb': handle_number(cont['lsb']), + 'signed': handle_signedness(cont['signedness']), + 'unit': cont['unit'], + } + elif t == 'ContentBds': + def f(obj): + t, cont = split(obj) + if t == 'BdsWithAddress': + return { + 'type': 'BdsWithAddress', + } + elif t == 'BdsAt': + return { + 'type': 'BdsAt', + 'address': hex(cont)[2:] if cont is not None else None, + } + else: + raise Exception('unexpected', t) + return { + 'type': 'Bds', + 'variation': f(cont), + } + else: + raise Exception('unexpected', t) + +def handle_rule(f, obj): + t, cont = split(obj) + if t == 'ContextFree': + return { + 'type': 'ContextFree', + 'value': f(cont) + } + elif t == 'Dependent': + def g(i): + a, b = i + return [ + a, + f(b), + ] + return { + 'type': 'Dependent', + 'items': cont['path'], + 'default': f(cont['default']), + 'cases': [g(i) for i in cont['cases']], + } + else: + raise Exception('unexpected', t) + +def handle_item(obj): + t, cont = split(obj) + if t == 'Spare': + return { + 'length': cont, + 'spare': True, + } + elif t == 'Item': + return handle_nonspare(cont) + else: + raise Exception('unexpected', t) + +def handle_maybe(f, obj): + if obj is None: + return None + return f(obj) + +def handle_variation(obj): + t, cont = split(obj) + if t == 'Element': + return { + 'type': t, + 'size': cont['bitSize'], + 'rule': 
handle_rule(handle_content, cont['rule']), + } + elif t == 'Group': + return { + 'type': t, + 'items': [handle_item(i) for i in cont] + } + elif t == 'Extended': + return { + 'type': t, + 'items': [handle_maybe(handle_item, i) for i in cont], + } + elif t == 'Repetitive': + def f(obj): + t, cont = split(obj) + if t == 'RepetitiveRegular': + return { + 'type': 'Regular', + 'size': cont['byteSize']*8, + } + elif t == 'RepetitiveFx': + return { + 'type': 'Fx', + } + else: + raise Exception('unexpected', t) + return { + 'type': t, + 'rep': f(cont['type']), + 'variation': handle_variation(cont['variation']), + } + elif t == 'Explicit': + def f(obj): + if obj is None: + return None + t, cont = split(obj) + if t == 'ReservedExpansion': + return 'RE' + elif t == 'SpecialPurpose': + return 'SP' + else: + raise Exception('unexpected', t) + return { + 'type': t, + 'expl': f(cont), + } + elif t == 'Compound': + return { + 'type': t, + 'fspec': None, + 'items': [handle_maybe(handle_nonspare, i) for i in cont], + } + else: + raise Exception('unexpected', t) + +def handle_nonspare(obj): + doc = obj['documentation'] + return { + 'definition': doc['definition'], + 'description': doc['description'], + 'name': obj['name'], + 'remark': doc['remark'], + 'rule': handle_rule(handle_variation, obj['rule']), + 'spare': False, + 'title': obj['title'], + } + +def has_rfs(obj): + t, cont = split(obj) + def check(obj): + t, cont = split(obj) + return t == 'UapItemRFS' + if t == 'Uap': + return any(check(i) for i in cont) + elif t == 'Uaps': + for (uap_name, lst) in cont['cases']: + if any(check(i) for i in lst): + return True + return False + else: + raise Exception('unexpected', t) + +def handle_asterix(obj): + t, cont = split(obj) + if t == 'AsterixBasic': + catalogue = [handle_nonspare(i) for i in cont['catalogue']] + if has_rfs(cont['uap']): + catalogue.append({ + "definition": "Random Field Sequencing\n", + "description": None, + "name": "RFS", + "remark": None, + "rule": { + "type": 
"ContextFree", + "value": { + "type": "Rfs" + } + }, + "spare": False, + "title": "Random Field Sequencing", + }) + return { + 'catalogue': catalogue, + 'date': cont['date'], + 'edition': cont['edition'], + 'number': cont['category'], + 'preamble': cont['preamble'], + 'title': cont['title'], + 'type': 'Basic', + 'uap': handle_uap(cont['uap']), + } + elif t == 'AsterixExpansion': + return { + 'date': cont['date'], + 'edition': cont['edition'], + 'number': cont['category'], + 'title': cont['title'], + 'type': 'Expansion', + 'variation': { + 'fspec': cont['fspecByteSize']*8, + 'items': [handle_maybe(handle_nonspare, i) for i in cont['items']], + 'type': 'Compound', + }, + } + else: + raise Exception('unexpected', t) + +def main(): + parser = argparse.ArgumentParser(description='Convert json from new to old format.') + parser.add_argument('--in-place', action='store_true') + parser.add_argument('path') + args = parser.parse_args() + + with open(args.path, 'r') as f: + s1 = f.read() + + obj = handle_asterix(json.loads(s1)) + s2 = json.dumps(obj, ensure_ascii=False, sort_keys=True, indent=4) + + if args.in_place: + with open(args.path, 'w') as f: + f.write(s2) + else: + print(s2) + +if __name__ == '__main__': + main() diff --git a/tools/asterix/packet-asterix-template.c b/tools/asterix/packet-asterix-template.c index e655cfd7..d584f1cf 100644 --- a/tools/asterix/packet-asterix-template.c +++ b/tools/asterix/packet-asterix-template.c @@ -49,22 +49,22 @@ void proto_reg_handoff_asterix(void); #define MAX_DISSECT_STR 1024 #define MAX_BUFFER 256 -static int proto_asterix = -1; - -static int hf_asterix_category = -1; -static int hf_asterix_length = -1; -static int hf_asterix_message = -1; -static int hf_asterix_fspec = -1; -static int hf_re_field_len = -1; -static int hf_spare = -1; -static int hf_counter = -1; -static int hf_XXX_FX = -1; - -static int ett_asterix = -1; -static int ett_asterix_category = -1; -static int ett_asterix_length = -1; -static int ett_asterix_message 
= -1; -static int ett_asterix_subtree = -1; +static int proto_asterix; + +static int hf_asterix_category; +static int hf_asterix_length; +static int hf_asterix_message; +static int hf_asterix_fspec; +static int hf_re_field_len; +static int hf_spare; +static int hf_counter; +static int hf_XXX_FX; + +static int ett_asterix; +static int ett_asterix_category; +static int ett_asterix_length; +static int ett_asterix_message; +static int ett_asterix_subtree; static dissector_handle_t asterix_handle; /* The following defines tell us how to decode the length of @@ -102,22 +102,20 @@ struct FieldPart_s { const char *format_string; /* format string for showing float values */ }; -DIAG_OFF_PEDANTIC typedef struct AsterixField_s AsterixField; struct AsterixField_s { - uint8_t type; /* type of field */ - unsigned length; /* fixed length */ - unsigned repetition_counter_size; /* size of repetition counter, length of one item is in length */ - unsigned header_length; /* the size is in first header_length bytes of the field */ - int *hf; /* pointer to Wireshark hf_register_info */ - const FieldPart **part; /* Look declaration and description of FieldPart above. */ - const AsterixField *field[]; /* subfields */ + uint8_t type; /* type of field */ + unsigned length; /* fixed length */ + unsigned repetition_counter_size; /* size of repetition counter, length of one item is in length */ + unsigned header_length; /* the size is in first header_length bytes of the field */ + int *hf; /* pointer to Wireshark hf_register_info */ + const FieldPart * const *part; /* Look declaration and description of FieldPart above. 
*/ + const AsterixField * const field[]; /* subfields */ }; -DIAG_ON_PEDANTIC static void dissect_asterix_packet (tvbuff_t *, packet_info *pinfo, proto_tree *); static void dissect_asterix_data_block (tvbuff_t *tvb, packet_info *pinfo, unsigned, proto_tree *, uint8_t, int); -static int dissect_asterix_fields (tvbuff_t *, packet_info *pinfo, unsigned, proto_tree *, uint8_t, const AsterixField *[]); +static int dissect_asterix_fields (tvbuff_t *, packet_info *pinfo, unsigned, proto_tree *, uint8_t, const AsterixField * const []); static void asterix_build_subtree (tvbuff_t *, packet_info *pinfo, unsigned, proto_tree *, const AsterixField *); static void twos_complement (int64_t *, int); @@ -125,8 +123,8 @@ static uint8_t asterix_bit (uint8_t, uint8_t); static unsigned asterix_fspec_len (tvbuff_t *, unsigned); static uint8_t asterix_field_exists (tvbuff_t *, unsigned, int); static uint8_t asterix_get_active_uap (tvbuff_t *, unsigned, uint8_t); -static int asterix_field_length (tvbuff_t *, unsigned, const AsterixField *); -static int asterix_field_offset (tvbuff_t *, unsigned, const AsterixField *[], int); +static int asterix_field_length (tvbuff_t *, unsigned, const AsterixField * const); +static int asterix_field_offset (tvbuff_t *, unsigned, const AsterixField * const [], int); static int asterix_message_length (tvbuff_t *, unsigned, uint8_t, uint8_t); static const char AISCode[] = { ' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', @@ -149,7 +147,6 @@ static const FieldPart IXXX_6bit_spare = { 6, 1.0, FIELD_PART_UINT, NULL, NULL } static const FieldPart IXXX_7bit_spare = { 7, 1.0, FIELD_PART_UINT, NULL, NULL }; /* Spare Item */ -DIAG_OFF_PEDANTIC static const AsterixField IX_SPARE = { FIXED, 0, 0, 0, &hf_spare, NULL, { NULL } }; /* insert1 */ @@ -469,8 +466,8 @@ static void dissect_asterix_packet (tvbuff_t *tvb, packet_info *pinfo, proto_tre * The User Application Profile (UAP) is simply a mapping from the * FSPEC to fields. 
Each category has its own UAP. */ - category = tvb_get_guint8 (tvb, i); - length = (tvb_get_guint8 (tvb, i + 1) << 8) + tvb_get_guint8 (tvb, i + 2) - 3; /* -3 for category and length */ + category = tvb_get_uint8 (tvb, i); + length = (tvb_get_uint8 (tvb, i + 1) << 8) + tvb_get_uint8 (tvb, i + 2) - 3; /* -3 for category and length */ asterix_packet_item = proto_tree_add_item (tree, proto_asterix, tvb, i, length + 3, ENC_NA); proto_item_append_text (asterix_packet_item, ", Category %03d", category); @@ -513,7 +510,9 @@ static void dissect_asterix_data_block (tvbuff_t *tvb, packet_info *pinfo, unsig } } -static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned offset, proto_tree *tree, uint8_t category, const AsterixField *current_uap[]) +// We're transported over UDP and our offset always advances. +// NOLINTNEXTLINE(misc-no-recursion) +static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned offset, proto_tree *tree, uint8_t category, const AsterixField * const current_uap []) { unsigned i, j, size, start, len, inner_offset, fspec_len; uint64_t counter; @@ -536,13 +535,13 @@ static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned o asterix_field_tree = proto_item_add_subtree (asterix_field_item, ett_asterix_subtree); fspec_len = asterix_fspec_len (tvb, offset + start); proto_tree_add_item (asterix_field_tree, hf_asterix_fspec, tvb, offset + start, fspec_len, ENC_NA); - dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, (const AsterixField **)current_uap[i]->field); + dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, current_uap[i]->field); break; case REPETITIVE: asterix_field_item = proto_tree_add_item (tree, *current_uap[i]->hf, tvb, offset + start, len, ENC_NA); asterix_field_tree = proto_item_add_subtree (asterix_field_item, ett_asterix_subtree); for (j = 0, counter = 0; j < current_uap[i]->repetition_counter_size; j++) { - counter = 
(counter << 8) + tvb_get_guint8 (tvb, offset + start + j); + counter = (counter << 8) + tvb_get_uint8 (tvb, offset + start + j); } proto_tree_add_item (asterix_field_tree, hf_counter, tvb, offset + start, current_uap[i]->repetition_counter_size, ENC_BIG_ENDIAN); for (j = 0, inner_offset = 0; j < counter; j++, inner_offset += current_uap[i]->length) { @@ -559,7 +558,7 @@ static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned o start++; fspec_len = asterix_fspec_len (tvb, offset + start); proto_tree_add_item (asterix_field_tree, hf_asterix_fspec, tvb, offset + start, fspec_len, ENC_NA); - dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, (const AsterixField **)current_uap[i]->field); + dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, current_uap[i]->field); break;*/ default: /* FIXED, FX, FX_1, FX_UAP */ asterix_field_item = proto_tree_add_item (tree, *current_uap[i]->hf, tvb, offset + start, len, ENC_NA); @@ -650,7 +649,7 @@ static void asterix_build_subtree (tvbuff_t *tvb, packet_info *pinfo, unsigned o case FIELD_PART_IAS_IM: /* special processing for I021/150 and I062/380#4 because Air Speed depends on IM subfield */ air_speed_im_bit = wmem_new (pinfo->pool, uint8_t); - *air_speed_im_bit = (tvb_get_guint8 (tvb, offset_in_tvb) & 0x80) >> 7; + *air_speed_im_bit = (tvb_get_uint8 (tvb, offset_in_tvb) & 0x80) >> 7; /* Save IM info for the packet. key = 21150. */ p_add_proto_data (pinfo->pool, pinfo, proto_asterix, 21150, air_speed_im_bit); proto_tree_add_item (parent, *field->part[i]->hf, tvb, offset_in_tvb, length_in_tvb, ENC_BIG_ENDIAN); @@ -683,8 +682,8 @@ static uint8_t asterix_bit (uint8_t b, uint8_t bitNo) * If the number is positive, all other bits must remain 0. 
*/ static void twos_complement (int64_t *v, int bit_len) { - if (*v & (G_GUINT64_CONSTANT(1) << (bit_len - 1))) { - *v |= (G_GUINT64_CONSTANT(0xffffffffffffffff) << bit_len); + if (*v & (UINT64_C(1) << (bit_len - 1))) { + *v |= (UINT64_C(0xffffffffffffffff) << bit_len); } } @@ -692,7 +691,7 @@ static unsigned asterix_fspec_len (tvbuff_t *tvb, unsigned offset) { unsigned i; unsigned max_length = tvb_reported_length (tvb) - offset; - for (i = 0; (tvb_get_guint8 (tvb, offset + i) & 1) && i < max_length; i++); + for (i = 0; (tvb_get_uint8 (tvb, offset + i) & 1) && i < max_length; i++); return i + 1; } @@ -701,16 +700,20 @@ static uint8_t asterix_field_exists (tvbuff_t *tvb, unsigned offset, int bitInde uint8_t bitNo, i; bitNo = bitIndex + bitIndex / 7; for (i = 0; i < bitNo / 8; i++) { - if (!(tvb_get_guint8 (tvb, offset + i) & 1)) return 0; + if (!(tvb_get_uint8 (tvb, offset + i) & 1)) return 0; } - return asterix_bit (tvb_get_guint8 (tvb, offset + i), bitNo % 8); + return asterix_bit (tvb_get_uint8 (tvb, offset + i), bitNo % 8); } -static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixField *field) +// We're transported over UDP and our offset always advances. 
+// NOLINTNEXTLINE(misc-no-recursion) +static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixField * const field) { + unsigned bit_size; unsigned size; uint64_t count; uint8_t i; + bool should_break; size = 0; switch(field->type) { @@ -719,20 +722,26 @@ static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixFi break; case REPETITIVE: for (i = 0, count = 0; i < field->repetition_counter_size && i < sizeof (count); i++) - count = (count << 8) + tvb_get_guint8 (tvb, offset + i); + count = (count << 8) + tvb_get_uint8 (tvb, offset + i); size = (unsigned)(field->repetition_counter_size + count * field->length); break; case FX: - for (size = field->length + field->header_length; tvb_get_guint8 (tvb, offset + size - 1) & 1; size += field->length); + for (i = 0, bit_size = 0; field->part[i] != NULL; i++) { + // We don't need to shift value as FX bits are always at the end + should_break = field->part[i]->type == FIELD_PART_FX && !(tvb_get_uint8 (tvb, offset + bit_size / 8) & 1); + bit_size += field->part[i]->bit_length; + if (should_break) break; + } + size = bit_size / 8; break; case EXP: for (i = 0, size = 0; i < field->header_length; i++) { - size = (size << 8) + tvb_get_guint8 (tvb, offset + i); + size = (size << 8) + tvb_get_uint8 (tvb, offset + i); } break; case COMPOUND: /* FSPEC */ - for (size = 0; tvb_get_guint8 (tvb, offset + size) & 1; size++); + for (size = 0; tvb_get_uint8 (tvb, offset + size) & 1; size++); size++; for (i = 0; field->field[i] != NULL; i++) { @@ -748,17 +757,17 @@ static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixFi static uint8_t asterix_get_active_uap (tvbuff_t *tvb, unsigned offset, uint8_t category) { int i, inner_offset; - AsterixField **current_uap; + AsterixField const * const *current_uap; if ((category == 1) && (categories[category] != NULL)) { /* if category is supported */ if (categories[category][global_categories_version[category]][1] != NULL) { /* if exists 
another uap */ - current_uap = (AsterixField **)categories[category][global_categories_version[category]][0]; + current_uap = categories[category][global_categories_version[category]][0]; if (current_uap != NULL) { inner_offset = asterix_fspec_len (tvb, offset); for (i = 0; current_uap[i] != NULL; i++) { if (asterix_field_exists (tvb, offset, i)) { if (i == 1) { /* uap selector (I001/020) is always at index '1' */ - return tvb_get_guint8 (tvb, offset + inner_offset) >> 7; + return tvb_get_uint8 (tvb, offset + inner_offset) >> 7; } inner_offset += asterix_field_length (tvb, offset + inner_offset, current_uap[i]); } @@ -769,7 +778,7 @@ static uint8_t asterix_get_active_uap (tvbuff_t *tvb, unsigned offset, uint8_t c return 0; } -static int asterix_field_offset (tvbuff_t *tvb, unsigned offset, const AsterixField *current_uap[], int field_index) +static int asterix_field_offset (tvbuff_t *tvb, unsigned offset, const AsterixField * const current_uap[], int field_index) { int i, inner_offset; inner_offset = 0; @@ -786,10 +795,10 @@ static int asterix_field_offset (tvbuff_t *tvb, unsigned offset, const AsterixFi static int asterix_message_length (tvbuff_t *tvb, unsigned offset, uint8_t category, uint8_t active_uap) { int i, size; - AsterixField **current_uap; + AsterixField const * const *current_uap; if (categories[category] != NULL) { /* if category is supported */ - current_uap = (AsterixField **)categories[category][global_categories_version[category]][active_uap]; + current_uap = categories[category][global_categories_version[category]][active_uap]; if (current_uap != NULL) { size = asterix_fspec_len (tvb, offset); for (i = 0; current_uap[i] != NULL; i++) { diff --git a/tools/asterix/update-specs.py b/tools/asterix/update-specs.py index 7af735dc..03850c50 100755 --- a/tools/asterix/update-specs.py +++ b/tools/asterix/update-specs.py @@ -20,6 +20,8 @@ import os import sys import re +import convertspec as convert + # Path to default upstream repository upstream_repo = 
'https://zoranbosnjak.github.io/asterix-specs' dissector_file = 'epan/dissectors/packet-asterix.c' @@ -68,42 +70,15 @@ class Context(object): self.offset = Offset() def get_number(value): - """Get Natural/Real/Rational number as an object.""" - class Integer(object): - def __init__(self, val): - self.val = val - def __str__(self): - return '{}'.format(self.val) - def __float__(self): - return float(self.val) - - class Ratio(object): - def __init__(self, a, b): - self.a = a - self.b = b - def __str__(self): - return '{}/{}'.format(self.a, self.b) - def __float__(self): - return float(self.a) / float(self.b) - - class Real(object): - def __init__(self, val): - self.val = val - def __str__(self): - return '{0:f}'.format(self.val).rstrip('0') - def __float__(self): - return float(self.val) - t = value['type'] - val = value['value'] - if t == 'Integer': - return Integer(int(val)) - if t == 'Ratio': - x, y = val['numerator'], val['denominator'] - return Ratio(x, y) - if t == 'Real': - return Real(float(val)) + return float(value['value']) + if t == 'Div': + a = get_number(value['numerator']) + b = get_number(value['denominator']) + return a/b + if t == 'Pow': + return float(pow(value['base'], value['exponent'])) raise Exception('unexpected value type {}'.format(t)) def replace_string(s, mapping): @@ -132,19 +107,10 @@ def safe_string(s): def get_scaling(content): """Get scaling factor from the content.""" - k = content.get('scaling') - if k is None: + lsb = content.get('lsb') + if lsb is None: return None - k = get_number(k) - - fract = content['fractionalBits'] - - if fract > 0: - scale = format(float(k) / (pow(2, fract)), '.29f') - scale = scale.rstrip('0') - else: - scale = format(float(k)) - return scale + return '{}'.format(get_number(lsb)) def get_fieldpart(content): """Get FIELD_PART* from the content.""" @@ -297,14 +263,12 @@ def reference(cat, edition, path): return('{:03d}_{}'.format(cat, name)) return('{:03d}_V{}_{}_{}'.format(cat, edition['major'], 
edition['minor'], name)) -def get_content(rule): +def get_rule(rule): t = rule['type'] - # Most cases are 'ContextFree', use as specified. if t == 'ContextFree': - return rule['content'] - # Handle 'Dependent' contents as 'Raw'. + return rule['value'] elif t == 'Dependent': - return {'type': "Raw"} + return rule['default'] else: raise Exception('unexpected type: {}'.format(t)) @@ -313,7 +277,7 @@ def get_bit_size(item): if item['spare']: return item['length'] else: - return item['variation']['size'] + return get_rule(item['rule'])['size'] def get_description(item, content=None): """Return item description.""" @@ -336,12 +300,18 @@ def generate_group(item, variation=None): level2['is_generated'] = True if variation is None: level1 = copy(item) - level1['variation'] = { - 'type': 'Group', - 'items': [level2], + level1['rule'] = { + 'type': 'ContextFree', + 'value': { + 'type': 'Group', + 'items': [level2], + }, } else: - level2['variation'] = variation['variation'] + level2['rule'] = { + 'type': 'ContextFree', + 'value': variation, + } level1 = { 'type': "Group", 'items': [level2], @@ -353,15 +323,18 @@ def is_generated(item): def ungroup(item): """Convert group of items of known size to element""" - n = sum([get_bit_size(i) for i in item['variation']['items']]) + n = sum([get_bit_size(i) for i in get_rule(item['rule'])['items']]) result = copy(item) - result['variation'] = { - 'rule': { - 'content': {'type': 'Raw'}, - 'type': 'ContextFree', + result['rule'] = { + 'type': 'ContextFree', + 'value': { + 'type': 'Element', + 'size': n, + 'rule': { + 'type': 'ContextFree', + 'value': {'type': 'Raw'}, + }, }, - 'size': n, - 'type': 'Element', } return result @@ -397,9 +370,9 @@ def part1(ctx, get_ref, catalogue): return '&I{}_{}'.format(ref, item['name']) if t == 'Element': - tell('static int hf_{} = -1;'.format(ref)) + tell('static int hf_{};'.format(ref)) n = variation['size'] - content = get_content(variation['rule']) + content = get_rule(variation['rule']) scaling = 
get_scaling(content) scaling = scaling if scaling is not None else 1.0 fp = get_fieldpart(content) @@ -425,12 +398,12 @@ def part1(ctx, get_ref, catalogue): description = get_description(item) tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}')) - tell('static int hf_{} = -1;'.format(ref)) + tell('static int hf_{};'.format(ref)) for i in variation['items']: handle_item(path, i) # FieldPart[] - tell('static const FieldPart *I{}_PARTS[] = {}'.format(ref,'{')) + tell('static const FieldPart * const I{}_PARTS[] = {}'.format(ref,'{')) for i in variation['items']: tell(' {},'.format(part_of(i))) tell(' NULL') @@ -450,15 +423,15 @@ def part1(ctx, get_ref, catalogue): description = get_description(item) tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}')) - tell('static int hf_{} = -1;'.format(ref)) + tell('static int hf_{};'.format(ref)) items = [] for i in variation['items']: if i is None: items.append(i) continue - if i.get('variation') is not None: - if i['variation']['type'] == 'Group': + if i.get('rule') is not None: + if get_rule(i['rule'])['type'] == 'Group': i = ungroup(i) items.append(i) @@ -468,7 +441,7 @@ def part1(ctx, get_ref, catalogue): else: handle_item(path, i) - tell('static const FieldPart *I{}_PARTS[] = {}'.format(ref,'{')) + tell('static const FieldPart * const I{}_PARTS[] = {}'.format(ref,'{')) for i in items: if i is None: tell(' &IXXX_FX,') @@ -479,12 +452,10 @@ def part1(ctx, get_ref, catalogue): tell('};') # AsterixField - first_part = list(takewhile(lambda x: x is not None, items)) - n = (sum([get_bit_size(i) for i in first_part]) + 1) // 8 parts = 'I{}_PARTS'.format(ref) comp = '{ NULL }' - tell('static const AsterixField I{} = {} FX, {}, 0, {}, &hf_{}, {}, {} {};'.format - (ref, '{', n, 0, ref, parts, comp, '}')) + tell('static const AsterixField I{} = {} FX, 
0, 0, 0, &hf_{}, {}, {} {};'.format + (ref, '{', ref, parts, comp, '}')) elif t == 'Repetitive': ctx.reset_offset() @@ -492,7 +463,7 @@ def part1(ctx, get_ref, catalogue): # Group is required below this item. if variation['variation']['type'] == 'Element': - subvar = generate_group(item, variation) + subvar = generate_group(item, variation['variation']) else: subvar = variation['variation'] handle_variation(path, subvar) @@ -509,14 +480,14 @@ def part1(ctx, get_ref, catalogue): elif t == 'Explicit': ctx.reset_offset() - tell('static int hf_{} = -1;'.format(ref)) + tell('static int hf_{};'.format(ref)) description = get_description(item) tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}')) tell('static const AsterixField I{} = {} EXP, 0, 0, 1, &hf_{}, NULL, {} NULL {} {};'.format(ref, '{', ref, '{', '}', '}')) elif t == 'Compound': ctx.reset_offset() - tell('static int hf_{} = -1;'.format(ref)) + tell('static int hf_{};'.format(ref)) description = get_description(item) tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}')) comp = '{' @@ -525,7 +496,7 @@ def part1(ctx, get_ref, catalogue): comp += ' &IX_SPARE,' continue # Group is required below this item. - if i['variation']['type'] == 'Element': + if get_rule(i['rule'])['type'] == 'Element': subitem = generate_group(i) else: subitem = i @@ -545,30 +516,36 @@ def part1(ctx, get_ref, catalogue): return # Group is required on the first level. 
- if path == [] and item['variation']['type'] == 'Element': - variation = generate_group(item)['variation'] + if path == [] and get_rule(item['rule'])['type'] == 'Element': + variation = get_rule(generate_group(item)['rule']) else: - variation = item['variation'] + variation = get_rule(item['rule']) handle_variation(path + [item['name']], variation) for item in catalogue: # adjust 'repetitive fx' item - if item['variation']['type'] == 'Repetitive' and item['variation']['rep']['type'] == 'Fx': - var = item['variation']['variation'].copy() + if get_rule(item['rule'])['type'] == 'Repetitive' and get_rule(item['rule'])['rep']['type'] == 'Fx': + var = get_rule(item['rule'])['variation'].copy() if var['type'] != 'Element': raise Exception("Expecting 'Element'") item = item.copy() - item['variation'] = { - 'type': 'Extended', - 'items': [{ - 'definition': None, - 'description': None, - 'name': 'Subitem', - 'remark': None, - 'spare': False, - 'title': 'Subitem', - 'variation': var, + item['rule'] = { + 'type': 'ContextFree', + 'value': { + 'type': 'Extended', + 'items': [{ + 'definition': None, + 'description': None, + 'name': 'Subitem', + 'remark': None, + 'spare': False, + 'title': 'Subitem', + 'rule': { + 'type': 'ContextFree', + 'value': var, + }, }, None] + } } handle_item([], item) tell('') @@ -577,7 +554,6 @@ def part2(ctx, ref, uap): """Generate UAPs""" tell = lambda s: ctx.tell('insert1', s) - tell('DIAG_OFF_PEDANTIC') ut = uap['type'] if ut == 'uap': @@ -588,7 +564,7 @@ def part2(ctx, ref, uap): raise Exception('unexpected uap type {}'.format(ut)) for var in variations: - tell('static const AsterixField *I{}_{}[] = {}'.format(ref, var['name'], '{')) + tell('static const AsterixField * const I{}_{}[] = {}'.format(ref, var['name'], '{')) for i in var['items']: if i is None: tell(' &IX_SPARE,') @@ -597,12 +573,11 @@ def part2(ctx, ref, uap): tell(' NULL') tell('};') - tell('static const AsterixField **I{}[] = {}'.format(ref, '{')) + tell('static const AsterixField * 
const * const I{}[] = {}'.format(ref, '{')) for var in variations: tell(' I{}_{},'.format(ref, var['name'])) tell(' NULL') tell('};') - tell('DIAG_ON_PEDANTIC') tell('') def part3(ctx, specs): @@ -620,9 +595,7 @@ def part3(ctx, specs): editions = sorted([val['edition'] for val in lst], key = lambda x: (x['major'], x['minor']), reverse=True) editions_fmt = [fmt_edition(cat, edition) for edition in editions] editions_str = ', '.join(['I{:03d}'.format(cat)] + editions_fmt) - tell('DIAG_OFF_PEDANTIC') - tell('static const AsterixField ***I{:03d}all[] = {} {} {};'.format(cat, '{', editions_str, '}')) - tell('DIAG_ON_PEDANTIC') + tell('static const AsterixField * const * const * const I{:03d}all[] = {} {} {};'.format(cat, '{', editions_str, '}')) tell('') tell('static const enum_val_t I{:03d}_versions[] = {}'.format(cat, '{')) @@ -646,7 +619,7 @@ def part4(ctx, cats): tell = lambda s: ctx.tell('insert1', s) tell_pr = lambda s: ctx.tell('insert3', s) - tell('static const AsterixField ****categories[] = {') + tell('static const AsterixField * const * const * const * const categories[] = {') for i in range(0, 256): val = 'I{:03d}all'.format(i) if i in cats else 'NULL' tell(' {}, /* {:03d} */'.format(val, i)) @@ -683,7 +656,7 @@ def remove_rfs(spec): catalogue = [] # create new catalogue without RFS rfs_items = [] for i in spec['catalogue']: - if i['variation']['type'] == 'Rfs': + if get_rule(i['rule'])['type'] == 'Rfs': rfs_items.append(i['name']) else: catalogue.append(i) @@ -716,7 +689,7 @@ def is_valid(spec): def check_item(item): if item['spare']: return True - return check_variation(item['variation']) + return check_variation(get_rule(item['rule'])) def check_variation(variation): t = variation['type'] if t == 'Element': @@ -757,6 +730,7 @@ def main(): # read and json-decode input files jsons = load_jsons(args.paths) jsons = [json.loads(i) for i in jsons] + jsons = [convert.handle_asterix(i) for i in jsons] jsons = sorted(jsons, key = lambda x: (x['number'], 
x['edition']['major'], x['edition']['minor'])) jsons = [spec for spec in jsons if spec['type'] == 'Basic'] jsons = [remove_rfs(spec) for spec in jsons] @@ -780,13 +754,15 @@ def main(): for spec in jsons: is_latest = spec['edition'] == latest_editions[spec['number']] - ctx.tell('insert1', '/* Category {:03d}, edition {}.{} */'.format(spec['number'], spec['edition']['major'], spec['edition']['minor'])) + ctx.tell('insert1', '/* Category {:03d}, edition {}.{} */'.format( + spec['number'], spec['edition']['major'], spec['edition']['minor'])) # handle part1 get_ref = lambda path: reference(spec['number'], spec['edition'], path) part1(ctx, get_ref, spec['catalogue']) if is_latest: - ctx.tell('insert1', '/* Category {:03d}, edition {}.{} (latest) */'.format(spec['number'], spec['edition']['major'], spec['edition']['minor'])) + ctx.tell('insert1', '/* Category {:03d}, edition {}.{} (latest) */'.format( + spec['number'], spec['edition']['major'], spec['edition']['minor'])) get_ref = lambda path: reference(spec['number'], None, path) part1(ctx, get_ref, spec['catalogue']) @@ -826,4 +802,3 @@ def main(): if __name__ == '__main__': main() - diff --git a/tools/bsd-setup.sh b/tools/bsd-setup.sh index 6b018c69..2a48b28d 100755 --- a/tools/bsd-setup.sh +++ b/tools/bsd-setup.sh @@ -21,7 +21,7 @@ print_usage() { printf "\\nUtility to setup a bsd-based system for Wireshark Development.\\n" printf "The basic usage installs the needed software\\n\\n" - printf "Usage: $0 [--install-optional] [...other options...]\\n" + printf "Usage: %s [--install-optional] [...other options...]\\n" "$0" printf "\\t--install-optional: install optional software as well\\n" printf "\\t[other]: other options are passed as-is to pkg manager.\\n" } @@ -44,7 +44,7 @@ for arg; do done # Check if the user is root -if [ $(id -u) -ne 0 ] +if [ "$(id -u)" -ne 0 ] then echo "You must be root." 
exit 1 @@ -66,7 +66,6 @@ ADDITIONAL_LIST="\ libsmi \ brotli \ zstd \ - lua52 \ " # Uncomment to add PNG compression utilities used by compress-pngs: @@ -76,7 +75,7 @@ ADDITIONAL_LIST="\ # pngcrush" # Guess which package manager we will use -PM=`which pkgin 2> /dev/null || which pkg 2> /dev/null || which pkg_add 2> /dev/null` +PM=$( which pkgin 2> /dev/null || which pkg 2> /dev/null || which pkg_add 2> /dev/null ) case $PM in */pkgin) @@ -101,6 +100,7 @@ echo "Using $PM ($PM_SEARCH)" # Adds package $2 to list variable $1 if the package is found add_package() { + # shellcheck disable=SC3043 local list="$1" pkgname="$2" # fail if the package is not known @@ -174,9 +174,17 @@ echo "ninja is unavailable" add_package ADDITIONAL_LIST libilbc || echo "libilbc is unavailable" +# lua: OpenBSD latest (current 5.4) +# lua54: FreeBSD, NetBSD 5.4.x +# lua53 is also acceptable +add_package ADDITIONAL_LIST lua || +add_package ADDITIONAL_LIST lua54 || +add_package ADDITIONAL_LIST lua53 || +echo "lua >= 5.3 is unavailable" + # Add OS-specific required/optional packages # Those not listed don't require additions. -case `uname` in +case $( uname ) in FreeBSD | NetBSD) add_package ADDITIONAL_LIST libgcrypt || echo "libgcrypt is unavailable" ;; @@ -190,6 +198,7 @@ then ACTUAL_LIST="$ACTUAL_LIST $ADDITIONAL_LIST" fi +# shellcheck disable=SC2086 $PM $PM_OPTIONS $ACTUAL_LIST $OPTIONS if [ ! $? ] then @@ -198,5 +207,5 @@ fi if [ $ADDITIONAL -eq 0 ] then - echo -e "\n*** Optional packages not installed. Rerun with --install-optional to have them.\n" + printf "\\n*** Optional packages not installed. Rerun with --install-optional to have them.\\n" fi diff --git a/tools/checkAPIs.pl b/tools/checkAPIs.pl index c9570b58..855718f0 100755 --- a/tools/checkAPIs.pl +++ b/tools/checkAPIs.pl @@ -454,14 +454,14 @@ sub check_value_string_arrays($$$) my $expectedTrailer; my $trailerHint; if ($type eq "string_string") { - # XXX shouldn't we reject 0 since it is gchar*? 
+ # XXX shouldn't we reject 0 since it is char *? $expectedTrailer = "(NULL|0), NULL"; $trailerHint = "NULL, NULL"; } elsif ($type eq "range_string") { $expectedTrailer = "0(x0+)?, 0(x0+)?, NULL"; $trailerHint = "0, 0, NULL"; } elsif ($type eq "bytes_string") { - # XXX shouldn't we reject 0 since it is guint8*? + # XXX shouldn't we reject 0 since it is uint8_t *? $expectedTrailer = "(NULL|0), 0, NULL"; $trailerHint = "NULL, NULL"; } else { @@ -797,6 +797,10 @@ sub check_hf_entries($$) print STDERR "Error: $hf: BASE_EXT_STRING should use VALS_EXT_PTR for 'strings' instead of '$convert' in $filename\n"; $errorCount++; } + if ($display =~ /BASE_UNIT_STRING/ && ($convert !~ m/^((0[xX]0?)?0$|NULL$|UNS)/)) { + print STDERR "Error: $hf: BASE_UNIT_STRING with non-null 'convert' field missing UNS in $filename\n"; + $errorCount++; + } if ($ft =~ m/^FT_U?INT(8|16|24|32)$/ && $convert =~ m/^VALS64\(/) { print STDERR "Error: $hf: 32-bit field must use VALS instead of VALS64 in $filename\n"; $errorCount++; @@ -809,8 +813,8 @@ sub check_hf_entries($$) print STDERR "Error: $hf is passing the address of a pointer to $1 in $filename\n"; $errorCount++; } - if ($convert !~ m/^((0[xX]0?)?0$|NULL$|VALS|VALS64|VALS_EXT_PTR|RVALS|TFS|CF_FUNC|FRAMENUM_TYPE|&|STRINGS_ENTERPRISES)/ && $display !~ /BASE_CUSTOM/) { - print STDERR "Error: non-null $hf 'convert' field missing 'VALS|VALS64|RVALS|TFS|CF_FUNC|FRAMENUM_TYPE|&|STRINGS_ENTERPRISES' in $filename ?\n"; + if ($convert !~ m/^((0[xX]0?)?0$|NULL$|VALS|VALS64|VALS_EXT_PTR|RVALS|TFS|UNS|CF_FUNC|FRAMENUM_TYPE|&|STRINGS_ENTERPRISES)/ && $display !~ /BASE_CUSTOM/) { + print STDERR "Error: non-null $hf 'convert' field missing 'VALS|VALS64|RVALS|TFS|UNS|CF_FUNC|FRAMENUM_TYPE|&|STRINGS_ENTERPRISES' in $filename ?\n"; $errorCount++; } ## Benign... 
diff --git a/tools/check_col_apis.py b/tools/check_col_apis.py new file mode 100755 index 00000000..eb8e1850 --- /dev/null +++ b/tools/check_col_apis.py @@ -0,0 +1,310 @@ +#!/usr/bin/env python3 +# Wireshark - Network traffic analyzer +# By Gerald Combs +# Copyright 1998 Gerald Combs +# +# SPDX-License-Identifier: GPL-2.0-or-later + +# Scan dissectors for calls to col_[set|add|append]_[f]str +# to check that most appropriate API is being used + +import os +import re +import subprocess +import argparse +import signal + + +# Try to exit soon after Ctrl-C is pressed. +should_exit = False + +def signal_handler(sig, frame): + global should_exit + should_exit = True + print('You pressed Ctrl+C - exiting') + +signal.signal(signal.SIGINT, signal_handler) + + +# Test for whether the given file was automatically generated. +def isGeneratedFile(filename): + # Check file exists - e.g. may have been deleted in a recent commit. + if not os.path.exists(filename): + return False + + # Open file + f_read = open(os.path.join(filename), 'r', encoding="utf8") + lines_tested = 0 + for line in f_read: + # The comment to say that its generated is near the top, so give up once + # get a few lines down. + if lines_tested > 10: + f_read.close() + return False + if (line.find('Generated automatically') != -1 or + line.find('Generated Automatically') != -1 or + line.find('Autogenerated from') != -1 or + line.find('is autogenerated') != -1 or + line.find('automatically generated by Pidl') != -1 or + line.find('Created by: The Qt Meta Object Compiler') != -1 or + line.find('This file was generated') != -1 or + line.find('This filter was automatically generated') != -1 or + line.find('This file is auto generated, do not edit!') != -1 or + line.find('This file is auto generated') != -1): + + f_read.close() + return True + lines_tested = lines_tested + 1 + + # OK, looks like a hand-written file! 
+ f_read.close() + return False + + +def removeComments(code_string): + code_string = re.sub(re.compile(r"/\*.*?\*/",re.DOTALL ) ,"" ,code_string) # C-style comment + code_string = re.sub(re.compile(r"//.*?\n" ) ,"" ,code_string) # C++-style comment + return code_string + + +def is_dissector_file(filename): + p = re.compile(r'.*(packet|file)-.*\.c') + return p.match(filename) + +def findDissectorFilesInFolder(folder, recursive=False): + dissector_files = [] + + if recursive: + for root, subfolders, files in os.walk(folder): + for f in files: + if should_exit: + return + f = os.path.join(root, f) + dissector_files.append(f) + else: + for f in sorted(os.listdir(folder)): + if should_exit: + return + filename = os.path.join(folder, f) + dissector_files.append(filename) + + return [x for x in filter(is_dissector_file, dissector_files)] + + + +warnings_found = 0 +errors_found = 0 + +class ColCall: + def __init__(self, file, line_number, name, last_args, generated, verbose): + self.filename = file + self.line_number = line_number + self.name = name + self.last_args = last_args + self.generated = generated + self.verbose = verbose + + def issue_prefix(self): + generated = '(GENERATED) ' if self.generated else '' + return self.filename + ':' + generated + str(self.line_number) + ' : called ' + self.name + ' with ' + self.last_args + + def check(self): + global warnings_found + + self.last_args = self.last_args.replace('\\\"', "'") + self.last_args = self.last_args.strip() + + # Empty string never a good idea + if self.last_args == r'""': + if self.name.find('append') == -1: + print('Warning:', self.issue_prefix(), '- if want to clear column, use col_clear() instead') + warnings_found += 1 + else: + # TODO: pointless if appending, but unlikely to see + pass + + # This is never a good idea.. 
+ if self.last_args.startswith(r'"%s"'): + print('Warning:', self.issue_prefix(), " - don't need fstr API?") + warnings_found += 1 + + # String should be static, or at least persist + if self.name == 'col_set_str': + # Literal strings are safe, as well as some other patterns.. + if self.last_args.startswith('"'): + return + elif self.last_args.startswith('val_to_str_const') or self.last_args.startswith('val_to_str_ext_const'): + return + # TODO: substitute macros to avoid some special cases.. + elif self.last_args.startswith('PSNAME') or self.last_args.startswith('PNAME') or self.last_args.startswith('PROTO_SHORT_NAME'): + return + # TODO; match ternary test with both outcomes being literal strings? + else: + if self.verbose: + # Not easy/possible to judge lifetime of string.. + print('Note:', self.issue_prefix(), '- is this OK??') + + if self.name == 'col_add_str': + # If literal string, could have used col_set_str instead? + self.last_args = self.last_args.replace('\\\"', "'") + self.last_args = self.last_args.strip() + if self.last_args.startswith('"'): + print('Warning:', self.issue_prefix(), '- could call col_set_str() instead') + warnings_found += 1 + elif self.last_args.startswith('val_to_str_const'): + print('Warning:', self.issue_prefix(), '- const so could use col_set_str() instead') + warnings_found += 1 + elif self.last_args.startswith('val_to_str_ext_const'): + print('Warning:', self.issue_prefix(), '- const so could use col_set_str() instead') + warnings_found += 1 + + if self.name == 'col_append_str': + pass + if self.name == 'col_add_fstr' or self.name == 'col_append_fstr': + # Look at format string + self.last_args = self.last_args.replace('\\\"', "'") + m = re.search(r'"(.*?)"', self.last_args) + if m: + # Should contain at least one format specifier! 
+ format_string = m.group(1) + if format_string.find('%') == -1: + print('Warning:', self.issue_prefix(), 'with no format specifiers - "' + format_string + '" - use str() version instead') + warnings_found += 1 + + +# Check the given dissector file. +def checkFile(filename, generated, verbose=False): + global warnings_found + global errors_found + + # Check file exists - e.g. may have been deleted in a recent commit. + if not os.path.exists(filename): + print(filename, 'does not exist!') + return + + with open(filename, 'r', encoding="utf8") as f: + full_contents = f.read() + + # Remove comments so as not to trip up RE. + contents = removeComments(full_contents) + + # Look for all calls in this file + matches = re.finditer(r'(col_set_str|col_add_str|col_add_fstr|col_append_str|col_append_fstr)\((.*?)\)\s*\;', contents, re.MULTILINE|re.DOTALL) + col_calls = [] + + last_line_number = 1 + last_char_offset = 0 + + for m in matches: + args = m.group(2) + + line_number = -1 + # May fail to find there were comments inside call... + # Make search partial to: + # - avoid finding an earlier identical call + # - speed up searching by making it shorter + remaining_lines_text = full_contents[last_char_offset:] + match_offset = remaining_lines_text.find(m.group(0)) + if match_offset != -1: + match_in_lines = len(remaining_lines_text[0:match_offset].splitlines()) + line_number = last_line_number + match_in_lines-1 + last_line_number = line_number + last_char_offset += match_offset + 1 # enough to not match again + + # Match first 2 args plus remainer + args_m = re.match(r'(.*?),\s*(.*?),\s*(.*)', args) + if args_m: + col_calls.append(ColCall(filename, line_number, m.group(1), last_args=args_m.group(3), + generated=generated, verbose=verbose)) + + # Check them all + for call in col_calls: + call.check() + + + +################################################################# +# Main logic. + +# command-line args. Controls which dissector files should be checked. 
+# If no args given, will scan all dissectors. +parser = argparse.ArgumentParser(description='Check calls in dissectors') +parser.add_argument('--file', action='append', + help='specify individual dissector file to test') +parser.add_argument('--commits', action='store', + help='last N commits to check') +parser.add_argument('--open', action='store_true', + help='check open files') +parser.add_argument('--verbose', action='store_true', + help='show extra info') + + +args = parser.parse_args() + + +# Get files from wherever command-line args indicate. +files = [] +if args.file: + # Add specified file(s) + for f in args.file: + if not os.path.isfile(f) and not f.startswith('epan'): + f = os.path.join('epan', 'dissectors', f) + if not os.path.isfile(f): + print('Chosen file', f, 'does not exist.') + exit(1) + else: + files.append(f) +elif args.commits: + # Get files affected by specified number of commits. + command = ['git', 'diff', '--name-only', 'HEAD~' + args.commits] + files = [f.decode('utf-8') + for f in subprocess.check_output(command).splitlines()] + # Will examine dissector files only + files = list(filter(lambda f : is_dissector_file(f), files)) +elif args.open: + # Unstaged changes. + command = ['git', 'diff', '--name-only'] + files = [f.decode('utf-8') + for f in subprocess.check_output(command).splitlines()] + # Only interested in dissector files. + files = list(filter(lambda f : is_dissector_file(f), files)) + # Staged changes. + command = ['git', 'diff', '--staged', '--name-only'] + files_staged = [f.decode('utf-8') + for f in subprocess.check_output(command).splitlines()] + # Only interested in dissector files. + files_staged = list(filter(lambda f : is_dissector_file(f), files_staged)) + for f in files_staged: + if f not in files: + files.append(f) +else: + # Find all dissector files from folder. 
+ files = findDissectorFilesInFolder(os.path.join('epan', 'dissectors')) + files += findDissectorFilesInFolder(os.path.join('plugins', 'epan'), recursive=True) + files += findDissectorFilesInFolder(os.path.join('epan', 'dissectors', 'asn1'), recursive=True) + + +# If scanning a subset of files, list them here. +print('Examining:') +if args.file or args.commits or args.open: + if files: + print(' '.join(files), '\n') + else: + print('No files to check.\n') +else: + print('All dissectors\n') + + +# Now check the chosen files +for f in files: + if should_exit: + exit(1) + + checkFile(f, isGeneratedFile(f), verbose=args.verbose) + + +# Show summary. +print(warnings_found, 'warnings found') +if errors_found: + print(errors_found, 'errors found') + exit(1) diff --git a/tools/check_dissector.py b/tools/check_dissector.py index af1dc648..1461f66e 100755 --- a/tools/check_dissector.py +++ b/tools/check_dissector.py @@ -83,18 +83,19 @@ if args.file_list: # Boolean arg is for whether build-dir is needed in order to run it. # 3rd is Windows support. 
tools = [ - ('tools/delete_includes.py --folder .', True, True), - ('tools/check_spelling.py', False, True), - ('tools/check_tfs.py --check-value-strings', False, True), - ('tools/check_typed_item_calls.py --all-checks', False, True), - ('tools/check_static.py', True, False), - ('tools/check_dissector_urls.py', False, True), - ('tools/check_val_to_str.py', False, True), - ('tools/cppcheck/cppcheck.sh', False, True), - ('tools/checkhf.pl', False, True), - ('tools/checkAPIs.pl', False, True), - ('tools/fix-encoding-args.pl', False, True), - ('tools/checkfiltername.pl', False, True) + ('tools/delete_includes.py --folder .', True, True), + ('tools/check_spelling.py --comments --no-wikipedia', False, True), + ('tools/check_tfs.py --check-value-strings', False, True), + ('tools/check_typed_item_calls.py --all-checks', False, True), + ('tools/check_static.py', True, False), + ('tools/check_dissector_urls.py', False, True), + ('tools/check_val_to_str.py', False, True), + ('tools/check_col_apis.py', False, True), + ('tools/cppcheck/cppcheck.sh', False, True), + ('tools/checkhf.pl', False, True), + ('tools/checkAPIs.pl', False, True), + ('tools/fix-encoding-args.pl', False, True), + ('tools/checkfiltername.pl', False, True) ] diff --git a/tools/check_dissector_urls.py b/tools/check_dissector_urls.py index 373d88b8..96ee4d65 100755 --- a/tools/check_dissector_urls.py +++ b/tools/check_dissector_urls.py @@ -122,6 +122,9 @@ files = [] all_urls = set() def find_links_in_file(filename): + if os.path.isdir(filename): + return + with open(filename, 'r', encoding="utf8") as f: for line_number, line in enumerate(f, start=1): # TODO: not matching @@ -141,14 +144,21 @@ def find_links_in_file(filename): all_urls.add(url) -# Scan the given folder for links to test. +# Scan the given folder for links to test. Recurses. def find_links_in_folder(folder): - # Look at files in sorted order, to give some idea of how far through it - # is. 
- for filename in sorted(os.listdir(folder)): - if filename.endswith('.c'): - global links - find_links_in_file(os.path.join(folder, filename)) + files_to_check = [] + for root,subfolders,files in os.walk(folder): + for f in files: + if should_exit: + return + file = os.path.join(root, f) + if file.endswith('.c') or file.endswith('.adoc'): + files_to_check.append(file) + + # Deal with files in sorted order. + for file in sorted(files_to_check): + find_links_in_file(file) + async def populate_cache(sem, session, url): @@ -181,8 +191,8 @@ async def check_all_links(links): except (asyncio.CancelledError): await session.close() - for l in links: - l.validate() + for link in links: + link.validate() ################################################################# @@ -199,12 +209,15 @@ parser.add_argument('--open', action='store_true', help='check open files') parser.add_argument('--verbose', action='store_true', help='when enabled, show more output') +parser.add_argument('--docs', action='store_true', + help='when enabled, also check document folders') + args = parser.parse_args() def is_dissector_file(filename): - p = re.compile(r'epan/dissectors/packet-.*\.c') + p = re.compile(r'.*(packet|file)-.*\.c') return p.match(filename) @@ -212,7 +225,7 @@ def is_dissector_file(filename): if args.file: # Add specified file(s) for f in args.file: - if not f.startswith('epan'): + if not os.path.isfile(f) and not f.startswith('epan'): f = os.path.join('epan', 'dissectors', f) if not os.path.isfile(f): print('Chosen file', f, 'does not exist.') @@ -246,10 +259,13 @@ elif args.open: if f not in files: find_links_in_file(f) files.append(f) +elif args.docs: + # Find links from doc folder(s) + find_links_in_folder(os.path.join(os.path.dirname(__file__), '..', 'doc')) + else: # Find links from dissector folder. 
- find_links_in_folder(os.path.join(os.path.dirname( - __file__), '..', 'epan', 'dissectors')) + find_links_in_folder(os.path.join(os.path.dirname(__file__), '..', 'epan', 'dissectors')) # If scanning a subset of files, list them here. @@ -260,7 +276,10 @@ if args.file or args.commits or args.open: else: print('No files to check.\n') else: - print('All dissector modules\n') + if not args.docs: + print('All dissector modules\n') + else: + print('Document sources') asyncio.run(check_all_links(links)) @@ -268,21 +287,21 @@ asyncio.run(check_all_links(links)) if os.path.exists('failures.txt'): shutil.copyfile('failures.txt', 'failures_last_run.txt') with open('failures.txt', 'w') as f_f: - for l in links: - if l.tested and not l.success: - f_f.write(str(l) + '\n') + for link in links: + if link.tested and not link.success: + f_f.write(str(link) + '\n') # And successes with open('successes.txt', 'w') as f_s: - for l in links: - if l.tested and l.success: - f_s.write(str(l) + '\n') + for link in links: + if link.tested and link.success: + f_s.write(str(link) + '\n') # Count and show overall stats. 
passed, failed = 0, 0 -for l in links: - if l.tested: - if l.success: +for link in links: + if link.tested: + if link.success: passed += 1 else: failed += 1 diff --git a/tools/check_help_urls.py b/tools/check_help_urls.py index ddf3673e..c9ad6f3f 100755 --- a/tools/check_help_urls.py +++ b/tools/check_help_urls.py @@ -22,7 +22,7 @@ with open("ui/help_url.c") as f: chapter = url.group(1) found[chapter] = False -adoc_files = glob("docbook/wsug_src/*.adoc") +adoc_files = glob("doc/wsug_src/*.adoc") for adoc_file in adoc_files: with open(adoc_file) as f: diff --git a/tools/check_spelling.py b/tools/check_spelling.py index 7e319081..be0bbf82 100755 --- a/tools/check_spelling.py +++ b/tools/check_spelling.py @@ -11,10 +11,18 @@ import re import subprocess import argparse import signal +import glob + +from spellchecker import SpellChecker from collections import Counter +from html.parser import HTMLParser +import urllib.request # Looks for spelling errors among strings found in source or documentation files. -# N.B. To run this script, you should install pyspellchecker (not spellchecker) using pip. +# N.B., +# - To run this script, you should install pyspellchecker (not spellchecker) using pip. +# - Because of colouring, you may want to pipe into less -R + # TODO: check structured doxygen comments? @@ -44,12 +52,12 @@ signal.signal(signal.SIGINT, signal_handler) # Create spellchecker, and augment with some Wireshark words. -from spellchecker import SpellChecker # Set up our dict with words from text file. spell = SpellChecker() spell.word_frequency.load_text_file('./tools/wireshark_words.txt') + # Track words that were not found. missing_words = [] @@ -67,7 +75,8 @@ class File: self.values = [] filename, extension = os.path.splitext(file) - self.code_file = extension in {'.c', '.cpp'} + # TODO: add '.lua'? Would also need to check string and comment formats... 
+ self.code_file = extension in {'.c', '.cpp', '.h' } with open(file, 'r', encoding="utf8") as f: @@ -124,7 +133,6 @@ class File: def checkMultiWordsRecursive(self, word): length = len(word) - #print('word=', word) if length < 4: return False @@ -159,6 +167,12 @@ class File: v = str(v) + # Sometimes parentheses used to show optional letters, so don't leave space + #if re.compile(r"^[\S]*\(").search(v): + # v = v.replace('(', '') + #if re.compile(r"\S\)").search(v): + # v = v.replace(')', '') + # Ignore includes. if v.endswith('.h'): continue @@ -191,17 +205,19 @@ class File: v = v.replace('?', ' ') v = v.replace('=', ' ') v = v.replace('*', ' ') + v = v.replace('%u', '') + v = v.replace('%d', '') + v = v.replace('%s', '') v = v.replace('%', ' ') v = v.replace('#', ' ') v = v.replace('&', ' ') v = v.replace('@', ' ') v = v.replace('$', ' ') + v = v.replace('^', ' ') v = v.replace('®', '') v = v.replace("'", ' ') v = v.replace('"', ' ') - v = v.replace('%u', '') - v = v.replace('%d', '') - v = v.replace('%s', '') + v = v.replace('~', ' ') # Split into words. value_words = v.split() @@ -225,11 +241,14 @@ class File: if word.endswith("s’"): word = word[:-2] + if self.numberPlusUnits(word): continue if len(word) > 4 and spell.unknown([word]) and not self.checkMultiWords(word) and not self.wordBeforeId(word): - print(self.file, value_index, '/', num_values, '"' + original + '"', bcolors.FAIL + word + bcolors.ENDC, + # Highlight words that appeared in Wikipedia list. + print(bcolors.BOLD if word in wiki_db else '', + self.file, value_index, '/', num_values, '"' + original + '"', bcolors.FAIL + word + bcolors.ENDC, ' -> ', '?') # TODO: this can be interesting, but takes too long! @@ -261,9 +280,24 @@ def removeContractions(code_string): def removeComments(code_string): code_string = re.sub(re.compile(r"/\*.*?\*/", re.DOTALL), "" , code_string) # C-style comment # Avoid matching // where it is allowed, e.g., https://www... or file:///... 
- code_string = re.sub(re.compile(r"(?..." part of the document. + parser = TypoSourceDocumentParser() + parser.feed(content) + content = parser.content.strip() + + wiki_db = dict(line.lower().split('->', maxsplit=1) for line in content.splitlines()) + del wiki_db['cmo'] # All false positives. + del wiki_db['ect'] # Too many false positives. + del wiki_db['thru'] # We'll let that one thru. ;-) + del wiki_db['sargeant'] # All false positives. + + # Remove each word from dict + removed = 0 + for word in wiki_db: + try: + if should_exit: + exit(1) + spell.word_frequency.remove_words([word]) + #print('Removed', word) + removed += 1 + except Exception: + pass + + print('Removed', removed, 'known bad words') + except Exception: + print('Failed to fetch and/or parse Wikipedia mispellings!') + + # Get files from wherever command-line args indicate. files = [] @@ -423,14 +533,15 @@ if args.file: exit(1) else: files.append(f) -elif args.commits: +if args.commits: # Get files affected by specified number of commits. command = ['git', 'diff', '--name-only', 'HEAD~' + args.commits] files = [f.decode('utf-8') for f in subprocess.check_output(command).splitlines()] # Filter files files = list(filter(lambda f : os.path.exists(f) and isAppropriateFile(f) and not isGeneratedFile(f), files)) -elif args.open: + +if args.open: # Unstaged changes. command = ['git', 'diff', '--name-only'] files = [f.decode('utf-8') @@ -444,26 +555,42 @@ elif args.open: # Filter files. files_staged = list(filter(lambda f : isAppropriateFile(f) and not isGeneratedFile(f), files_staged)) for f in files_staged: - if not f in files: + if f not in files: files.append(f) -else: - # By default, scan dissectors directory - folder = os.path.join('epan', 'dissectors') - # But overwrite with any folder entry. 
- if args.folder: - folder = args.folder + +if args.glob: + # Add specified file(s) + for g in args.glob: + for f in glob.glob(g): + if not os.path.isfile(f): + print('Chosen file', f, 'does not exist.') + exit(1) + else: + files.append(f) + +if args.folder: + for folder in args.folder: if not os.path.isdir(folder): print('Folder', folder, 'not found!') exit(1) + # Find files from folder. + print('Looking for files in', folder) + files += findFilesInFolder(folder, not args.no_recurse) + +# By default, scan dissector files. +if not args.file and not args.open and not args.commits and not args.glob and not args.folder: + # By default, scan dissectors directory + folder = os.path.join('epan', 'dissectors') # Find files from folder. print('Looking for files in', folder) files = findFilesInFolder(folder, not args.no_recurse) + # If scanning a subset of files, list them here. print('Examining:') -if args.file or args.folder or args.commits or args.open: +if args.file or args.folder or args.commits or args.open or args.glob: if files: print(' '.join(files), '\n') else: @@ -475,7 +602,7 @@ else: # Now check the chosen files. for f in files: # Check this file. - checkFile(f) + checkFile(f, check_comments=args.comments) # But get out if control-C has been pressed. if should_exit: exit(1) diff --git a/tools/check_static.py b/tools/check_static.py index fbd1d11c..773c0d60 100755 --- a/tools/check_static.py +++ b/tools/check_static.py @@ -14,6 +14,9 @@ import signal # Look for dissector symbols that could/should be static. # This will not run on Windows, unless/until we check the platform # and use (I think) dumpbin.exe +# +# N.B. Will report false positives if symbols are extern'd rather than +# declared in a header file. # Try to exit soon after Ctrl-C is pressed. should_exit = False @@ -26,7 +29,8 @@ def signal_handler(sig, frame): signal.signal(signal.SIGINT, signal_handler) # Allow this as a default build folder name... 
-build_folder = os.getcwd() + '-build' +build_folder = os.getcwd() + '-build' + # Record which symbols are referred to (by a set of files). class CalledSymbols: @@ -34,6 +38,9 @@ class CalledSymbols: self.referred = set() def addCalls(self, file): + if should_exit: + exit(1) + # Make sure that file is built. last_dir = os.path.split(os.path.dirname(file))[-1] if file.find('ui/cli') != -1: @@ -47,42 +54,54 @@ class CalledSymbols: else: object_file = os.path.join(build_folder, os.path.dirname(file), 'CMakeFiles', last_dir + '.dir', os.path.basename(file) + '.o') if not os.path.exists(object_file): + # Not built for whatever reason.. #print('Warning -', object_file, 'does not exist') return + + # Run command to check symbols. command = ['nm', object_file] for f in subprocess.check_output(command).splitlines(): - l = str(f)[2:-1] - # Lines might or might not have an address before letter and symbol. + line = str(f)[2:-1] + # Lines might, or might not, have an address before letter and symbol. p1 = re.compile(r'[0-9a-f]* ([a-zA-Z]) (.*)') p2 = re.compile(r'[ ]* ([a-zA-Z]) (.*)') - m = p1.match(l) + m = p1.match(line) if not m: - m = p2.match(l) + m = p2.match(line) if m: letter = m.group(1) function_name = m.group(2) - # Only interested in undefined references to symbols. + # Only interested in undefined/external references to symbols. if letter == 'U': self.referred.add(function_name) -# Record which symbols are defined in a single file. +# Record which symbols are defined in a single dissector file. class DefinedSymbols: def __init__(self, file): self.filename = file - self.global_dict = {} + self.global_symbols = {} # map from defined symbol -> whole output-line self.header_file_contents = None + self.from_generated_file = isGeneratedFile(file) # Make sure that file is built. 
- object_file = os.path.join(build_folder, 'epan', 'dissectors', 'CMakeFiles', 'dissectors.dir', os.path.basename(file) + '.o') - + if self.filename.startswith('epan'): + object_file = os.path.join(build_folder, 'epan', 'dissectors', 'CMakeFiles', 'dissectors.dir', os.path.basename(file) + '.o') + elif self.filename.startswith('plugins'): + plugin_base_dir = os.path.dirname(file) + plugin_base_name = os.path.basename(plugin_base_dir) + object_file = os.path.join(build_folder, plugin_base_dir, 'CMakeFiles', plugin_base_name + '.dir', os.path.basename(file) + '.o') + else: + #print("Warning - can't determine object file for ", self.filename) + return if not os.path.exists(object_file): #print('Warning -', object_file, 'does not exist') return + # Get header file contents if available header_file= file.replace('.c', '.h') try: f = open(header_file, 'r') @@ -90,29 +109,30 @@ class DefinedSymbols: except IOError: pass - + # Run command to see which symbols are defined command = ['nm', object_file] for f in subprocess.check_output(command).splitlines(): # Line consists of whitespace, [address], letter, symbolName - l = str(f)[2:-1] + line = str(f)[2:-1] p = re.compile(r'[0-9a-f]* ([a-zA-Z]) (.*)') - m = p.match(l) + m = p.match(line) if m: letter = m.group(1) function_name = m.group(2) - # globally-defined symbols. Would be 't' or 'd' if already static. + # Globally-defined symbols. Would be 't' or 'd' if already static.. if letter in 'TD': - self.add(function_name, l) + self.addDefinedSymbol(function_name, line) - def add(self, letter, function_name): - self.global_dict[letter] = function_name + def addDefinedSymbol(self, symbol, line): + self.global_symbols[symbol] = line + # Check if a given symbol is mentioned in headers def mentionedInHeaders(self, symbol): if self.header_file_contents: if self.header_file_contents.find(symbol) != -1: return True # Also check some of the 'common' header files that don't match the dissector file name. 
- # TODO: could cache the contents of these files, but it's not that slow. + # TODO: could cache the contents of these files? common_mismatched_headers = [ os.path.join('epan', 'dissectors', 'packet-ncp-int.h'), os.path.join('epan', 'dissectors', 'packet-mq.h'), os.path.join('epan', 'dissectors', 'packet-ip.h'), @@ -133,13 +153,15 @@ class DefinedSymbols: return False - def check(self, called_symbols): + def checkIfSymbolsAreCalled(self, called_symbols): global issues_found - for f in self.global_dict: - if not f in called_symbols: + for f in self.global_symbols: + if f not in called_symbols: mentioned_in_header = self.mentionedInHeaders(f) - fun = self.global_dict[f] - print(self.filename, '(' + fun + ')', 'is not referred to so could be static?', '(in header)' if mentioned_in_header else '') + fun = self.global_symbols[f] + print(self.filename, '' if not self.from_generated_file else '(GENERATED)', + '(' + fun + ')', + 'is not referred to so could be static?', '(declared in header)' if mentioned_in_header else '') issues_found += 1 @@ -147,6 +169,7 @@ class DefinedSymbols: # Helper functions. def isDissectorFile(filename): + # Ignoring usb.c & errno.c p = re.compile(r'(packet|file)-.*\.c') return p.match(filename) @@ -212,12 +235,10 @@ def findFilesInFolder(folder): def is_dissector_file(filename): - p = re.compile(r'.*packet-.*\.c') + p = re.compile(r'.*(packet|file)-.*\.c') return p.match(filename) -issues_found = 0 - ################################################################# @@ -237,6 +258,7 @@ parser.add_argument('--open', action='store_true', args = parser.parse_args() +issues_found = 0 # Get files from wherever command-line args indicate. 
files = [] @@ -247,7 +269,7 @@ if args.build_folder: if args.file: # Add specified file(s) for f in args.file: - if not f.startswith('epan'): + if not os.path.isfile(f) and not f.startswith('epan'): f = os.path.join('epan', 'dissectors', f) if not os.path.isfile(f): print('Chosen file', f, 'does not exist.') @@ -277,12 +299,12 @@ elif args.open: for f in files: files.append(f) for f in files_staged: - if not f in files: + if f not in files: files.append(f) else: # Find all dissector files from folder. files = findDissectorFilesInFolder(os.path.join('epan', 'dissectors'), - include_generated=False) + include_generated=True) # If scanning a subset of files, list them here. @@ -316,11 +338,12 @@ for d in findFilesInFolder(os.path.join('ui', 'cli')): called.addCalls(d) -# Now check identified files. +# Now check identified dissector files. for f in files: if should_exit: exit(1) - DefinedSymbols(f).check(called.referred) + # Are these symbols called - or could they be deleted or static???? + DefinedSymbols(f).checkIfSymbolsAreCalled(called.referred) # Show summary. print(issues_found, 'issues found') diff --git a/tools/check_tfs.py b/tools/check_tfs.py index cecf8d9d..f7c59377 100755 --- a/tools/check_tfs.py +++ b/tools/check_tfs.py @@ -12,12 +12,10 @@ import argparse import signal # This utility scans for tfs items, and works out if standard ones -# could have been used intead (from epan/tfs.c) +# could have been used instead (from epan/tfs.c) # Can also check for value_string where common tfs could be used instead. # TODO: -# - check how many of the definitions in epan/tfs.c are used in other dissectors -# - although even if unused, might be in external dissectors? # - consider merging Item class with check_typed_item_calls.py ? 
@@ -39,7 +37,7 @@ def isGeneratedFile(filename): return False # Open file - f_read = open(os.path.join(filename), 'r') + f_read = open(os.path.join(filename), 'r', encoding="utf8", errors="ignore") lines_tested = 0 for line in f_read: # The comment to say that its generated is near the top, so give up once @@ -70,60 +68,61 @@ def isGeneratedFile(filename): # Keep track of custom entries that might appear in multiple dissectors, # so we can consider adding them to tfs.c custom_tfs_entries = {} -def AddCustomEntry(val1, val2, file): +def AddCustomEntry(true_val, false_val, file): global custom_tfs_entries - if (val1, val2) in custom_tfs_entries: - custom_tfs_entries[(val1, val2)].append(file) + if (true_val, false_val) in custom_tfs_entries: + custom_tfs_entries[(true_val, false_val)].append(file) else: - custom_tfs_entries[(val1, val2)] = [file] - + custom_tfs_entries[(true_val, false_val)] = [file] +# Individual parsed TFS entry class TFS: - def __init__(self, file, name, val1, val2): + def __init__(self, file, name, true_val, false_val): self.file = file self.name = name - self.val1 = val1 - self.val2 = val2 + self.true_val = true_val + self.false_val = false_val global warnings_found # Should not be empty - if not len(val1) or not len(val2): + if not len(true_val) or not len(false_val): print('Warning:', file, name, 'has an empty field', self) warnings_found += 1 #else: # Strange if one begins with capital but other doesn't? - #if val1[0].isalpha() and val2[0].isalpha(): - # if val1[0].isupper() != val2[0].isupper(): + #if true_val[0].isalpha() and false_val[0].isalpha(): + # if true_val[0].isupper() != false_val[0].isupper(): # print(file, name, 'one starts lowercase and the other upper', self) # Leading or trailing space should not be needed. 
- if val1.startswith(' ') or val1.endswith(' '): - print('Note: ' + self.file + ' ' + self.name + ' - false val begins or ends with space \"' + self.val1 + '\"') - if val2.startswith(' ') or val2.endswith(' '): - print('Note: ' + self.file + ' ' + self.name + ' - true val begins or ends with space \"' + self.val2 + '\"') + if true_val.startswith(' ') or true_val.endswith(' '): + print('Note: ' + self.file + ' ' + self.name + ' - true val begins or ends with space \"' + self.true_val + '\"') + if false_val.startswith(' ') or false_val.endswith(' '): + print('Note: ' + self.file + ' ' + self.name + ' - false val begins or ends with space \"' + self.false_val + '\"') # Should really not be identical... - if val1.lower() == val2.lower(): + if true_val.lower() == false_val.lower(): print('Warning:', file, name, 'true and false strings are the same', self) warnings_found += 1 # Shouldn't both be negation (with exception..) - if (file != os.path.join('epan', 'dissectors', 'packet-smb.c') and (val1.lower().find('not ') != -1) and (val2.lower().find('not ') != -1)): + if (file != os.path.join('epan', 'dissectors', 'packet-smb.c') and (true_val.lower().find('not ') != -1) and (false_val.lower().find('not ') != -1)): print('Warning:', file, name, self, 'both strings contain not') warnings_found += 1 # Not expecting full-stops inside strings.. - if val1.find('.') != -1 or val2.find('.') != -1: + if true_val.find('.') != -1 or false_val.find('.') != -1: print('Warning:', file, name, 'Period found in string', self) warnings_found += 1 def __str__(self): - return '{' + '"' + self.val1 + '", "' + self.val2 + '"}' + return '{' + '"' + self.true_val + '", "' + self.false_val + '"}' +# Only looking at in terms of could/should it be TFS instead. class ValueString: def __init__(self, file, name, vals): self.file = file @@ -198,7 +197,7 @@ class Item: self.strings = strings self.mask = mask - # N.B. Not sestting mask by looking up macros. + # N.B. Not setting mask by looking up macros. 
self.item_type = item_type self.type_modifier = type_modifier @@ -210,16 +209,10 @@ class Item: if self.check_bit(self.mask_value, n): self.bits_set += 1 - def check_bit(self, value, n): - return (value & (0x1 << n)) != 0 - - def __str__(self): return 'Item ({0} "{1}" {2} type={3}:{4} strings={5} mask={6})'.format(self.filename, self.label, self.filter, self.item_type, self.type_modifier, self.strings, self.mask) - - def set_mask_value(self, macros): try: self.mask_read = True @@ -227,12 +220,11 @@ class Item: # Substitute mask if found as a macro.. if self.mask in macros: self.mask = macros[self.mask] - elif any(not c in '0123456789abcdefABCDEFxX' for c in self.mask): + elif any(c not in '0123456789abcdefABCDEFxX' for c in self.mask): self.mask_read = False self.mask_value = 0 return - # Read according to the appropriate base. if self.mask.startswith('0x'): self.mask_value = int(self.mask, 16) @@ -240,7 +232,7 @@ class Item: self.mask_value = int(self.mask, 8) else: self.mask_value = int(self.mask, 10) - except: + except Exception: self.mask_read = False self.mask_value = 0 @@ -262,8 +254,7 @@ class Item: try: # For FT_BOOLEAN, modifier is just numerical number of bits. Round up to next nibble. return int((int(self.type_modifier) + 3)/4)*4 - except: - #print('oops', self) + except Exception: return 0 else: if self.item_type in field_widths: @@ -289,7 +280,7 @@ def removeComments(code_string): def findTFS(filename): tfs_found = {} - with open(filename, 'r', encoding="utf8") as f: + with open(filename, 'r', encoding="utf8", errors="ignore") as f: contents = f.read() # Example: const true_false_string tfs_yes_no = { "Yes", "No" }; @@ -299,10 +290,10 @@ def findTFS(filename): matches = re.finditer(r'\sconst\s*true_false_string\s*([a-zA-Z0-9_]*)\s*=\s*{\s*\"([a-zA-Z_0-9/:! ]*)\"\s*,\s*\"([a-zA-Z_0-9/:! ]*)\"', contents) for m in matches: name = m.group(1) - val1 = m.group(2) - val2 = m.group(3) + true_val = m.group(2) + false_val = m.group(3) # Store this entry. 
- tfs_found[name] = TFS(filename, name, val1, val2) + tfs_found[name] = TFS(filename, name, true_val, false_val) return tfs_found @@ -317,7 +308,7 @@ def findValueStrings(filename): # { 0, NULL } #}; - with open(filename, 'r', encoding="utf8") as f: + with open(filename, 'r', encoding="utf8", errors="ignore") as f: contents = f.read() # Remove comments so as not to trip up RE. @@ -333,9 +324,8 @@ def findValueStrings(filename): # Look for hf items (i.e. full item to be registered) in a dissector file. def find_items(filename, macros, check_mask=False, mask_exact_width=False, check_label=False, check_consecutive=False): - is_generated = isGeneratedFile(filename) items = {} - with open(filename, 'r', encoding="utf8") as f: + with open(filename, 'r', encoding="utf8", errors="ignore") as f: contents = f.read() # Remove comments so as not to trip up RE. contents = removeComments(contents) @@ -354,7 +344,7 @@ def find_items(filename, macros, check_mask=False, mask_exact_width=False, check def find_macros(filename): macros = {} - with open(filename, 'r', encoding="utf8") as f: + with open(filename, 'r', encoding="utf8", errors="ignore") as f: contents = f.read() # Remove comments so as not to trip up RE. contents = removeComments(contents) @@ -368,31 +358,32 @@ def find_macros(filename): def is_dissector_file(filename): - p = re.compile(r'.*packet-.*\.c') + p = re.compile(r'.*(packet|file)-.*\.c') return p.match(filename) def findDissectorFilesInFolder(folder): - # Look at files in sorted order, to give some idea of how far through is. 
- files = [] + files = set() - for f in sorted(os.listdir(folder)): - if should_exit: - return - if is_dissector_file(f): - filename = os.path.join(folder, f) - files.append(filename) - return files + for path, tmp_unused, names in os.walk(folder): + for f in names: + if should_exit: + return + if is_dissector_file(f): + files.add(os.path.join(path, f)) + return files +# Global counts warnings_found = 0 errors_found = 0 +# name -> count +common_usage = {} -tfs_found = 0 # Check the given dissector file. -def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=False): +def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=False, count_common_usage=False): global warnings_found global errors_found @@ -422,14 +413,15 @@ def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=F # if os.path.commonprefix([filename, 'plugin/epan/']) == '': exact_case = False - if file_tfs[f].val1 == common_tfs[c].val1 and file_tfs[f].val2 == common_tfs[c].val2: + if file_tfs[f].true_val == common_tfs[c].true_val and file_tfs[f].false_val == common_tfs[c].false_val: found = True exact_case = True - elif file_tfs[f].val1.upper() == common_tfs[c].val1.upper() and file_tfs[f].val2.upper() == common_tfs[c].val2.upper(): + elif file_tfs[f].true_val.upper() == common_tfs[c].true_val.upper() and file_tfs[f].false_val.upper() == common_tfs[c].false_val.upper(): found = True if found: - print("Error:" if exact_case else "Warn: ", filename, f, "- could have used", c, 'from tfs.c instead: ', common_tfs[c], + print("Error:" if exact_case else "Warning: ", filename, f, + "- could have used", c, 'from tfs.c instead: ', common_tfs[c], '' if exact_case else ' (capitalisation differs)') if exact_case: errors_found += 1 @@ -438,7 +430,7 @@ def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=F break if not found: if look_for_common: - AddCustomEntry(file_tfs[f].val1, file_tfs[f].val2, filename) + 
AddCustomEntry(file_tfs[f].true_val, file_tfs[f].false_val, filename) if check_value_strings: # Get macros @@ -456,7 +448,6 @@ def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=F found = False exact_case = False - #print('Candidate', v, vs[v]) for c in common_tfs: found = False @@ -473,10 +464,10 @@ def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=F # if os.path.commonprefix([filename, 'plugin/epan/']) == '': exact_case = False - if common_tfs[c].val1 == vs[v].parsed_vals[True] and common_tfs[c].val2 == vs[v].parsed_vals[False]: + if common_tfs[c].true_val == vs[v].parsed_vals[True] and common_tfs[c].false_val == vs[v].parsed_vals[False]: found = True exact_case = True - elif common_tfs[c].val1.upper() == vs[v].parsed_vals[True].upper() and common_tfs[c].val2.upper() == vs[v].parsed_vals[False].upper(): + elif common_tfs[c].true_val.upper() == vs[v].parsed_vals[True].upper() and common_tfs[c].false_val.upper() == vs[v].parsed_vals[False].upper(): found = True # Do values match? 
@@ -488,11 +479,24 @@ def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=F if re.match(r'VALS\(\s*'+v+r'\s*\)', items[i].strings): if items[i].bits_set == 1: print("Warn:" if exact_case else "Note:", filename, 'value_string', "'"+v+"'", - "- could have used", c, 'from tfs.c instead: ', common_tfs[c], 'for', i, - '' if exact_case else ' (capitalisation differs)') + '- could have used tfs.c entry instead: for', i, + ' - "FT_BOOLEAN,', str(items[i].get_field_width_in_bits()) + ', TFS(&' + c + '),"', + '' if exact_case else ' (capitalisation differs)') if exact_case: warnings_found += 1 + if count_common_usage: + # Look for TFS(&) in dissector + with open(filename, 'r') as f: + contents = f.read() + for c in common_tfs: + m = re.search(r'TFS\(\s*\&' + c + r'\s*\)', contents) + if m: + if c not in common_usage: + common_usage[c] = 1 + else: + common_usage[c] += 1 + ################################################################# @@ -512,46 +516,46 @@ parser.add_argument('--check-value-strings', action='store_true', parser.add_argument('--common', action='store_true', help='check for potential new entries for tfs.c') - +parser.add_argument('--common-usage', action='store_true', + help='count how many dissectors are using common tfs entries') args = parser.parse_args() # Get files from wherever command-line args indicate. -files = [] +files = set() if args.file: # Add specified file(s) for f in args.file: - if not f.startswith('epan'): + if not os.path.isfile(f) and not f.startswith('epan'): f = os.path.join('epan', 'dissectors', f) if not os.path.isfile(f): print('Chosen file', f, 'does not exist.') exit(1) else: - files.append(f) + files.add(f) elif args.commits: # Get files affected by specified number of commits. 
command = ['git', 'diff', '--name-only', 'HEAD~' + args.commits] - files = [f.decode('utf-8') - for f in subprocess.check_output(command).splitlines()] + files = {f.decode('utf-8') + for f in subprocess.check_output(command).splitlines()} # Will examine dissector files only - files = list(filter(lambda f : is_dissector_file(f), files)) + files = set(filter(is_dissector_file, files)) elif args.open: # Unstaged changes. command = ['git', 'diff', '--name-only'] - files = [f.decode('utf-8') - for f in subprocess.check_output(command).splitlines()] + files = {f.decode('utf-8') + for f in subprocess.check_output(command).splitlines()} # Only interested in dissector files. - files = list(filter(lambda f : is_dissector_file(f), files)) + files = set(filter(is_dissector_file, files)) # Staged changes. command = ['git', 'diff', '--staged', '--name-only'] - files_staged = [f.decode('utf-8') - for f in subprocess.check_output(command).splitlines()] + files_staged = {f.decode('utf-8') + for f in subprocess.check_output(command).splitlines()} # Only interested in dissector files. - files_staged = list(filter(lambda f : is_dissector_file(f), files_staged)) + files_staged = set(filter(is_dissector_file, files_staged)) for f in files_staged: - if not f in files: - files.append(f) + files.add(f) else: # Find all dissector files from folder. files = findDissectorFilesInFolder(os.path.join('epan', 'dissectors')) @@ -561,7 +565,7 @@ else: print('Examining:') if args.file or args.commits or args.open: if files: - print(' '.join(files), '\n') + print(' '.join(sorted(files)), '\n') else: print('No files to check.\n') else:
+for f in sorted(files): if should_exit: exit(1) if not isGeneratedFile(f): - checkFile(f, tfs_entries, look_for_common=args.common, check_value_strings=args.check_value_strings) + checkFile(f, common_tfs_entries, look_for_common=args.common, + check_value_strings=args.check_value_strings, + count_common_usage=args.common_usage) # Report on commonly-defined values. if args.common: @@ -587,6 +594,12 @@ if args.common: if len(custom_tfs_entries[c]) > 2: print(c, 'appears', len(custom_tfs_entries[c]), 'times, in: ', custom_tfs_entries[c]) +if args.common_usage: + for c in common_tfs_entries: + if c in common_usage: + print(c, 'used in', common_usage[c], 'dissectors') + else: + print('***', c, 'IS NOT USED! ***') # Show summary. print(warnings_found, 'warnings found') diff --git a/tools/check_typed_item_calls.py b/tools/check_typed_item_calls.py index 24520c6e..3923264e 100755 --- a/tools/check_typed_item_calls.py +++ b/tools/check_typed_item_calls.py @@ -47,12 +47,12 @@ class Call: if length: try: self.length = int(length) - except: + except Exception: if length.isupper(): if length in macros: try: self.length = int(macros[length]) - except: + except Exception: pass pass @@ -84,6 +84,9 @@ item_lengths['FT_INT56'] = 7 item_lengths['FT_UINT64'] = 8 item_lengths['FT_INT64'] = 8 item_lengths['FT_ETHER'] = 6 +item_lengths['FT_IPv4'] = 4 +item_lengths['FT_IPv6'] = 16 + # TODO: other types... @@ -97,16 +100,16 @@ class APICheck: if fun_name.startswith('ptvcursor'): # RE captures function name + 1st 2 args (always ptvc + hfindex) - self.p = re.compile('[^\n]*' + self.fun_name + '\s*\(([a-zA-Z0-9_]+),\s*([a-zA-Z0-9_]+)') + self.p = re.compile('[^\n]*' + self.fun_name + r'\s*\(([a-zA-Z0-9_]+),\s*([a-zA-Z0-9_]+)') elif fun_name.find('add_bitmask') == -1: # Normal case. 
# RE captures function name + 1st 2 args (always tree + hfindex + length) - self.p = re.compile('[^\n]*' + self.fun_name + '\s*\(([a-zA-Z0-9_]+),\s*([a-zA-Z0-9_]+),\s*[a-zA-Z0-9_]+,\s*[a-zA-Z0-9_]+,\s*([a-zA-Z0-9_]+)') + self.p = re.compile('[^\n]*' + self.fun_name + r'\s*\(([a-zA-Z0-9_]+),\s*([a-zA-Z0-9_]+),\s*[a-zA-Z0-9_]+,\s*[a-zA-Z0-9_]+,\s*([a-zA-Z0-9_]+)') else: # _add_bitmask functions. # RE captures function name + 1st + 4th args (always tree + hfindex) # 6th arg is 'fields' - self.p = re.compile('[^\n]*' + self.fun_name + '\s*\(([a-zA-Z0-9_]+),\s*[a-zA-Z0-9_]+,\s*[a-zA-Z0-9_]+,\s*([a-zA-Z0-9_]+)\s*,\s*[a-zA-Z0-9_]+\s*,\s*([a-zA-Z0-9_]+)\s*,') + self.p = re.compile('[^\n]*' + self.fun_name + r'\s*\(([a-zA-Z0-9_]+),\s*[a-zA-Z0-9_]+,\s*[a-zA-Z0-9_]+,\s*([a-zA-Z0-9_]+)\s*,\s*[a-zA-Z0-9_]+\s*,\s*([a-zA-Z0-9_]+)\s*,') self.file = None self.mask_allowed = True @@ -145,7 +148,6 @@ class APICheck: length = m.group(3) # Add call. We have length if re had 3 groups. - num_groups = self.p.groups self.calls.append(Call(m.group(2), macros, line_number=line_number, @@ -160,7 +162,6 @@ class APICheck: # Walk past any l.s. 
0 bits in value n = 0 - mask_start = n # Walk through any bits that are set and check they are in mask while self.check_bit(value, n) and n <= 63: if not self.check_bit(mask, n): @@ -180,13 +181,15 @@ class APICheck: if self.fun_name.find('add_bits') == -1 and call.hf_name in items_defined: if call.length and items_defined[call.hf_name].item_type in item_lengths: if item_lengths[items_defined[call.hf_name].item_type] < call.length: - print('Warning:', self.file + ':' + str(call.line_number), - self.fun_name + ' called for', call.hf_name, ' - ', - 'item type is', items_defined[call.hf_name].item_type, 'but call has len', call.length) - warnings_found += 1 + # Don't warn if adding value - value is unlikely to just be bytes value + if self.fun_name.find('_add_uint') == -1: + print('Warning:', self.file + ':' + str(call.line_number), + self.fun_name + ' called for', call.hf_name, ' - ', + 'item type is', items_defined[call.hf_name].item_type, 'but call has len', call.length) + warnings_found += 1 # Needs a +ve length - if self.positive_length and call.length != None: + if self.positive_length and call.length is not None: if call.length != -1 and call.length <= 0: print('Error: ' + self.fun_name + '(.., ' + call.hf_name + ', ...) called at ' + self.file + ':' + str(call.line_number) + @@ -195,7 +198,7 @@ class APICheck: if call.hf_name in items_defined: # Is type allowed? - if not items_defined[call.hf_name].item_type in self.allowed_types: + if items_defined[call.hf_name].item_type not in self.allowed_types: print('Error: ' + self.fun_name + '(.., ' + call.hf_name + ', ...) 
called at ' + self.file + ':' + str(call.line_number) + ' with type ' + items_defined[call.hf_name].item_type) @@ -221,7 +224,7 @@ class APICheck: warnings_found += 1 if check_missing_items: - if call.hf_name in items_declared and not call.hf_name in items_declared_extern: + if call.hf_name in items_declared and call.hf_name not in items_defined and call.hf_name not in items_declared_extern: #not in common_hf_var_names: print('Warning:', self.file + ':' + str(call.line_number), self.fun_name + ' called for "' + call.hf_name + '"', ' - but no item found') @@ -237,15 +240,15 @@ class ProtoTreeAddItemCheck(APICheck): if not ptv: # proto_item * # proto_tree_add_item(proto_tree *tree, int hfindex, tvbuff_t *tvb, - # const gint start, gint length, const guint encoding) + # const gint start, gint length, const unsigned encoding) self.fun_name = 'proto_tree_add_item' - self.p = re.compile('[^\n]*' + self.fun_name + '\s*\(\s*[a-zA-Z0-9_]+?,\s*([a-zA-Z0-9_]+?),\s*[a-zA-Z0-9_\+\s]+?,\s*[^,.]+?,\s*(.+),\s*([^,.]+?)\);') + self.p = re.compile('[^\n]*' + self.fun_name + r'\s*\(\s*[a-zA-Z0-9_]+?,\s*([a-zA-Z0-9_]+?),\s*[a-zA-Z0-9_\+\s]+?,\s*[^,.]+?,\s*(.+),\s*([^,.]+?)\);') else: # proto_item * # ptvcursor_add(ptvcursor_t *ptvc, int hfindex, gint length, - # const guint encoding) + # const unsigned encoding) self.fun_name = 'ptvcursor_add' - self.p = re.compile('[^\n]*' + self.fun_name + '\s*\([^,.]+?,\s*([^,.]+?),\s*([^,.]+?),\s*([a-zA-Z0-9_\-\>]+)') + self.p = re.compile('[^\n]*' + self.fun_name + r'\s*\([^,.]+?,\s*([^,.]+?),\s*([^,.]+?),\s*([a-zA-Z0-9_\-\>]+)') def find_calls(self, file, macros): @@ -279,7 +282,7 @@ class ProtoTreeAddItemCheck(APICheck): enc = m.group(3) hf_name = m.group(1) if not enc.startswith('ENC_'): - if not enc in { 'encoding', 'enc', 'client_is_le', 'cigi_byte_order', 'endian', 'endianess', 'machine_encoding', 'byte_order', 'bLittleEndian', + if enc not in { 'encoding', 'enc', 'client_is_le', 'cigi_byte_order', 'endian', 'endianess', 'machine_encoding', 
'byte_order', 'bLittleEndian', 'p_mq_parm->mq_str_enc', 'p_mq_parm->mq_int_enc', 'iEnc', 'strid_enc', 'iCod', 'nl_data->encoding', 'argp->info->encoding', 'gquic_info->encoding', 'writer_encoding', @@ -306,7 +309,9 @@ class ProtoTreeAddItemCheck(APICheck): 'BASE_SHOW_UTF_8_PRINTABLE', 'dhcp_secs_endian', 'is_mdns ? ENC_UTF_8|ENC_NA : ENC_ASCII|ENC_NA', - 'xl_encoding' + 'xl_encoding', + 'my_frame_data->encoding_client', 'my_frame_data->encoding_results' + }: global warnings_found @@ -328,12 +333,15 @@ class ProtoTreeAddItemCheck(APICheck): if call.hf_name in items_defined: if call.length and items_defined[call.hf_name].item_type in item_lengths: if item_lengths[items_defined[call.hf_name].item_type] < call.length: - print('Warning:', self.file + ':' + str(call.line_number), - self.fun_name + ' called for', call.hf_name, ' - ', - 'item type is', items_defined[call.hf_name].item_type, 'but call has len', call.length) - warnings_found += 1 + # On balance, it is not worth complaining about these - the value is unlikely to be + # just the value found in these bytes.. 
+ if self.fun_name.find('_add_uint') == -1: + print('Warning:', self.file + ':' + str(call.line_number), + self.fun_name + ' called for', call.hf_name, ' - ', + 'item type is', items_defined[call.hf_name].item_type, 'but call has len', call.length) + warnings_found += 1 elif check_missing_items: - if call.hf_name in items_declared and not call.hf_name in items_declared_extern: + if call.hf_name in items_declared and call.hf_name not in items_declared_extern: #not in common_hf_var_names: print('Warning:', self.file + ':' + str(call.line_number), self.fun_name + ' called for "' + call.hf_name + '"', ' - but no item found') @@ -371,7 +379,9 @@ known_non_contiguous_fields = { 'wlan.fixed.capabilities.cfpoll.sta', 'hf_hiqnet_flags', 'hf_hiqnet_flagmask', 'hf_h223_mux_mpl', - 'rdp.flags.pkt' + 'rdp.flags.pkt', + 'erf.flags.if_raw', # confirmed by Stephen Donnelly + 'oran_fh_cus.sReSMask' } ################################################################################################## @@ -397,138 +407,157 @@ field_widths = { 'FT_INT64' : 64 } -# TODO: most of these might as well be strings... 
def is_ignored_consecutive_filter(filter): + ignore_filters = { + 'elf.sh_type', + 'elf.p_type', + 'btavrcp.pdu_id', + 'netlogon.dummy_string', + 'opa.reserved', + 'wassp.data.mu_mac', + 'thrift.type', + 'quake2.game.client.command.move.angles', + 'ipp.enum_value', + 'idrp.error.subcode', + 'ftdi-ft.lValue', + '6lowpan.src', + 'couchbase.flex_frame.frame.id', + 'rtps.param.id', + 'rtps.locator.port', + 'sigcomp.udvm.value', + 'opa.mad.attributemodifier.n', + 'smb.cmd', + 'sctp.checksum', + 'dhcp.option.end', + 'nfapi.num.bf.vector.bf.value', + 'dnp3.al.range.abs', + 'dnp3.al.range.quantity', + 'dnp3.al.index', + 'dnp3.al.size', + 'ftdi-ft.hValue', + 'homeplug_av.op_attr_cnf.data.sw_sub', + 'radiotap.he_mu.preamble_puncturing', + 'ndmp.file', + 'ocfs2.dlm.lvb', + 'oran_fh_cus.reserved', + 'qnet6.kif.msgsend.msg.read.xtypes0-7', + 'qnet6.kif.msgsend.msg.write.xtypes0-7', + 'mih.sig_strength', + 'couchbase.flex_frame.frame.len', + 'nvme-rdma.read_to_host_req', + 'rpcap.dummy', + 'sflow.flow_sample.output_interface', + 'socks.results', + 'opa.mad.attributemodifier.p', + 'v5ua.efa', + 'zbncp.data.tx_power', + 'zbncp.data.nwk_addr', + 'zbee_zcl_hvac.pump_config_control.attr.ctrl_mode', + 'nat-pmp.external_port', + 'zbee_zcl.attr.float', + 'wpan-tap.phr.fsk_ms.mode', + 'mysql.exec_flags', + 'pim.metric_pref', + 'modbus.regval_float', + 'alcap.cau.value', + 'bpv7.crc_field', + 'at.chld.mode', + 'btl2cap.psm', + 'srvloc.srvtypereq.nameauthlistlen', + 'a11.ext.code', + 'adwin_config.port', + 'afp.unknown', + 'ansi_a_bsmap.mid.digit_1', + 'ber.unknown.OCTETSTRING', + 'btatt.handle', + 'btl2cap.option_flushto', + 'cip.network_segment.prod_inhibit', + 'cql.result.rows.table_name', + 'dcom.sa.vartype', + 'f5ethtrailer.slot', + 'ipdr.cm_ipv6_addr', + 'mojito.kuid', + 'mtp3.priority', + 'pw.cw.length', + 'rlc.ciphered_data', + 'vp8.pld.pictureid', + 'gryphon.sched.channel', + 'pn_io.ioxs', + 'pn_dcp.block_qualifier_reset', + 'pn_dcp.suboption_device_instance', + 'nfs.attr', + 
'nfs.create_session_flags', + 'rmt-lct.toi64', + 'gryphon.data.header_length', + 'quake2.game.client.command.move.movement', + 'isup.parameter_type', + 'cip.port', + 'adwin.fifo_no', + 'bthci_evt.hci_vers_nr', + 'gryphon.usdt.stmin_active', + 'dnp3.al.anaout.int', + 'dnp3.al.ana.int', + 'dnp3.al.cnt', + 'bthfp.chld.mode', + 'nat-pmp.pml', + 'isystemactivator.actproperties.ts.hdr', + 'rtpdump.txt_addr', + 'unistim.vocoder.id', + 'mac.ueid', + 'cip.symbol.size', + 'dnp3.al.range.start', + 'dnp3.al.range.stop', + 'gtpv2.mp', + 'gvcp.cmd.resend.firstpacketid', + 'gvcp.cmd.resend.lastpacketid', + 'wlan.bf.reserved', + 'opa.sa.reserved', + 'rmt-lct.ext_tol_transfer_len', + 'pn_io.error_code2', + 'gryphon.ldf.schedsize', + 'wimaxmacphy.burst_opt_mimo_matrix_indicator', + 'ccsds.packet_type', + 'iso15765.flow_control.stmin', + 'msdo.PieceSize', + 'opa.clasportinfo.redirect.reserved', + 'p_mul.unused', + 'opa.pm.dataportcounters.reserved', + 'opa.switchinfo.switchcapabilitymask.reserved', + 'nvme-rdma.read_from_host_resp', + 'nvme-rdma.write_to_host_req', + 'netlink-route.ifla_linkstats.rx_errors.fifo_errs', + 'mtp3mg.japan_spare', + 'ixveriwave.errors.ip_checksum_error', + 'bpsec.asb.result_count', + 'btle.control.phys.le_coded_phy', + 'gsm_rlcmac.ul.gprs_multislot_class_exist', + 'tpm.resp.size', + 'sasp.flags.quiesce', + 'canopen.sdo.n', + 'cigi.celestial_sphere_control.date', + 'corosync_totemsrp.orf_token.seq', + 'dec_dna.flags.msglen', + 'hiqnet.device', + 'ipdr.cm_ipv6_addr_len', + 'ipdr.cm_ipv6_addr_string', + 'mpeg_descr.phone.nat_code_len' + } + if filter in ignore_filters: + return True + + ignore_patterns = [ - re.compile(r'^elf.sh_type'), - re.compile(r'^elf.p_type'), - re.compile(r'^btavrcp.pdu_id'), re.compile(r'^nstrace.trcdbg.val(\d+)'), - re.compile(r'^netlogon.dummy_string'), - re.compile(r'^opa.reserved'), re.compile(r'^mpls_pm.timestamp\d\..*'), - re.compile(r'^wassp.data.mu_mac'), - re.compile(r'^thrift.type'), - 
re.compile(r'^quake2.game.client.command.move.angles'), - re.compile(r'^ipp.enum_value'), - re.compile(r'^idrp.error.subcode'), - re.compile(r'^ftdi-ft.lValue'), - re.compile(r'^6lowpan.src'), - re.compile(r'^couchbase.flex_frame.frame.id'), - re.compile(r'^rtps.param.id'), - re.compile(r'^rtps.locator.port'), - re.compile(r'^sigcomp.udvm.value'), - re.compile(r'^opa.mad.attributemodifier.n'), - re.compile(r'^smb.cmd'), - re.compile(r'^sctp.checksum'), - re.compile(r'^dhcp.option.end'), - re.compile(r'^nfapi.num.bf.vector.bf.value'), - re.compile(r'^dnp3.al.range.abs'), - re.compile(r'^dnp3.al.range.quantity'), - re.compile(r'^dnp3.al.index'), - re.compile(r'^dnp3.al.size'), - re.compile(r'^ftdi-ft.hValue'), - re.compile(r'^homeplug_av.op_attr_cnf.data.sw_sub'), - re.compile(r'^radiotap.he_mu.preamble_puncturing'), - re.compile(r'^ndmp.file'), - re.compile(r'^ocfs2.dlm.lvb'), - re.compile(r'^oran_fh_cus.reserved'), - re.compile(r'^qnet6.kif.msgsend.msg.read.xtypes0-7'), - re.compile(r'^qnet6.kif.msgsend.msg.write.xtypes0-7'), - re.compile(r'^mih.sig_strength'), - re.compile(r'^couchbase.flex_frame.frame.len'), - re.compile(r'^nvme-rdma.read_to_host_req'), - re.compile(r'^rpcap.dummy'), - re.compile(r'^sflow.flow_sample.output_interface'), - re.compile(r'^socks.results'), - re.compile(r'^opa.mad.attributemodifier.p'), - re.compile(r'^v5ua.efa'), - re.compile(r'^zbncp.data.tx_power'), - re.compile(r'^zbncp.data.nwk_addr'), - re.compile(r'^zbee_zcl_hvac.pump_config_control.attr.ctrl_mode'), - re.compile(r'^nat-pmp.external_port'), - re.compile(r'^zbee_zcl.attr.float'), - re.compile(r'^wpan-tap.phr.fsk_ms.mode'), - re.compile(r'^mysql.exec_flags'), - re.compile(r'^pim.metric_pref'), - re.compile(r'^modbus.regval_float'), - re.compile(r'^alcap.cau.value'), - re.compile(r'^bpv7.crc_field'), - re.compile(r'^at.chld.mode'), - re.compile(r'^btl2cap.psm'), - re.compile(r'^srvloc.srvtypereq.nameauthlistlen'), - re.compile(r'^a11.ext.code'), - 
re.compile(r'^adwin_config.port'), - re.compile(r'^afp.unknown'), - re.compile(r'^ansi_a_bsmap.mid.digit_1'), - re.compile(r'^ber.unknown.OCTETSTRING'), - re.compile(r'^btatt.handle'), - re.compile(r'^btl2cap.option_flushto'), - re.compile(r'^cip.network_segment.prod_inhibit'), - re.compile(r'^cql.result.rows.table_name'), - re.compile(r'^dcom.sa.vartype'), - re.compile(r'^f5ethtrailer.slot'), - re.compile(r'^ipdr.cm_ipv6_addr'), - re.compile(r'^mojito.kuid'), - re.compile(r'^mtp3.priority'), - re.compile(r'^pw.cw.length'), - re.compile(r'^rlc.ciphered_data'), - re.compile(r'^vp8.pld.pictureid'), - re.compile(r'^gryphon.sched.channel'), - re.compile(r'^pn_io.ioxs'), - re.compile(r'^pn_dcp.block_qualifier_reset'), - re.compile(r'^pn_dcp.suboption_device_instance'), - re.compile(r'^nfs.attr'), - re.compile(r'^nfs.create_session_flags'), - re.compile(r'^rmt-lct.toi64'), - re.compile(r'^gryphon.data.header_length'), - re.compile(r'^quake2.game.client.command.move.movement'), - re.compile(r'^isup.parameter_type'), - re.compile(r'^cip.port'), - re.compile(r'^adwin.fifo_no'), - re.compile(r'^bthci_evt.hci_vers_nr'), - re.compile(r'^gryphon.usdt.stmin_active'), - re.compile(r'^dnp3.al.anaout.int'), - re.compile(r'^dnp3.al.ana.int'), - re.compile(r'^dnp3.al.cnt'), - re.compile(r'^bthfp.chld.mode'), - re.compile(r'^nat-pmp.pml'), - re.compile(r'^isystemactivator.actproperties.ts.hdr'), - re.compile(r'^rtpdump.txt_addr'), - re.compile(r'^unistim.vocoder.id'), - re.compile(r'^mac.ueid'), - re.compile(r'cip.symbol.size'), - re.compile(r'dnp3.al.range.start'), - re.compile(r'dnp3.al.range.stop'), - re.compile(r'gtpv2.mp'), - re.compile(r'gvcp.cmd.resend.firstpacketid'), - re.compile(r'gvcp.cmd.resend.lastpacketid'), - re.compile(r'wlan.bf.reserved'), - re.compile(r'opa.sa.reserved'), - re.compile(r'rmt-lct.ext_tol_transfer_len'), - re.compile(r'pn_io.error_code2'), - re.compile(r'gryphon.ldf.schedsize'), - re.compile(r'wimaxmacphy.burst_opt_mimo_matrix_indicator'), 
re.compile(r'alcap.*bwt.*.[b|f]w'), - re.compile(r'ccsds.packet_type'), - re.compile(r'iso15765.flow_control.stmin'), - re.compile(r'msdo.PieceSize'), - re.compile(r'opa.clasportinfo.redirect.reserved'), - re.compile(r'p_mul.unused'), re.compile(r'btle.control.phys.le_[1|2]m_phy'), - re.compile(r'opa.pm.dataportcounters.reserved'), - re.compile(r'opa.switchinfo.switchcapabilitymask.reserved'), - re.compile(r'nvme-rdma.read_from_host_resp'), - re.compile(r'nvme-rdma.write_to_host_req'), - re.compile(r'netlink-route.ifla_linkstats.rx_errors.fifo_errs'), - re.compile(r'mtp3mg.japan_spare'), - re.compile(r'ixveriwave.errors.ip_checksum_error'), - re.compile(r'ansi_a_bsmap.cm2.scm.bc_entry.opmode[0|1]') + re.compile(r'ansi_a_bsmap.cm2.scm.bc_entry.opmode[0|1]'), + re.compile(r'cemi.[n|x]') ] - for patt in ignore_patterns: if patt.match(filter): return True + return False @@ -549,7 +578,7 @@ class ValueString: value,label = m.group(1), m.group(2) if value in macros: value = macros[value] - elif any(not c in '0123456789abcdefABCDEFxX' for c in value): + elif any(c not in '0123456789abcdefABCDEFxX' for c in value): self.valid = False return @@ -563,12 +592,16 @@ class ValueString: value = int(value, 8) else: value = int(value, 10) - except: + except Exception: return global warnings_found # Check for value conflict before inserting + if do_extra_checks and value in self.parsed_vals and label == self.parsed_vals[value]: + print('Warning:', self.file, ': value_string', self.name, '- value ', value, 'repeated with same string - ', label) + warnings_found += 1 + if value in self.parsed_vals and label != self.parsed_vals[value]: print('Warning:', self.file, ': value_string', self.name, '- value ', value, 'repeated with different values - was', self.parsed_vals[value], 'now', label) @@ -583,14 +616,16 @@ class ValueString: 'other', 'for further study', 'future', 'vendor specific', 'obsolete', 'none', 'shall not be used', 'national use', 'unassigned', 'oem', 'user defined', 
'manufacturer specific', 'not specified', 'proprietary', 'operator-defined', - 'dynamically allocated', 'user specified', 'xxx', 'default', 'planned', 'not req' ] + 'dynamically allocated', 'user specified', 'xxx', 'default', 'planned', 'not req', + 'deprecated', 'not measured', 'unspecified', 'nationally defined', 'nondisplay', 'general', + 'tbd' ] excepted = False for ex in exceptions: if label.lower().find(ex) != -1: excepted = True break - if not excepted: + if not excepted and len(label)>2: print('Warning:', self.file, ': value_string', self.name, '- label ', label, 'repeated') warnings_found += 1 else: @@ -609,7 +644,7 @@ class ValueString: span = self.max_value - self.min_value + 1 if num_items > 4 and span > num_items and (span-num_items <=1): for val in range(self.min_value, self.max_value): - if not val in self.parsed_vals: + if val not in self.parsed_vals: print('Warning:', self.file, ': value_string', self.name, '- value', val, 'missing?', '(', num_items, 'entries)') global warnings_found warnings_found += 1 @@ -627,7 +662,7 @@ class ValueString: # Be forgiving about first or last entry first_val = list(self.parsed_vals)[0] last_val = list(self.parsed_vals)[-1] - if not first_val in matching_label_entries or not last_val in matching_label_entries: + if first_val not in matching_label_entries or last_val not in matching_label_entries: return print('Warning:', self.file, ': value_string', self.name, 'Labels match value except for 1!', matching_label_entries, num_items, self) @@ -680,21 +715,20 @@ class RangeString: self.max_value = -99999 # Now parse out each entry in the value_string - matches = re.finditer(r'\{\s*([0-9_A-Za-z]*)\s*,\s*([0-9_A-Za-z]*)\s*,\s*(".*?")\s*}\s*,', self.raw_vals) + matches = re.finditer(r'\{\s*([0-9_A-Za-z]*)\s*,\s*([0-9_A-Za-z]*)\s*,\s*(".*?")\s*\}\s*,', self.raw_vals) for m in matches: min,max,label = m.group(1), m.group(2), m.group(3) if min in macros: min = macros[min] - elif any(not c in '0123456789abcdefABCDEFxX' for c in 
min): + elif any(c not in '0123456789abcdefABCDEFxX' for c in min): self.valid = False return if max in macros: max = macros[max] - elif any(not c in '0123456789abcdefABCDEFxX' for c in max): + elif any(c not in '0123456789abcdefABCDEFxX' for c in max): self.valid = False return - try: # Read according to the appropriate base. if min.lower().startswith('0x'): @@ -714,7 +748,7 @@ class RangeString: max = int(max, 8) else: max = int(max, 10) - except: + except Exception: return # Now check what we've found. @@ -724,7 +758,7 @@ class RangeString: self.min_value = min # For overall max value, still use min of each entry. # It is common for entries to extend to e.g. 0xff, but at least we can check for items - # that can never match if we only chec the min. + # that can never match if we only check the min. if min > self.max_value: self.max_value = min @@ -734,7 +768,7 @@ class RangeString: print('Warning:', self.file, ': range_string label', label, 'hidden by', prev) warnings_found += 1 - # Max should not be > min + # Min should not be > max if min > max: print('Warning:', self.file, ': range_string', self.name, 'entry', label, 'min', min, '>', max) warnings_found += 1 @@ -747,12 +781,59 @@ class RangeString: # OK, add this entry self.parsed_vals.append(RangeStringEntry(min, max, label)) + # TODO: mark as not valid if not all pairs were successfully parsed? + def extraChecks(self): - pass - # TODO: some checks over all entries. e.g., - # - can multiple values be coalesced into 1? - # - if in all cases min==max, suggest value_string instead? + global warnings_found + + # if in all cases min==max, suggest value_string instead? + could_use_value_string = True + for val in self.parsed_vals: + if val.min != val.max: + could_use_value_string = False + break + if could_use_value_string: + print('Warning:', self.file, ': range_string', self.name, 'could be value_string instead!') + warnings_found += 1 + + # TODO: can multiple values be coalesced into fewer? 
+ # TODO: Partial overlapping? + + +class StringString: + def __init__(self, file, name, vals, macros, do_extra_checks=False): + self.file = file + self.name = name + self.raw_vals = vals + self.parsed_vals = {} + + terminated = False + global errors_found + + # Now parse out each entry in the string_string + matches = re.finditer(r'\{\s*(["0-9_A-Za-z\s\-]*?)\s*,\s*(["0-9_A-Za-z\s\-]*)\s*', self.raw_vals) + for m in matches: + key = m.group(1).strip() + value = m.group(2).strip() + if key in self.parsed_vals: + print('Error:', self.file, ': string_string', self.name, 'entry', key, 'has been added twice (values', + self.parsed_vals[key], 'and now', value, ')') + errors_found += 1 + + else: + self.parsed_vals[key] = value + # TODO: Also allow key to be "0" ? + if (key in { "NULL" }) and value == "NULL": + terminated = True + + if not terminated: + print('Error:', self.file, ': string_string', self.name, "is not terminated with { NULL, NULL }") + errors_found += 1 + + def extraChecks(self): + pass + # TODO: ? @@ -781,7 +862,7 @@ def findValueStrings(filename, macros, do_extra_checks=False): return vals_found -# Look for value_string entries in a dissector file. Return a dict name -> ValueString +# Look for range_string entries in a dissector file. Return a dict name -> RangeString def findRangeStrings(filename, macros, do_extra_checks=False): vals_found = {} @@ -805,6 +886,29 @@ def findRangeStrings(filename, macros, do_extra_checks=False): return vals_found +# Look for string_string entries in a dissector file. Return a dict name -> StringString +def findStringStrings(filename, macros, do_extra_checks=False): + vals_found = {} + + #static const string_string ice_candidate_types[] = { + # { "host", "Host candidate" }, + # { "srflx", "Server reflexive candidate" }, + # { 0, NULL } + #}; + + with open(filename, 'r', encoding="utf8") as f: + contents = f.read() + + # Remove comments so as not to trip up RE. 
+ contents = removeComments(contents) + + matches = re.finditer(r'.*const string_string\s*([a-zA-Z0-9_]*)\s*\[\s*\]\s*\=\s*\{([\{\}\d\,a-zA-Z0-9_\-\*\#\.:\/\(\)\'\s\"]*)\};', contents) + for m in matches: + name = m.group(1) + vals = m.group(2) + vals_found[name] = StringString(filename, name, vals, macros, do_extra_checks) + + return vals_found # The relevant parts of an hf item. Used as value in dict where hf variable name is key. @@ -821,11 +925,17 @@ class Item: self.hf = hf self.filter = filter self.label = label + self.blurb = blurb self.mask = mask self.strings = strings self.mask_exact_width = mask_exact_width - global warnings_found + global warnings_found, errors_found + + if blurb == '0': + print('Error:', filename, hf, ': - filter "' + filter + + '" has blurb of 0 - if no string, please set NULL instead') + errors_found += 1 self.set_mask_value(macros) @@ -846,16 +956,19 @@ class Item: Item.previousItems.pop() self.item_type = item_type + self.display = display + self.set_display_value(macros) # Optionally check label (short and long). if check_label: self.check_label(label, 'label') #self.check_label(blurb, 'blurb') + self.check_blurb_vs_label() # Optionally check that mask bits are contiguous if check_mask: - if self.mask_read and not mask in { 'NULL', '0x0', '0', '0x00' }: + if self.mask_read and mask not in { 'NULL', '0x0', '0', '0x00' }: self.check_contiguous_bits(mask) self.check_num_digits(self.mask) # N.B., if last entry in set is removed, see around 18,000 warnings @@ -889,9 +1002,16 @@ class Item: rs = range_strings[self.rs_name] self.check_range_string_range(rs.min_value, rs.max_value) + # Could/should this item be FT_FRAMENUM ? 
+ #if ((self.label.lower().find(' frame') != -1 or self.label.lower().find('frame ') != -1) and self.label.lower().find('frames') == -1 and + # (self.label.lower().find('in') != -1 or self.label.lower().find('for') != -1) and + # self.item_type == 'FT_UINT32' and self.mask_value == 0x0): + # print('Warning: ' + self.filename, self.hf, 'filter "' + self.filter + '", label "' + label + '"', 'item type is', self.item_type, '- could be FT_FRANENUM?') + # warnings_found += 1 + def __str__(self): - return 'Item ({0} "{1}" {2} type={3}:{4} {5} mask={6})'.format(self.filename, self.label, self.filter, self.item_type, self.display, self.strings, self.mask) + return 'Item ({0} {1} "{2}" {3} type={4}:{5} {6} mask={7})'.format(self.filename, self.hf, self.label, self.filter, self.item_type, self.display, self.strings, self.mask) def check_label(self, label, label_name): global warnings_found @@ -915,20 +1035,50 @@ class Item: print('Warning: ' + self.filename, self.hf, 'filter "' + self.filter + '"', label_name, '"' + label + '"', 'ends with an unnecessary colon') warnings_found += 1 + def check_blurb_vs_label(self): + global warnings_found + if self.blurb == "NULL": + return + + # Is the label longer than the blurb? + # Generated dissectors tend to write the type into the blurb field... + #if len(self.label) > len(self.blurb): + # print('Warning:', self.filename, self.hf, 'label="' + self.label + '" blurb="' + self.blurb + '"', "- label longer than blurb!!!") + + # Is the blurb just the label in a different order? 
+ label_words = self.label.lower().split(' ') + label_words.sort() + blurb_words = self.blurb.lower().split(' ') + blurb_words.sort() + + # Subset - often happens when part specific to that field is dropped + if set(label_words) > set(blurb_words): + print('Warning:', self.filename, self.hf, 'label="' + self.label + '" blurb="' + self.blurb + '"', "- words in blurb are subset of label words") + warnings_found += 1 + + # Just a re-ordering (but may also contain capitalization changes.) + if blurb_words == label_words: + print('Warning:', self.filename, self.hf, 'label="' + self.label + '" blurb="' + self.blurb + '"', "- blurb words are label words (re-ordered?)") + warnings_found += 1 + + # TODO: could have item know protocol name(s) from file this item was found in, and complain if blurb is just prot-name + label ? + def set_mask_value(self, macros): try: self.mask_read = True + # PIDL generator adds annoying parenthesis and spaces around mask.. + self.mask = self.mask.strip('() ') # Substitute mask if found as a macro.. if self.mask in macros: self.mask = macros[self.mask] - elif any(not c in '0123456789abcdefABCDEFxX' for c in self.mask): + elif any(c not in '0123456789abcdefABCDEFxX' for c in self.mask): self.mask_read = False self.mask_value = 0 + #print(self.filename, 'Could not read:', '"' + self.mask + '"') return - # Read according to the appropriate base. if self.mask.startswith('0x'): self.mask_value = int(self.mask, 16) @@ -936,10 +1086,39 @@ class Item: self.mask_value = int(self.mask, 8) else: self.mask_value = int(self.mask, 10) - except: + except Exception: self.mask_read = False self.mask_value = 0 + #if not self.mask_read: + # print('Could not read:', self.mask) + + + def set_display_value(self, macros): + try: + self.display_read = True + display = self.display + + # Substitute display if found as a macro.. 
+ if display in macros: + display = macros[display] + elif any(c not in '0123456789abcdefABCDEFxX' for c in display): + self.display_read = False + self.display_value = 0 + return + + # Read according to the appropriate base. + if self.display.startswith('0x'): + self.display_value = int(display, 16) + elif self.display.startswith('0'): + self.display_value = int(display, 8) + else: + self.display_value = int(display, 10) + except Exception: + self.display_read = False + self.display_value = 0 + + def check_value_string_range(self, vs_min, vs_max): item_width = self.get_field_width_in_bits() @@ -993,7 +1172,7 @@ class Item: def check_bit(self, value, n): return (value & (0x1 << n)) != 0 - # Output a warning if non-contigous bits are found in the mask (guint64). + # Output a warning if non-contiguous bits are found in the mask (uint64_t). # Note that this legimately happens in several dissectors where multiple reserved/unassigned # bits are conflated into one field. # - there is probably a cool/efficient way to check this (+1 => 1-bit set?) @@ -1026,7 +1205,7 @@ class Item: # Look up the field width field_width = 0 - if not self.item_type in field_widths: + if self.item_type not in field_widths: print('unexpected item_type is ', self.item_type) field_width = 64 else: @@ -1058,15 +1237,13 @@ class Item: if self.item_type == 'FT_BOOLEAN': if self.display == 'NULL': return 8 # i.e. 1 byte - elif self.display == 'BASE_NONE': - return 8 elif self.display == 'SEP_DOT': # from proto.h, only meant for FT_BYTES return 64 else: try: # For FT_BOOLEAN, modifier is just numerical number of bits. Round up to next nibble. 
return int((int(self.display) + 3)/4)*4 - except: + except Exception: return None else: if self.item_type in field_widths: @@ -1156,13 +1333,11 @@ class Item: def check_mask_if_in_field_array(self, mask, field_arrays): # Work out if this item appears in a field array found = False - array_name = None for arr in field_arrays: list = field_arrays[arr][0] if self.hf in list: # These need to have a mask - don't judge for being 0 found = True - array_name = arr break if found: @@ -1226,6 +1401,30 @@ class Item: return True + def check_boolean_length(self): + global errors_found + # If mask is 0, display must be BASE_NONE. + if self.item_type == 'FT_BOOLEAN' and self.mask_read and self.mask_value == 0 and self.display.find('BASE_NONE') == -1: + print('Error:', self.filename, self.hf, 'type is FT_BOOLEAN, no mask set (', self.mask, ') - display should be BASE_NONE, is instead', self.display) + errors_found += 1 + # TODO: check for length > 64? + + def check_string_display(self): + global warnings_found + if self.item_type in { 'FT_STRING', 'FT_STRINGZ', 'FT_UINT_STRING'}: + if self.display.find('BASE_NONE')==-1: + print('Warning:', self.filename, self.hf, 'type is', self.item_type, 'display must be BASE_NONE, is instead', self.display) + warnings_found += 1 + + + + + def check_ipv4_display(self): + global errors_found + if self.item_type == 'FT_IPv4' and self.display not in { 'BASE_NETMASK', 'BASE_NONE' }: + print('Error:', self.filename, self.hf, 'type is FT_IPv4, should be BASE_NETMASK or BASE_NONE, is instead', self.display) + errors_found += 1 + class CombinedCallsCheck: def __init__(self, file, apiChecks): @@ -1256,8 +1455,8 @@ class CombinedCallsCheck: # More compelling if close together.. 
if call.line_number>prev.line_number and call.line_number-prev.line_number <= 4: scope_different = False - for l in range(prev.line_number, call.line_number-1): - if lines[l].find('{') != -1 or lines[l].find('}') != -1 or lines[l].find('else') != -1 or lines[l].find('break;') != -1 or lines[l].find('if ') != -1: + for no in range(prev.line_number, call.line_number-1): + if lines[no].find('{') != -1 or lines[no].find('}') != -1 or lines[no].find('else') != -1 or lines[no].find('break;') != -1 or lines[no].find('if ') != -1: scope_different = True break # Also more compelling if check for and scope changes { } in lines in-between? @@ -1315,7 +1514,6 @@ apiChecks.append(APICheck('proto_tree_add_item_ret_varint', { 'FT_INT8', 'FT_INT 'FT_CHAR', 'FT_UINT8', 'FT_UINT16', 'FT_UINT24', 'FT_UINT32', 'FT_FRAMENUM', 'FT_UINT40', 'FT_UINT48', 'FT_UINT56', 'FT_UINT64',})) apiChecks.append(APICheck('proto_tree_add_boolean_bits_format_value', { 'FT_BOOLEAN'})) -apiChecks.append(APICheck('proto_tree_add_boolean_bits_format_value64', { 'FT_BOOLEAN'})) apiChecks.append(APICheck('proto_tree_add_ascii_7bits_item', { 'FT_STRING'})) # TODO: positions are different, and takes 2 hf_fields.. #apiChecks.append(APICheck('proto_tree_add_checksum', { 'FT_UINT8', 'FT_UINT16', 'FT_UINT24', 'FT_UINT32'})) @@ -1401,17 +1599,27 @@ def isGeneratedFile(filename): return False +# TODO: could also look for macros in related/included header file(s)? def find_macros(filename): - macros = {} + # Pre-populate with some useful values.. + macros = { 'BASE_NONE' : 0, 'BASE_DEC' : 1 } + with open(filename, 'r', encoding="utf8") as f: contents = f.read() # Remove comments so as not to trip up RE. contents = removeComments(contents) - matches = re.finditer( r'#define\s*([A-Z0-9_]*)\s*([0-9xa-fA-F]*)\n', contents) + matches = re.finditer( r'#define\s*([A-Za-z0-9_]*)\s*([0-9xa-fA-F]*)\s*\n', contents) + for m in matches: + # Store this mapping. 
+ macros[m.group(1)] = m.group(2) + + # Also look for what could be enumeration assignments + matches = re.finditer( r'\s*([A-Za-z0-9_]*)\s*=\s*([0-9xa-fA-F]*)\s*,?\n', contents) for m in matches: # Store this mapping. macros[m.group(1)] = m.group(2) + return macros @@ -1468,7 +1676,7 @@ def find_field_arrays(filename, all_fields, all_hf): for m in matches: name = m.group(1) # Ignore if not used in a call to an _add_bitmask_ API - if not name in all_fields: + if name not in all_fields: continue fields_text = m.group(2) @@ -1591,6 +1799,11 @@ def checkFile(filename, check_mask=False, mask_exact_width=False, check_label=Fa for name in range_strings: range_strings[name].extraChecks() + # Find (and sanity-check) string_strings + string_strings = findStringStrings(filename, macros, do_extra_checks=extra_value_string_checks) + if extra_value_string_checks: + for name in string_strings: + string_strings[name].extraChecks() # Find important parts of items. @@ -1638,10 +1851,15 @@ def checkFile(filename, check_mask=False, mask_exact_width=False, check_label=Fa # Only checking if almost every field does match. 
checking = len(items_defined) and matches 0.93) if checking: - print(filename, ':', matches, 'label-vs-filter matches of out of', len(items_defined), 'so reporting mismatches') + print(filename, ':', matches, 'label-vs-filter matches out of', len(items_defined), 'so reporting mismatches') for hf in items_defined: items_defined[hf].check_label_vs_filter(reportError=True, reportNumericalMismatch=False) + for hf in items_defined: + items_defined[hf].check_boolean_length() + items_defined[hf].check_string_display() + items_defined[hf].check_ipv4_display() + ################################################################# @@ -1686,7 +1904,7 @@ if args.all_checks: args.mask_exact_width = True args.consecutive = True args.check_bitmask_fields = True - #args.label = True + args.label = True args.label_vs_filter = True args.extra_value_string_checks @@ -1734,7 +1952,7 @@ elif args.open: # Only interested in dissector files. files_staged = list(filter(lambda f : is_dissector_file(f), files_staged)) for f in files_staged: - if not f in files: + if f not in files: files.append(f) else: # Find all dissector files. diff --git a/tools/check_val_to_str.py b/tools/check_val_to_str.py index 417655c3..4ce2ca8c 100755 --- a/tools/check_val_to_str.py +++ b/tools/check_val_to_str.py @@ -73,7 +73,7 @@ def removeComments(code_string): def is_dissector_file(filename): - p = re.compile(r'.*packet-.*\.c') + p = re.compile(r'.*(packet|file)-.*\.c') return p.match(filename) def findDissectorFilesInFolder(folder, recursive=False): @@ -101,7 +101,7 @@ warnings_found = 0 errors_found = 0 # Check the given dissector file. -def checkFile(filename): +def checkFile(filename, generated): global warnings_found global errors_found @@ -130,18 +130,28 @@ def checkFile(filename): # TODO: I suppose it could be escaped, but haven't seen this... 
if format_string.find('%') != -1: # This is an error as format specifier would show in app - print('Error:', filename, " ", m.group(0), ' - should not have specifiers in unknown string') + print('Error:', filename, " ", m.group(0), + ' - should not have specifiers in unknown string', + '(GENERATED)' if generated else '') errors_found += 1 else: # These ones need to have a specifier, and it should be suitable for an int - specifier_id = format_string.find('%') - if specifier_id == -1: - print('Warning:', filename, " ", m.group(0), ' - should have suitable format specifier in unknown string (or use _const()?)') + count = format_string.count('%') + if count == 0: + print('Warning:', filename, " ", m.group(0), + ' - should have suitable format specifier in unknown string (or use _const()?)', + '(GENERATED)' if generated else '') warnings_found += 1 + elif count > 1: + print('Warning:', filename, " ", m.group(0), + ' - has more than one specifier?', + '(GENERATED)' if generated else '') # TODO: check allowed specifiers (d, u, x, ?) and modifiers (0-9*) in re ? 
if format_string.find('%s') != -1: # This is an error as this likely causes a crash - print('Error:', filename, " ", m.group(0), ' - inappropriate format specifier in unknown string') + print('Error:', filename, " ", m.group(0), + ' - inappropriate format specifier in unknown string', + '(GENERATED)' if generated else '') errors_found += 1 @@ -158,6 +168,8 @@ parser.add_argument('--commits', action='store', help='last N commits to check') parser.add_argument('--open', action='store_true', help='check open files') +parser.add_argument('--generated', action='store_true', + help='check generated files') args = parser.parse_args() @@ -167,7 +179,7 @@ files = [] if args.file: # Add specified file(s) for f in args.file: - if not f.startswith('epan'): + if not os.path.isfile(f) and not f.startswith('epan'): f = os.path.join('epan', 'dissectors', f) if not os.path.isfile(f): print('Chosen file', f, 'does not exist.') @@ -195,7 +207,7 @@ elif args.open: # Only interested in dissector files. files_staged = list(filter(lambda f : is_dissector_file(f), files_staged)) for f in files_staged: - if not f in files: + if f not in files: files.append(f) else: # Find all dissector files from folder. @@ -219,8 +231,9 @@ else: for f in files: if should_exit: exit(1) - if not isGeneratedFile(f): - checkFile(f) + generated = isGeneratedFile(f) + if args.generated or not generated: + checkFile(f, generated) # Show summary. 
diff --git a/tools/checkfiltername.pl b/tools/checkfiltername.pl index ea286b26..d7b1c0f6 100755 --- a/tools/checkfiltername.pl +++ b/tools/checkfiltername.pl @@ -357,6 +357,7 @@ sub is_proto_dup_allowed { if (($_[0] eq "tn3270") && (index($_[1], "tn3270e") >= 0)) {return 1;} if (($_[0] eq "usb") && (index($_[1], "usb") >= 0)) {return 1;} if (($_[0] eq "xml") && (index($_[1], "xml") >= 0)) {return 1;} + if (($_[0] eq "dns") && (index($_[1], "dnscrypt") >= 0)) {return 1;} return 0; } diff --git a/tools/checkhf.pl b/tools/checkhf.pl index 7e01c7e5..df075e62 100755 --- a/tools/checkhf.pl +++ b/tools/checkhf.pl @@ -291,7 +291,7 @@ sub remove_quoted_strings { sub remove_if0_code { my ($codeRef, $fileName) = @_; - # Preprocess outputput (ensure trailing LF and no leading WS before '#') + # Preprocess output (ensure trailing LF and no leading WS before '#') $$codeRef =~ s/^\s*#/#/m; if ($$codeRef !~ /\n$/) { $$codeRef .= "\n"; } diff --git a/tools/checklicenses.py b/tools/checklicenses.py index 192fecbe..b0a0ef02 100755 --- a/tools/checklicenses.py +++ b/tools/checklicenses.py @@ -37,7 +37,6 @@ ALLOWED_LICENSES = [ 'BSD (2 clause) GPL (v2 or later)', 'BSD (3 clause)', 'GPL (v2 or later)', - 'GPL (v3 or later) (with Bison parser exception)', 'ISC', 'ISC GPL (v2 or later)', 'LGPL (v2 or later)', @@ -73,15 +72,12 @@ PATH_SPECIFIC_ALLOWED_LICENSES = { 'doc/': [ 'UNKNOWN', ], - 'docbook/custom_layer_chm.xsl': [ + 'doc/custom_layer_chm.xsl': [ 'UNKNOWN', ], - 'docbook/custom_layer_single_html.xsl': [ + 'doc/custom_layer_single_html.xsl': [ 'UNKNOWN', ], - 'docbook/ws.css' : [ - 'UNKNOWN' - ], 'fix': [ 'UNKNOWN', ], @@ -122,7 +118,7 @@ PATH_SPECIFIC_ALLOWED_LICENSES = { ], # Special IDL license that appears to be compatible as far as I (not a # lawyer) can tell. 
See - # https://www.wireshark.org/lists/wireshark-dev/201310/msg00234.html + # https://lists.wireshark.org/archives/wireshark-dev/201310/msg00234.html 'epan/dissectors/pidl/idl_types.h': [ 'UNKNOWN', ], @@ -172,7 +168,7 @@ def check_licenses(options, args): 'licensecheck.pl')) licensecheck = subprocess.Popen([licensecheck_path, - '-l', '150', + '-l', '160', '-r', start_dir], stdout=subprocess.PIPE, stderr=subprocess.PIPE) diff --git a/tools/convert-glib-types.py b/tools/convert-glib-types.py index aa714d7d..83c8978a 100755 --- a/tools/convert-glib-types.py +++ b/tools/convert-glib-types.py @@ -5,7 +5,7 @@ # # SPDX-License-Identifier: GPL-2.0-or-later '''\ -convert-glib-types.py - Convert glib types to their C and C99 eqivalents. +convert-glib-types.py - Convert glib types to their C and C99 equivalents. ''' # Imports @@ -22,9 +22,13 @@ type_map = { 'gboolean': 'bool', 'gchar': 'char', 'guchar': 'unsigned char', + 'gshort': 'int16_t', + 'gushort': 'uint16_t', 'gint': 'int', 'guint': 'unsigned', # Matches README.developer - 'glong': 'long', + # Our remaining glong instances probably shouldn't be converted, e.g. + # sequence_analysis.c:350 + # 'glong': 'long', 'gulong': 'unsigned long', 'gint8': 'int8_t', 'gint16': 'int16_t', @@ -38,6 +42,10 @@ type_map = { 'gdouble': 'double', 'gpointer ': 'void *', # 'void *foo' instead of 'void * foo' 'gpointer': 'void *', + 'gconstpointer ': 'const void *', # 'void *foo' instead of 'void * foo' + 'gconstpointer': 'const void *', + 'gintptr': 'intptr_t', + 'guintptr': 'uintptr_t', # Is gsize the same as size_t on the platforms we support? 
# https://gitlab.gnome.org/GNOME/glib/-/issues/2493 'gsize': 'size_t', @@ -45,8 +53,6 @@ type_map = { } definition_map = { - 'TRUE': 'true', - 'FALSE': 'false', 'G_MAXINT8': 'INT8_MAX', 'G_MAXINT16': 'INT16_MAX', 'G_MAXINT32': 'INT32_MAX', @@ -62,6 +68,17 @@ definition_map = { 'G_MININT32': 'INT32_MIN', 'G_MININT64': 'INT64_MIN', 'G_MININT': 'INT_MIN', + 'G_MINFLOAT': 'FLT_MIN', + 'G_MAXFLOAT': 'FLT_MAX', + 'G_MINDOUBLE': 'DBL_MIN', + 'G_MAXDOUBLE': 'DBL_MAX', + 'G_GINT64_CONSTANT': 'INT64_C', + 'G_GUINT64_CONSTANT': 'UINT64_C', +} + +tf_definition_map = { + 'TRUE': 'true', + 'FALSE': 'false', } format_spec_map = { @@ -69,6 +86,33 @@ format_spec_map = { 'G_GUINT64_FORMAT': 'PRIu64', } +api_map = { + 'tvb_get_guint8': 'tvb_get_uint8', + 'tvb_get_gint8': 'tvb_get_int8', + 'tvb_get_guint16': 'tvb_get_uint16', + 'tvb_get_gint16': 'tvb_get_int16', + 'tvb_get_guint24': 'tvb_get_uint24', + 'tvb_get_gint24': 'tvb_get_int24', + 'tvb_get_guint32': 'tvb_get_uint32', + 'tvb_get_gint32': 'tvb_get_int32', + 'tvb_get_guint40': 'tvb_get_uint40', + 'tvb_get_gint40': 'tvb_get_int40', + 'tvb_get_guint48': 'tvb_get_uint48', + 'tvb_get_gint48': 'tvb_get_int48', + 'tvb_get_guint56': 'tvb_get_uint56', + 'tvb_get_gint56': 'tvb_get_int56', + 'tvb_get_guint64': 'tvb_get_uint64', + 'tvb_get_gint64': 'tvb_get_int64', + 'tvb_find_guint8': 'tvb_find_uint8', + 'tvb_find_guint16': 'tvb_find_uint16', + 'tvb_ws_mempbrk_pattern_guint8': 'tvb_ws_mempbrk_pattern_uint8', + 'guint32_to_str_buf': 'uint32_to_str_buf', + 'guint64_to_str_buf': 'uint64_to_str_buf', + 'get_nonzero_guint32': 'get_nonzero_uint32', + 'get_guint32': 'get_uint32', + 'guint8_to_hex': 'uint8_to_hex', +} + def convert_file(file): lines = '' try: @@ -80,15 +124,19 @@ def convert_file(file): lines = re.sub(rf'([^"])\b{glib_type}\b([^"])', rf'\1{c99_type}\2', lines, flags=re.MULTILINE) for glib_define, c99_define in definition_map.items(): lines = re.sub(rf'\b{glib_define}\b', rf'{c99_define}', lines, flags=re.MULTILINE) + for 
glib_tf_define, c99_define in tf_definition_map.items(): + lines = re.sub(rf'\b{glib_tf_define}\b([^\'"])', rf'{c99_define}\1', lines, flags=re.MULTILINE) for glib_fmt_spec, c99_fmt_spec in format_spec_map.items(): lines = re.sub(rf'\b{glib_fmt_spec}\b', rf'{c99_fmt_spec}', lines, flags=re.MULTILINE) + for glib_api, c99_api in api_map.items(): + lines = re.sub(rf'\b{glib_api}\b', rf'{c99_api}', lines, flags=re.MULTILINE) except IsADirectoryError: sys.stderr.write(f'{file} is a directory.\n') return except UnicodeDecodeError: sys.stderr.write(f"{file} isn't valid UTF-8.\n") return - except: + except Exception: sys.stderr.write(f'Unable to open {file}.\n') return @@ -97,11 +145,11 @@ def convert_file(file): print(f'Converted {file}') def main(): - parser = argparse.ArgumentParser(description='Convert glib types to their C and C99 eqivalents.') + parser = argparse.ArgumentParser(description='Convert glib types to their C and C99 equivalents.') parser.add_argument('files', metavar='FILE', nargs='*') args = parser.parse_args() - # Build a padded version of type_map which attempts to preseve alignment + # Build a padded version of type_map which attempts to preserve alignment for glib_type, c99_type in type_map.items(): pg_type = glib_type + ' ' pc_type = c99_type + ' ' diff --git a/tools/convert-proto-init.py b/tools/convert-proto-init.py new file mode 100755 index 00000000..f0ce652f --- /dev/null +++ b/tools/convert-proto-init.py @@ -0,0 +1,73 @@ +#!/usr/bin/env python3 +# +# Wireshark - Network traffic analyzer +# By Gerald Combs +# Copyright 1998 Gerald Combs +# +# SPDX-License-Identifier: GPL-2.0-or-later +'''\ +convert-proto-init.py - Remove explicit init of proto variables. 
+''' + +# Imports + +import argparse +import glob +import platform +import re +import sys + +def convert_file(file): + lines = '' + try: + with open(file, 'r') as f: + lines = f.read() + # Match the following proto, header field, expert info and subtree variables: + # + # static int proto_a = -1; + # int proto_b=-1; + # + # static int hf_proto_a_value_1 = -1; + # int hf_proto_a_value_2 = - 1; + # int hf_proto_a_value_3=-1; + # /* static int hf_proto_a_unused_1 = -1; */ + # + # static gint ett_proto_a_tree_1=-1; + # gint ett_proto_a_tree_2 = -1; /* A comment. */ + # + # static expert_field ei_proto_a_expert_1 = EI_INIT; + # + lines = re.sub(r'^((?://\s*|/[*]+\s*)?(?:static\s*| )?(?:g?int|expert_field)\s*(?:proto|hf|ett|ei)_[\w_]+)\s*=\s*(?:-\s*1|EI_INIT)\s*', r'\1', lines, flags=re.MULTILINE) + except IsADirectoryError: + sys.stderr.write(f'{file} is a directory.\n') + return + except UnicodeDecodeError: + sys.stderr.write(f"{file} isn't valid UTF-8.\n") + return + except Exception: + sys.stderr.write(f'Unable to open {file}.\n') + return + + with open(file, 'w') as f: + f.write(lines) + print(f'Converted {file}') + +def main(): + parser = argparse.ArgumentParser(description='Initialize static proto values to 0.') + parser.add_argument('files', metavar='FILE', nargs='*') + args = parser.parse_args() + + files = [] + if platform.system() == 'Windows': + for arg in args.files: + files += glob.glob(arg) + else: + files = args.files + + for file in files: + convert_file(file) + +# On with the show + +if __name__ == "__main__": + sys.exit(main()) diff --git a/tools/convert_expert_add_info_format.pl b/tools/convert_expert_add_info_format.pl index 57289364..0b0ddd32 100755 --- a/tools/convert_expert_add_info_format.pl +++ b/tools/convert_expert_add_info_format.pl @@ -59,7 +59,10 @@ my %EXPERT_GROUPS = ('PI_CHECKSUM' => "PI_CHECKSUM", 'PI_COMMENTS_GROUP' => "PI_COMMENTS_GROUP", 'PI_DECRYPTION' => "PI_DECRYPTION", 'PI_ASSUMPTION' => "PI_ASSUMPTION", - 'PI_DEPRECATED' => 
"PI_DEPRECATED"); + 'PI_DEPRECATED' => "PI_DEPRECATED", + 'PI_RECEIVE' => "PI_RECEIVE", + 'PI_INTERFACE' => "PI_INTERFACE", + 'PI_DISSECTOR_BUG' => "PI_DISSECTOR_BUG"); my @expert_list; my $protabbrev = ""; diff --git a/tools/convert_proto_tree_add_text.pl b/tools/convert_proto_tree_add_text.pl index 35764558..cffc875c 100755 --- a/tools/convert_proto_tree_add_text.pl +++ b/tools/convert_proto_tree_add_text.pl @@ -3,12 +3,12 @@ # Copyright 2013 Michael Mann (see AUTHORS file) # # A program to help convert proto_tree_add_text calls into filterable "items" that -# use proto_tree_add_item. The program requires 2 passes. "Pass 1" (generate) collects +# use proto_tree_add_item. The program requires 2 passes. "Pass 1" (generate) collects # the eligible proto_tree_add_text calls and outputs the necessary data into a delimited # file. "Pass 2" (fix-all) takes the data from the delimited file and replaces the -# proto_tree_add_text calls with proto_tree_add_item or "expert info" calls as well as +# proto_tree_add_text calls with proto_tree_add_item or "expert info" calls as well as # generating separate files for the hf and/or ei variable declarations and hf and/or ei array data. 
-# The hf "files" can be copy/pasted into the dissector where appropriate (until such time as +# The hf "files" can be copy/pasted into the dissector where appropriate (until such time as # its done automatically) # # Note that the output from "Pass 1" won't always be a perfect conversion for "Pass 2", so @@ -96,8 +96,11 @@ my %EXPERT_GROUPS = ('PI_CHECKSUM' => "PI_CHECKSUM", 'PI_SECURITY' => "PI_SECURITY", 'PI_COMMENTS_GROUP' => "PI_COMMENTS_GROUP", 'PI_DECRYPTION' => "PI_DECRYPTION", - 'PI_ASSUMPTION' => "PI_ASSUMPTION", - 'PI_DEPRECATED' => "PI_DEPRECATED"); + 'PI_ASSUMPTION' => "PI_ASSUMPTION", + 'PI_DEPRECATED' => "PI_DEPRECATED", + 'PI_RECEIVE' => "PI_RECEIVE", + 'PI_INTERFACE' => "PI_INTERFACE", + 'PI_DISSECTOR_BUG' => "PI_DISSECTOR_BUG"); my @proto_tree_list; my @expert_list; @@ -379,7 +382,7 @@ sub generate_hfs { #encoding if (scalar @args > 5) { if (($proto_tree_item[6] eq "1") || - ($args[5] =~ /tvb_get_guint8/) || + ($args[5] =~ /tvb_get_g?uint8/) || ($args[5] =~ /tvb_bytes_to_str/) || ($args[5] =~ /tvb_ether_to_str/)) { $proto_tree_item[7] = "ENC_NA"; @@ -387,7 +390,7 @@ sub generate_hfs { $proto_tree_item[7] = "ENC_BIG_ENDIAN"; } elsif ($args[5] =~ /tvb_get_letoh/) { $proto_tree_item[7] = "ENC_LITTLE_ENDIAN"; - } elsif (($args[5] =~ /tvb_get_ephemeral_string/) || + } elsif (($args[5] =~ /tvb_get_ephemeral_string/) || ($args[5] =~ /tvb_format_text/)){ $proto_tree_item[7] = "ENC_NA|ENC_ASCII"; } elsif ($encoding ne "") { @@ -434,7 +437,7 @@ sub generate_hfs { #field type if (scalar @args > 5) { - if ($args[5] =~ /tvb_get_guint8/) { + if ($args[5] =~ /tvb_get_g?uint8/) { if ($args[4] =~ /%[0-9]*[i]/) { $proto_tree_item[9] = "FT_INT8"; } else { @@ -479,7 +482,7 @@ sub generate_hfs { $proto_tree_item[9] = "FT_GUID"; } elsif ($args[5] =~ /tvb_get_ephemeral_stringz/) { $proto_tree_item[9] = "FT_STRINGZ"; - } elsif (($args[5] =~ /tvb_get_ephemeral_string/) || + } elsif (($args[5] =~ /tvb_get_ephemeral_string/) || ($args[5] =~ /tvb_format_text/)){ 
$proto_tree_item[9] = "FT_STRING"; } elsif (($args[5] =~ /tvb_bytes_to_str/)) { diff --git a/tools/debian-setup.sh b/tools/debian-setup.sh index 9b688794..8fade5ac 100755 --- a/tools/debian-setup.sh +++ b/tools/debian-setup.sh @@ -26,6 +26,26 @@ function print_usage() { printf "\\t[other]: other options are passed as-is to apt\\n" } +# Adds package $2 to list variable $1 if the package is found. +# If $3 is given, then this version requirement must be satisfied. +function add_package() { + local list="$1" pkgname="$2" versionreq="${3:-}" version + + version=$(apt-cache show "$pkgname" 2>/dev/null | + awk '/^Version:/{ print $2; exit}') + # fail if the package is not known + if [ -z "$version" ]; then + return 1 + elif [ -n "$versionreq" ]; then + # Require minimum version or fail. + # shellcheck disable=SC2086 + dpkg --compare-versions $version $versionreq || return 1 + fi + + # package is found, append it to list + eval "${list}=\"\${${list}} \${pkgname}\"" +} + ADDITIONAL=0 DEBDEPS=0 TESTDEPS=0 @@ -75,36 +95,48 @@ then exit 1 fi -BASIC_LIST="gcc \ - g++\ - libglib2.0-dev \ - libc-ares-dev \ - libpcap-dev \ - libpcre2-dev \ - flex \ - make \ - python3 \ - libgcrypt-dev \ - libspeexdsp-dev" - -QT5_LIST="qttools5-dev \ - qttools5-dev-tools \ - libqt5svg5-dev \ - qtmultimedia5-dev \ - qtbase5-dev \ - qtchooser \ - qt5-qmake \ - qtbase5-dev-tools" - -QT6_LIST="qt6-base-dev \ - qt6-multimedia-dev \ - qt6-tools-dev \ - qt6-tools-dev-tools \ - qt6-l10n-tools \ - libqt6core5compat6-dev \ - freeglut3-dev \ - libvulkan-dev \ - libxkbcommon-dev" +BASIC_LIST=" + cmake + flex + g++ + gcc + libc-ares-dev + libgcrypt-dev + libglib2.0-dev + libpcap-dev + libpcre2-dev + libspeexdsp-dev + make + python3 + " + +QT5_LIST=" + libqt5svg5-dev + qt5-qmake + qtbase5-dev + qtbase5-dev-tools + qtchooser + qtmultimedia5-dev + qttools5-dev + qttools5-dev-tools + " + +QT6_LIST=" + freeglut3-dev + libqt6svg6-dev + libvulkan-dev + libxkbcommon-dev + qt6-base-dev + qt6-l10n-tools + 
qt6-multimedia-dev + qt6-tools-dev + qt6-tools-dev-tools + " + +# qt6-5compat-dev: Debian >= bookworm, Ubuntu >= 23.04 +# libqt6core5compat6-dev: Ubuntu 22.04 +add_package QT6_LIST qt6-5compat-dev || +QT6_LIST="$QT6_LIST libqt6core5compat6-dev" if [ $ADD_QT5 -ne 0 ] then @@ -125,144 +157,101 @@ then # shellcheck disable=SC1090 . "${os_release}" - # Ubuntu 22.04 (jammy) or later + # Ubuntu 22.04 (jammy) / Debian 12 (bookworm) or later MAJOR=$(echo "$VERSION_ID" | cut -f1 -d.) if [ "${ID:-linux}" = "ubuntu" ] && [ "${MAJOR:-0}" -ge "22" ]; then echo "Installing Qt6." BASIC_LIST="$BASIC_LIST $QT6_LIST" + elif [ "${ID:-linux}" = "debian" ] && [ "${MAJOR:-0}" -ge "12" ]; then + echo "Installing Qt6." + BASIC_LIST="$BASIC_LIST $QT6_LIST" else echo "Installing Qt5." BASIC_LIST="$BASIC_LIST $QT5_LIST" fi fi -ADDITIONAL_LIST="libnl-3-dev \ - libkrb5-dev \ - libsmi2-dev \ - libsbc-dev \ - liblua5.2-dev \ - libnl-cli-3-dev \ - libparse-yapp-perl \ - libcap-dev \ - liblz4-dev \ - libsnappy-dev \ - libzstd-dev \ - libspandsp-dev \ - libxml2-dev \ - libminizip-dev \ - git \ - ninja-build \ - perl \ - xsltproc \ - ccache \ - doxygen" +ADDITIONAL_LIST=" + ccache + doxygen + git + libbrotli-dev + libcap-dev + libgnutls28-dev + libkrb5-dev + liblz4-dev + libmaxminddb-dev + libminizip-dev + libnghttp2-dev + libnl-3-dev + libnl-cli-3-dev + libopencore-amrnb-dev + libopus-dev + libparse-yapp-perl + libsbc-dev + libssh-gcrypt-dev + libsmi2-dev + libsnappy-dev + libspandsp-dev + libsystemd-dev + libxml2-dev + libzstd-dev + ninja-build + perl + xsltproc + " # Uncomment to add PNG compression utilities used by compress-pngs: -# ADDITIONAL_LIST="$ADDITIONAL_LIST \ -# advancecomp \ -# optipng \ -# pngcrush" - -DEBDEPS_LIST="debhelper \ - dh-python \ - asciidoctor \ - docbook-xml \ - docbook-xsl \ - libxml2-utils \ - lintian \ - lsb-release \ - po-debconf \ - python3-ply \ - quilt" - -TESTDEPS_LIST="python3-pytest \ - python3-pytest-xdist" - -# Adds package $2 to list variable $1 if the 
package is found. -# If $3 is given, then this version requirement must be satisfied. -add_package() { - local list="$1" pkgname="$2" versionreq="${3:-}" version - - version=$(apt-cache show "$pkgname" 2>/dev/null | - awk '/^Version:/{ print $2; exit}') - # fail if the package is not known - if [ -z "$version" ]; then - return 1 - elif [ -n "$versionreq" ]; then - # Require minimum version or fail. - # shellcheck disable=SC2086 - dpkg --compare-versions $version $versionreq || return 1 - fi - - # package is found, append it to list - eval "${list}=\"\${${list}} \${pkgname}\"" -} +# ADDITIONAL_LIST=" +# $ADDITIONAL_LIST +# advancecomp +# optipng +# pngcrush +# " + +DEBDEPS_LIST=" + asciidoctor + debhelper + dh-python + docbook-xml + docbook-xsl + libxml2-utils + lintian + lsb-release + po-debconf + python3-ply + quilt + " + +TESTDEPS_LIST=" + gdb + python3-pytest + python3-pytest-xdist + softhsm2 + " # apt-get update must be called before calling add_package # otherwise available packages appear as unavailable apt-get update || exit 2 -# cmake3 3.5.1: Ubuntu 14.04 -# cmake >= 3.5: Debian >= jessie-backports, Ubuntu >= 16.04 -add_package BASIC_LIST cmake3 || -BASIC_LIST="$BASIC_LIST cmake" - -# Debian >= wheezy-backports, Ubuntu >= 16.04 -add_package ADDITIONAL_LIST libnghttp2-dev || -echo "libnghttp2-dev is unavailable" >&2 +# Lua 5.4: Debian >= bullseye, Ubuntu >= 22.04 (jammy) +# Lua 5.3: Debian >= buster, Ubuntu >= 20.04 (focal) +add_package ADDITIONAL_LIST liblua5.4-dev || +ADDITIONAL_LIST="$ADDITIONAL_LIST liblua5.3-dev" # Debian >= bookworm, Ubuntu >= 22.04 add_package ADDITIONAL_LIST libnghttp3-dev || echo "libnghttp3-dev is unavailable" >&2 -# libssh-gcrypt-dev: Debian >= jessie, Ubuntu >= 16.04 -# libssh-dev (>= 0.6): Debian >= jessie, Ubuntu >= 14.04 -add_package ADDITIONAL_LIST libssh-gcrypt-dev || -add_package ADDITIONAL_LIST libssh-dev || -echo "libssh-gcrypt-dev and libssh-dev are unavailable" >&2 - -# libgnutls28-dev: Debian >= wheezy-backports, 
Ubuntu >= 12.04 -add_package ADDITIONAL_LIST libgnutls28-dev || -echo "libgnutls28-dev is unavailable" >&2 - -# Debian >= jessie-backports, Ubuntu >= 16.04 -add_package ADDITIONAL_LIST libmaxminddb-dev || -echo "libmaxminddb-dev is unavailable" >&2 - -# Debian >= stretch-backports, Ubuntu >= 16.04 -add_package ADDITIONAL_LIST libbrotli-dev || -echo "libbrotli-dev is unavailable" >&2 - -# libsystemd-journal-dev: Ubuntu 14.04 -# libsystemd-dev: Ubuntu >= 16.04 -add_package ADDITIONAL_LIST libsystemd-dev || -add_package ADDITIONAL_LIST libsystemd-journal-dev || -echo "libsystemd-dev is unavailable" - # ilbc library from http://www.deb-multimedia.org add_package ADDITIONAL_LIST libilbc-dev || echo "libilbc-dev is unavailable" -# opus library libopus-dev -add_package ADDITIONAL_LIST libopus-dev || - echo "libopus-dev is unavailable" - +# Debian >= bullseye, Ubuntu >= 22.04 (jammy) # bcg729 library libbcg729-dev add_package ADDITIONAL_LIST libbcg729-dev || echo "libbcg729-dev is unavailable" -# softhsm2 2.0.0: Ubuntu 16.04 -# softhsm2 2.2.0: Debian >= jessie-backports, Ubuntu 18.04 -# softhsm2 >= 2.4.0: Debian >= buster, Ubuntu >= 18.10 -if ! add_package TESTDEPS_LIST softhsm2 '>= 2.3.0'; then - if add_package TESTDEPS_LIST softhsm2; then - # If SoftHSM 2.3.0 is unavailble, install p11tool. - TESTDEPS_LIST="$TESTDEPS_LIST gnutls-bin" - else - echo "softhsm2 is unavailable" >&2 - fi -fi - ACTUAL_LIST=$BASIC_LIST # Now arrange for optional support libraries diff --git a/tools/delete_includes.py b/tools/delete_includes.py index cc804e0b..8ea5e806 100755 --- a/tools/delete_includes.py +++ b/tools/delete_includes.py @@ -18,7 +18,6 @@ import sys import shutil import argparse import signal -import re from pathlib import Path @@ -190,8 +189,6 @@ def test_file(filename): # Don't want to delete 'self-includes', so prepare filename. 
module_name = Path(filename).stem - extension = Path(filename).suffix - module_header = module_name + '.h' # Loop around, finding all possible include lines to comment out diff --git a/tools/detect_bad_alloc_patterns.py b/tools/detect_bad_alloc_patterns.py index a89ceb6f..b2459c6e 100644 --- a/tools/detect_bad_alloc_patterns.py +++ b/tools/detect_bad_alloc_patterns.py @@ -69,8 +69,8 @@ def test_replacements(): test_string = """\ (if_info_t*) g_malloc0(sizeof(if_info_t)) (oui_info_t *)g_malloc(sizeof (oui_info_t)) -(guint8 *)g_malloc(16 * sizeof(guint8)) -(guint32 *)g_malloc(sizeof(guint32)*2) +(uint8_t *)g_malloc(16 * sizeof(uint8_t)) +(uint32_t *)g_malloc(sizeof(uint32_t)*2) (struct imf_field *)g_malloc (sizeof (struct imf_field)) (rtspstat_t *)g_malloc( sizeof(rtspstat_t) ) (proto_data_t *)wmem_alloc(scope, sizeof(proto_data_t)) @@ -85,8 +85,8 @@ def test_replacements(): expected_output = """\ g_new0(if_info_t, 1) g_new(oui_info_t, 1) -g_new(guint8, 16) -g_new(guint32, 2) +g_new(uint8_t, 16) +g_new(uint32_t, 2) g_new(struct imf_field, 1) g_new(rtspstat_t, 1) wmem_new(scope, proto_data_t) diff --git a/tools/eti2wireshark.py b/tools/eti2wireshark.py index 98fb291a..fe11d644 100755 --- a/tools/eti2wireshark.py +++ b/tools/eti2wireshark.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -# Generate Wireshark Dissectors for eletronic trading/market data +# Generate Wireshark Dissectors for electronic trading/market data # protocols such as ETI/EOBI. # # Targets Wireshark 3.5 or later. 
@@ -10,7 +10,6 @@ import argparse -import itertools import re import sys import xml.etree.ElementTree as ET @@ -130,6 +129,10 @@ def gen_header(proto, desc, o=sys.stdout): /* (Required to prevent [-Wmissing-prototypes] warnings */ void proto_reg_handoff_{proto}(void); void proto_register_{proto}(void); + +static dissector_handle_t {proto}_handle; + +static int proto_{proto}; ''', file=o) @@ -232,21 +235,20 @@ def get_fields(st, dt): return vs def gen_field_handles(st, dt, proto, o=sys.stdout): - print(f'''static expert_field ei_{proto}_counter_overflow = EI_INIT; -static expert_field ei_{proto}_invalid_template = EI_INIT; -static expert_field ei_{proto}_invalid_length = EI_INIT;''', file=o) + print(f'''static expert_field ei_{proto}_counter_overflow; +static expert_field ei_{proto}_invalid_template; +static expert_field ei_{proto}_invalid_length;''', file=o) if not proto.startswith('eobi'): - print(f'static expert_field ei_{proto}_unaligned = EI_INIT;', file=o) - print(f'''static expert_field ei_{proto}_missing = EI_INIT; -static expert_field ei_{proto}_overused = EI_INIT; + print(f'static expert_field ei_{proto}_unaligned;', file=o) + print(f'''static expert_field ei_{proto}_missing; +static expert_field ei_{proto}_overused; ''', file=o) vs = get_fields(st, dt) - s = ', '.join('-1' for i in range(len(vs))) - print(f'static int hf_{proto}[] = {{ {s} }};', file=o) - print(f'''static int hf_{proto}_dscp_exec_summary = -1; -static int hf_{proto}_dscp_improved = -1; -static int hf_{proto}_dscp_widened = -1;''', file=o) + print(f'static int hf_{proto}[{len(vs)}];', file=o) + print(f'''static int hf_{proto}_dscp_exec_summary; +static int hf_{proto}_dscp_improved; +static int hf_{proto}_dscp_widened;''', file=o) print('enum Field_Handle_Index {', file=o) for i, (name, _) in enumerate(vs): c = ' ' if i == 0 else ',' @@ -334,10 +336,9 @@ def gen_field_info(st, dt, n2enum, proto='eti', o=sys.stdout): def gen_subtree_handles(st, proto='eti', o=sys.stdout): ns = [ name for 
name, e in st.items() if e.get('type') != 'Message' ] ns.sort() - s = ', '.join('-1' for i in range(len(ns) + 1)) h = dict( (n, i) for i, n in enumerate(ns, 1) ) - print(f'static gint ett_{proto}[] = {{ {s} }};', file=o) - print(f'static gint ett_{proto}_dscp = -1;', file=o) + print(f'static int ett_{proto}[{len(ns) + 1}];', file=o) + print(f'static int ett_{proto}_dscp;', file=o) return h @@ -345,7 +346,7 @@ def gen_subtree_array(st, proto='eti', o=sys.stdout): n = sum(1 for name, e in st.items() if e.get('type') != 'Message') n += 1 s = ', '.join(f'&ett_{proto}[{i}]' for i in range(n)) - print(f' static gint * const ett[] = {{ {s}, &ett_{proto}_dscp }};', file=o) + print(f' static int * const ett[] = {{ {s}, &ett_{proto}_dscp }};', file=o) def gen_fields_table(st, dt, sh, o=sys.stdout): @@ -380,7 +381,6 @@ def gen_fields_table(st, dt, sh, o=sys.stdout): size = int(t.get('size')) if t is not None else 0 rep = '' fh = f'{m.get("name").upper()}_FH_IDX' - sub = '' if is_padding(t): print(f' {c} {{ ETI_PADDING, 0, {size}, 0, 0 }}', file=o) elif is_fixed_point(t): @@ -517,7 +517,7 @@ def gen_usage_table(min_templateid, n, ts, ams, o=sys.stdout): # (cf. 
the uidx DISSECTOR_ASSER_CMPUINIT() before the switch statement) # when the ETI_EOF of the message whose usage information comes last # is reached - print(f' , 0 // filler', file=o) + print(' , 0 // filler', file=o) print(' };', file=o) xs = [ '-1' ] * n t2n = dict(ts) @@ -622,13 +622,13 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi {{ col_set_str(pinfo->cinfo, COL_PROTOCOL, "{proto.upper()}"); col_clear(pinfo->cinfo, COL_INFO); - guint16 templateid = tvb_get_letohs(tvb, {template_off}); + uint16_t templateid = tvb_get_letohs(tvb, {template_off}); const char *template_str = val_to_str_ext(templateid, &template_id_vals_ext, "Unknown {proto.upper()} template: 0x%04x"); - col_add_fstr(pinfo->cinfo, COL_INFO, "%s", template_str); + col_add_str(pinfo->cinfo, COL_INFO, template_str); /* create display subtree for the protocol */ proto_item *ti = proto_tree_add_item(tree, proto_{proto}, tvb, 0, -1, ENC_NA); - guint32 bodylen= {bl_fn}(tvb, 0); + uint32_t bodylen= {bl_fn}(tvb, 0); proto_item_append_text(ti, ", %s (%" PRIu16 "), BodyLen: %u", template_str, templateid, bodylen); proto_tree *root = proto_item_add_subtree(ti, ett_{proto}[0]); ''', file=o) @@ -676,7 +676,7 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi print(f''' int uidx = tid2uidx[templateid - {min_templateid}]; DISSECTOR_ASSERT_CMPINT(uidx, >=, 0); - DISSECTOR_ASSERT_CMPUINT(((size_t)uidx), <, (sizeof usages / sizeof usages[0])); + DISSECTOR_ASSERT_CMPUINT(((size_t)uidx), <, array_length(usages)); ''', file=o) print(f''' int old_fidx = 0; @@ -689,9 +689,9 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi proto_tree *t = root; while (top) {{ DISSECTOR_ASSERT_CMPINT(fidx, >=, 0); - DISSECTOR_ASSERT_CMPUINT(((size_t)fidx), <, (sizeof fields / sizeof fields[0])); + DISSECTOR_ASSERT_CMPUINT(((size_t)fidx), <, array_length(fields)); DISSECTOR_ASSERT_CMPINT(uidx, >=, 0); - 
DISSECTOR_ASSERT_CMPUINT(((size_t)uidx), <, (sizeof usages / sizeof usages[0])); + DISSECTOR_ASSERT_CMPUINT(((size_t)uidx), <, array_length(usages)); switch (fields[fidx].type) {{ case ETI_EOF: @@ -713,7 +713,7 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi break; case ETI_VAR_STRUCT: case ETI_STRUCT: - DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, sizeof counter / sizeof counter[0]); + DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, array_length(counter)); repeats = fields[fidx].type == ETI_VAR_STRUCT ? counter[fields[fidx].counter_off] : 1; if (repeats) {{ --repeats; @@ -740,7 +740,7 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi break; case ETI_STRING: {{ - guint8 c = tvb_get_guint8(tvb, off); + uint8_t c = tvb_get_uint8(tvb, off); if (c) proto_tree_add_item(t, hf_{proto}[fields[fidx].field_handle_idx], tvb, off, fields[fidx].size, ENC_ASCII); else {{ @@ -754,20 +754,20 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi ++uidx; break; case ETI_VAR_STRING: - DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, sizeof counter / sizeof counter[0]); + DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, array_length(counter)); proto_tree_add_item(t, hf_{proto}[fields[fidx].field_handle_idx], tvb, off, counter[fields[fidx].counter_off], ENC_ASCII); off += counter[fields[fidx].counter_off]; ++fidx; ++uidx; break; case ETI_COUNTER: - DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, sizeof counter / sizeof counter[0]); + DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, array_length(counter)); DISSECTOR_ASSERT_CMPUINT(fields[fidx].size, <=, 2); {{ switch (fields[fidx].size) {{ case 1: {{ - guint8 x = tvb_get_guint8(tvb, off); + uint8_t x = tvb_get_uint8(tvb, off); if (x == UINT8_MAX) {{ proto_tree_add_uint_format_value(t, hf_{proto}[fields[fidx].field_handle_idx], tvb, off, fields[fidx].size, x, "NO_VALUE (0xff)"); 
counter[fields[fidx].counter_off] = 0; @@ -784,7 +784,7 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi break; case 2: {{ - guint16 x = tvb_get_letohs(tvb, off); + uint16_t x = tvb_get_letohs(tvb, off); if (x == UINT16_MAX) {{ proto_tree_add_uint_format_value(t, hf_{proto}[fields[fidx].field_handle_idx], tvb, off, fields[fidx].size, x, "NO_VALUE (0xffff)"); counter[fields[fidx].counter_off] = 0; @@ -839,7 +839,7 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, >, 0); DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <=, 16); {{ - gint64 x = tvb_get_letohi64(tvb, off); + int64_t x = tvb_get_letohi64(tvb, off); if (x == INT64_MIN) {{ proto_item *e = proto_tree_add_int64_format_value(t, hf_{proto}[fields[fidx].field_handle_idx], tvb, off, fields[fidx].size, x, "NO_VALUE (0x8000000000000000)"); if (!usages[uidx]) @@ -882,10 +882,10 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi ''', file=o) print(f'''/* determine PDU length of protocol {proto.upper()} */ -static guint +static unsigned get_{proto}_message_len(packet_info *pinfo _U_, tvbuff_t *tvb, int offset, void *data _U_) {{ - return (guint){bl_fn}(tvb, offset); + return (unsigned){bl_fn}(tvb, offset); }} ''', file=o) @@ -903,7 +903,7 @@ dissect_{proto}(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, dissect_{proto}(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, void *data) {{ - tcp_dissect_pdus(tvb, pinfo, tree, TRUE, 4 /* bytes to read for bodylen */, + tcp_dissect_pdus(tvb, pinfo, tree, true, 4 /* bytes to read for bodylen */, get_{proto}_message_len, dissect_{proto}_message, data); return tvb_captured_length(tvb); }} @@ -955,6 +955,8 @@ proto_register_{proto}(void) print(' proto_register_subtree_array(ett, array_length(ett));', file=o) if proto.startswith('eobi'): print(f' proto_disable_by_default(proto_{proto});', file=o) + + print(f'\n 
{proto}_handle = register_dissector("{proto}", dissect_{proto}, proto_{proto});', file=o) print('}\n', file=o) @@ -962,9 +964,6 @@ def gen_handoff_fn(proto, o=sys.stdout): print(f'''void proto_reg_handoff_{proto}(void) {{ - dissector_handle_t {proto}_handle = create_dissector_handle(dissect_{proto}, - proto_{proto}); - // cf. N7 Network Access Guide, e.g. // https://www.xetra.com/xetra-en/technology/t7/system-documentation/release10-0/Release-10.0-2692700?frag=2692724 // https://www.xetra.com/resource/blob/2762078/388b727972b5122945eedf0e63c36920/data/N7-Network-Access-Guide-v2.0.59.pdf @@ -1014,7 +1013,7 @@ proto_reg_handoff_{proto}(void) 56500, // Snapshot Boerse Frankfurt SIMU 56501 // Incremental Boerse Frankfurt SIMU }}; - for (unsigned i = 0; i < sizeof ports / sizeof ports[0]; ++i) + for (unsigned i = 0; i < array_length(ports); ++i) dissector_add_uint("udp.port", ports[i], {proto}_handle);''', file=o) print('}', file=o) @@ -1120,7 +1119,7 @@ def group_members(e, dt): def parse_args(): - p = argparse.ArgumentParser(description='Generate Wireshark Dissector for ETI/EOBI style protocol specifictions') + p = argparse.ArgumentParser(description='Generate Wireshark Dissector for ETI/EOBI style protocol specifications') p.add_argument('filename', help='protocol description XML file') p.add_argument('--proto', default='eti', help='short protocol name (default: %(default)s)') @@ -1152,7 +1151,6 @@ def main(): ams = d.getroot().find('ApplicationMessages') gen_header(proto, desc, o) - print(f'static int proto_{proto} = -1;', file=o) gen_field_handles(st, dt, proto, o) n2enum = gen_enums(dt, ts, o) gen_dissect_structs(o) diff --git a/tools/fuzz-test.sh b/tools/fuzz-test.sh index 7e8d1abc..110a2820 100755 --- a/tools/fuzz-test.sh +++ b/tools/fuzz-test.sh @@ -64,7 +64,7 @@ while getopts "2b:C:d:e:agp:P:o:t:U" OPTCHAR ; do C) CONFIG_PROFILE="-C $OPTARG " ;; d) TMP_DIR=$OPTARG ;; e) ERR_PROB=$OPTARG ;; - g) VALGRIND=1 ;; + g) VALGRIND=1 ; CHECK_UTF_8= ;; p) 
MAX_PASSES=$OPTARG ;; P) MIN_PLUGINS=$OPTARG ;; o) CHANGE_OFFSET=$OPTARG ;; diff --git a/tools/generate-bacnet-vendors.py b/tools/generate-bacnet-vendors.py index 14fc5303..06d21e88 100755 --- a/tools/generate-bacnet-vendors.py +++ b/tools/generate-bacnet-vendors.py @@ -1,16 +1,29 @@ #!/usr/bin/env python3 +# +# Wireshark - Network traffic analyzer +# By Gerald Combs +# Copyright 1998 Gerald Combs +# +# SPDX-License-Identifier: GPL-2.0-or-later +'''Update the BACNET vendors list. -''' - Copyright 2023 Jaap Keuter - based on work by Anish Bhatt +generate-bacnet-vendors generates output containing BACNET vendor Identifiers. + +Copyright 2023 Jaap Keuter +based on work by Anish Bhatt -SPDX-License-Identifier: GPL-2.0-or-later ''' import sys import urllib.request, urllib.error, urllib.parse from bs4 import BeautifulSoup +def exit_msg(msg=None, status=1): + if msg is not None: + sys.stderr.write(msg + '\n\n') + sys.stderr.write(__doc__ + '\n') + sys.exit(status) + req_headers = { 'User-Agent': 'Wireshark generate-bacnet-vendors' } try: req = urllib.request.Request("https://bacnet.org/assigned-vendor-ids/", headers=req_headers) diff --git a/tools/generate-dissector.py b/tools/generate-dissector.py index 4d8ab37d..aee1d615 100755 --- a/tools/generate-dissector.py +++ b/tools/generate-dissector.py @@ -144,7 +144,7 @@ def print_header(): def print_trailer(args): print("") print("The skeleton for the dissector of the " + args.protoshortname + " protocol has been generated.") - print("Please review/extend it to match your specific criterias.") + print("Please review/extend it to match your specific criteria.") print("") diff --git a/tools/generate-nl80211-fields.py b/tools/generate-nl80211-fields.py index dfa8faaf..ddd42575 100755 --- a/tools/generate-nl80211-fields.py +++ b/tools/generate-nl80211-fields.py @@ -34,7 +34,10 @@ EXPORT_ENUMS = { 'nl80211_sta_flags': (None, None, None), 'nl80211_sta_p2p_ps_status': ('Attribute Value', 'FT_UINT8', None), 'nl80211_he_gi': (None, 
None, None), + 'nl80211_he_ltf': (None, None, None), 'nl80211_he_ru_alloc': (None, None, None), + 'nl80211_eht_gi': (None, None, None), + 'nl80211_eht_ru_alloc': (None, None, None), 'nl80211_rate_info': (None, None, None), 'nl80211_sta_bss_param': (None, None, None), 'nl80211_sta_info': (None, None, None), @@ -65,6 +68,8 @@ EXPORT_ENUMS = { 'nl80211_key_mode': (None, None, None), 'nl80211_chan_width': ('Attribute Value', 'FT_UINT32', None), 'nl80211_bss_scan_width': ('Attribute Value', 'FT_UINT32', None), + 'nl80211_bss_use_for': (None, None, None), + 'nl80211_bss_cannot_use_reasons': (None, None, None), 'nl80211_bss': (None, None, None), 'nl80211_bss_status': ('Attribute Value', 'FT_UINT32', None), 'nl80211_auth_type': ('Attribute Value', 'FT_UINT32', None), @@ -80,6 +85,9 @@ EXPORT_ENUMS = { 'nl80211_attr_cqm': (None, None, None), 'nl80211_cqm_rssi_threshold_event': (None, None, None), 'nl80211_tx_power_setting': ('Attribute Value', 'FT_UINT32', None), + 'nl80211_tid_config': (None, None, None), + 'nl80211_tx_rate_setting': (None, None, None), + 'nl80211_tid_config_attr': (None, None, None), 'nl80211_packet_pattern_attr': (None, None, None), 'nl80211_wowlan_triggers': (None, None, None), 'nl80211_wowlan_tcp_attrs': (None, None, None), @@ -88,13 +96,13 @@ EXPORT_ENUMS = { 'nl80211_iface_limit_attrs': (None, None, None), 'nl80211_if_combination_attrs': (None, None, None), 'nl80211_plink_state': ('Attribute Value', 'FT_UINT8', None), - 'plink_actions': ('Attribute Value', 'FT_UINT8', None), + 'nl80211_plink_action': ('Attribute Value', 'FT_UINT8', None), 'nl80211_rekey_data': (None, None, None), 'nl80211_hidden_ssid': (None, None, None), 'nl80211_sta_wme_attr': (None, None, None), 'nl80211_pmksa_candidate_attr': (None, None, None), 'nl80211_tdls_operation': ('Attribute Value', 'FT_UINT8', None), - #Reserved for future use 'nl80211_ap_sme_features': (None, None, None), + 'nl80211_ap_sme_features': (None, None, None), 'nl80211_feature_flags': (None, None, None), 
'nl80211_ext_feature_index': (None, None, None), 'nl80211_probe_resp_offload_support_attr': (None, None, None), @@ -132,6 +140,18 @@ EXPORT_ENUMS = { 'nl80211_peer_measurement_ftm_failure_reasons': (None, None, None), 'nl80211_peer_measurement_ftm_resp': (None, None, None), 'nl80211_obss_pd_attributes': (None, None, None), + 'nl80211_bss_color_attributes': (None, None, None), + 'nl80211_iftype_akm_attributes': (None, None, None), + 'nl80211_fils_discovery_attributes': (None, None, None), + 'nl80211_unsol_bcast_probe_resp_attributes': (None, None, None), + 'nl80211_sae_pwe_mechanism': (None, None, None), + 'nl80211_sar_type': (None, None, None), + 'nl80211_sar_attrs': (None, None, None), + 'nl80211_sar_specs_attrs': (None, None, None), + 'nl80211_mbssid_config_attributes': (None, None, None), + 'nl80211_ap_settings_flags': (None, None, None), + 'nl80211_wiphy_radio_attrs': (None, None, None), + 'nl80211_wiphy_radio_freq_range': (None, None, None), } # File to be patched SOURCE_FILE = "epan/dissectors/packet-netlink-nl80211.c" @@ -169,7 +189,7 @@ def remove_prefix(prefix, text): return text def make_hf_defs(name, indent): - code = 'static gint hf_%s = -1;' % name + code = 'static int hf_%s;' % name return code def make_hf(name, indent): @@ -202,7 +222,7 @@ def make_hf(name, indent): return code def make_ett_defs(name, indent): - code = 'static gint ett_%s = -1;' % name + code = 'static int ett_%s;' % name return code def make_ett(name, indent): diff --git a/tools/generate-sysdig-event.py b/tools/generate-sysdig-event.py index 67419c8e..0cec2d3a 100755 --- a/tools/generate-sysdig-event.py +++ b/tools/generate-sysdig-event.py @@ -46,7 +46,7 @@ def get_url_lines(url): except urllib.error.URLError as err: exit_msg("URL error fetching {0}: {1}".format(url, err.reason)) except OSError as err: - exit_msg("OS error fetching {0}".format(url, err.strerror)) + exit_msg("OS error fetching {0}: {1}".format(url, err.strerror)) except Exception: exit_msg("Unexpected error:", 
sys.exc_info()[0]) @@ -274,7 +274,7 @@ def main(): strip_re_l.append(re.compile('^\s*{\s*&hf_param_.*},')) # Must all be on one line for strip_re in strip_re_l: - dissector_lines = [l for l in dissector_lines if not strip_re.search(l)] + dissector_lines = [line for line in dissector_lines if not strip_re.search(line)] # Find our value strings value_string_re = re.compile('static\s+const\s+value_string\s+([A-Za-z0-9_]+_vals)') @@ -290,7 +290,7 @@ def main(): header_fields_re = re.compile('/\*\s+' + header_fields_c, flags = re.IGNORECASE) header_fields_l = [] for hf_name in sorted(hf_d.keys()): - header_fields_l.append('static int {} = -1;'.format(hf_name)) + header_fields_l.append('static int {};'.format(hf_name)) event_names_c = 'Event names' event_names_re = re.compile('/\*\s+' + event_names_c, flags = re.IGNORECASE) diff --git a/tools/generate_authors.py b/tools/generate_authors.py index a74ef1c4..a536b850 100755 --- a/tools/generate_authors.py +++ b/tools/generate_authors.py @@ -16,10 +16,8 @@ # SPDX-License-Identifier: GPL-2.0-or-later import argparse -import io import re import subprocess -import sys def get_git_authors(): @@ -29,7 +27,7 @@ def get_git_authors(): ''' GIT_LINE_REGEX = r"^\s*\d+\s+([^<]*)\s*<([^>]*)>" cmd = "git --no-pager shortlog --email --summary HEAD".split(' ') - # check_output is used for Python 3.4 compatability + # check_output is used for Python 3.4 compatibility git_cmd_output = subprocess.check_output(cmd, universal_newlines=True, encoding='utf-8') git_authors = [] @@ -107,7 +105,7 @@ def generate_git_contributors_text(contributors_emails, git_authors_emails): return "\n".join(output_lines) -# Read authos file until we find gitlog entries, then stop +# Read authors file until we find gitlog entries, then stop def read_authors(parsed_args): lines = [] with open(parsed_args.authors[0], 'r', encoding='utf-8') as fh: diff --git a/tools/indexcap.py b/tools/indexcap.py index d18e76f6..31f4e397 100755 --- a/tools/indexcap.py +++ 
b/tools/indexcap.py @@ -207,7 +207,7 @@ def main(): if options.dissect_files and not options.list_all_files and not options.list_all_proto_files: parser.error("--list-all-files or --list-all-proto-files must be specified") - if options.dissect_files and not options.compare_dir is None: + if options.dissect_files and options.compare_dir is not None: parser.error("--dissect-files and --compare-dir cannot be specified at the same time") index_file_name = args.pop(0) @@ -236,15 +236,15 @@ def main(): print(indexed_files) tshark_bin = find_tshark_executable(options.bin_dir) - if not tshark_bin is None: + if tshark_bin is not None: print("tshark: %s [FOUND]" % tshark_bin) else: print("tshark: %s [MISSING]" % tshark_bin) exit(1) - if not options.compare_dir is None: + if options.compare_dir is not None: tshark_cmp = find_tshark_executable(options.compare_dir) - if not tshark_cmp is None: + if tshark_cmp is not None: print("tshark: %s [FOUND]" % tshark_cmp) else: print("tshark: %s [MISSING]" % tshark_cmp) diff --git a/tools/json2pcap/json2pcap.py b/tools/json2pcap/json2pcap.py index 2a059ad0..baa64b64 100755 --- a/tools/json2pcap/json2pcap.py +++ b/tools/json2pcap/json2pcap.py @@ -28,7 +28,7 @@ from scapy import all as scapy class AnonymizedField: ''' The Anonymization field object specifying anonymization - :filed arg: field name + :field arg: field name :type arg: anonymization type [0 masking 0xff, 1 anonymization shake_256] :start arg: If specified, the anonymization starts at given byte number :end arg: If specified, the anonymization ends at given byte number diff --git a/tools/lemon/CMakeLists.txt b/tools/lemon/CMakeLists.txt index 529eeae1..b7ba7543 100644 --- a/tools/lemon/CMakeLists.txt +++ b/tools/lemon/CMakeLists.txt @@ -12,6 +12,8 @@ add_executable(lemon lemon.c) if(DEFINED LEMON_C_COMPILER) set(CMAKE_C_COMPILER "${LEMON_C_COMPILER}") set(CMAKE_C_FLAGS "") + set(CMAKE_EXE_LINKER_FLAGS "") + set(CMAKE_SYSROOT "") endif() # To keep lemon.c as close to upstream 
as possible disable all warnings diff --git a/tools/lemon/lemon.c b/tools/lemon/lemon.c index 869ac580..c4b48811 100644 --- a/tools/lemon/lemon.c +++ b/tools/lemon/lemon.c @@ -2106,6 +2106,7 @@ int OptInit(char **a, struct s_options *o, FILE *err) if( g_argv && *g_argv && op ){ int i; for(i=1; g_argv[i]; i++){ + if( strcmp(g_argv[i],"--")==0 ) break; if( g_argv[i][0]=='+' || g_argv[i][0]=='-' ){ errcnt += handleflags(i,err); }else if( strchr(g_argv[i],'=') ){ @@ -5847,7 +5848,7 @@ int Configtable_insert(struct config *data) newnp->from = &(array.ht[h]); array.ht[h] = newnp; } - /* free(x4a->tbl); // This code was originall written for 16-bit machines. + /* free(x4a->tbl); // This code was originally written for 16-bit machines. ** on modern machines, don't worry about freeing this trival amount of ** memory. */ *x4a = array; diff --git a/tools/lemon/patches/01-lemon-dashdash.patch b/tools/lemon/patches/01-lemon-dashdash.patch new file mode 100644 index 00000000..1c204574 --- /dev/null +++ b/tools/lemon/patches/01-lemon-dashdash.patch @@ -0,0 +1,14 @@ +Don't try to parse flags and options that are after "--". This makes it +possible to prevent a filename path with an '=' in it from being processed +as an option. 
+SPDX-License-Identifier: CC0-1.0 +--- a/lemon.c ++++ b/lemon.c +@@ -2106,6 +2106,7 @@ int OptInit(char **a, struct s_options *o, FILE *err) + if( g_argv && *g_argv && op ){ + int i; + for(i=1; g_argv[i]; i++){ ++ if( strcmp(g_argv[i],"--")==0 ) break; + if( g_argv[i][0]=='+' || g_argv[i][0]=='-' ){ + errcnt += handleflags(i,err); + }else if( strchr(g_argv[i],'=') ){ diff --git a/tools/macos-setup-brew.sh b/tools/macos-setup-brew.sh index 910f7e86..da1a0aa9 100755 --- a/tools/macos-setup-brew.sh +++ b/tools/macos-setup-brew.sh @@ -23,6 +23,7 @@ function print_usage() { printf "\\t--install-dmg-deps: install packages required to build the .dmg file\\n" printf "\\t--install-sparkle-deps: install the Sparkle automatic updater\\n" printf "\\t--install-all: install everything\\n" + printf "\\t--install-logray: install everything to compile Logray and falco bridge\\n" printf "\\t[other]: other options are passed as-is to apt\\n" } @@ -46,6 +47,7 @@ INSTALL_DOC_DEPS=0 INSTALL_DMG_DEPS=0 INSTALL_SPARKLE_DEPS=0 INSTALL_TEST_DEPS=0 +INSTALL_LOGRAY=0 OPTIONS=() for arg; do case $arg in @@ -68,6 +70,9 @@ for arg; do --install-test-deps) INSTALL_TEST_DEPS=1 ;; + --install-logray) + INSTALL_LOGRAY=1 + ;; --install-all) INSTALL_OPTIONAL=1 INSTALL_DOC_DEPS=1 @@ -108,11 +113,15 @@ ADDITIONAL_LIST=( libsmi libssh libxml2 + lua lz4 minizip + minizip-ng + opencore-amr opus snappy spandsp + zlib-ng zstd ) @@ -122,6 +131,12 @@ DOC_DEPS_LIST=( docbook-xsl ) +LOGRAY_LIST=( + jsoncpp + onetbb + re2 +) + ACTUAL_LIST=( "${BUILD_LIST[@]}" "${REQUIRED_LIST[@]}" ) # Now arrange for optional support libraries @@ -133,16 +148,16 @@ if [ $INSTALL_DOC_DEPS -ne 0 ] ; then ACTUAL_LIST+=( "${DOC_DEPS_LIST[@]}" ) fi +if [ $INSTALL_LOGRAY -ne 0 ] ; then + ACTUAL_LIST+=( "${LOGRAY_LIST[@]}" ) +fi + if (( ${#OPTIONS[@]} != 0 )); then ACTUAL_LIST+=( "${OPTIONS[@]}" ) fi install_formulae "${ACTUAL_LIST[@]}" -if [ $INSTALL_OPTIONAL -ne 0 ] ; then - brew install lua@5.1 || printf "Lua 5.1 installation 
failed.\\n" -fi - if [ $INSTALL_DMG_DEPS -ne 0 ] ; then printf "Sorry, you'll have to install dmgbuild yourself for the time being.\\n" # pip3 install dmgbuild @@ -157,6 +172,28 @@ if [ $INSTALL_TEST_DEPS -ne 0 ] ; then # pip3 install pytest pytest-xdist fi +if [ $INSTALL_LOGRAY -ne 0 ] ; then + FALCO_LIBS_VERSION=0.17.1 + if [ "$FALCO_LIBS_VERSION" ] && [ ! -f "falco-libs-$FALCO_LIBS_VERSION-done" ] ; then + echo "Downloading, building, and installing libsinsp and libscap:" + [ -f "falco-libs-$FALCO_LIBS_VERSION.tar.gz" ] || curl -L -O --remote-header-name "https://github.com/falcosecurity/libs/archive/refs/tags/$FALCO_LIBS_VERSION.tar.gz" + mv "libs-$FALCO_LIBS_VERSION.tar.gz" "falco-libs-$FALCO_LIBS_VERSION.tar.gz" + tar -xf "falco-libs-$FALCO_LIBS_VERSION.tar.gz" + mv "libs-$FALCO_LIBS_VERSION" "falco-libs-$FALCO_LIBS_VERSION" + cd "falco-libs-$FALCO_LIBS_VERSION" + patch -p1 < "../tools/macos-setup-patches/falco-uthash_h-install.patch" + mkdir build_dir + cd build_dir + cmake -DBUILD_SHARED_LIBS=ON -DMINIMAL_BUILD=ON -DCREATE_TEST_TARGETS=OFF \ + -DUSE_BUNDLED_DEPS=ON -DUSE_BUNDLED_CARES=OFF -DUSE_BUNDLED_ZLIB=OFF \ + -DUSE_BUNDLED_JSONCPP=OFF -DUSE_BUNDLED_TBB=OFF -DUSE_BUNDLED_RE2=OFF \ + .. + make + sudo make install + cd ../.. + fi +fi + # Uncomment to add PNG compression utilities used by compress-pngs: # brew install advancecomp optipng oxipng pngcrush diff --git a/tools/macos-setup-patches/falco-include-dirs.patch b/tools/macos-setup-patches/falco-include-dirs.patch new file mode 100644 index 00000000..68f5f228 --- /dev/null +++ b/tools/macos-setup-patches/falco-include-dirs.patch @@ -0,0 +1,15 @@ +758865ee6 update(cmake): Don't add build directories to our pc files +diff --git a/userspace/libsinsp/CMakeLists.txt b/userspace/libsinsp/CMakeLists.txt +index 6104603e8..1989ea3fb 100644 +--- a/userspace/libsinsp/CMakeLists.txt ++++ b/userspace/libsinsp/CMakeLists.txt +@@ -335,6 +335,9 @@ endforeach() + # Build our pkg-config "Cflags:" flags. 
+ set(SINSP_PKG_CONFIG_INCLUDES "") + foreach(sinsp_include_directory ${LIBSINSP_INCLUDE_DIRS}) ++ if (${sinsp_include_directory} MATCHES "^${CMAKE_SOURCE_DIR}" OR ${sinsp_include_directory} MATCHES "^${CMAKE_BINARY_DIR}") ++ continue() ++ endif() + list(APPEND SINSP_PKG_CONFIG_INCLUDES -I${sinsp_include_directory}) + endforeach() + diff --git a/tools/macos-setup-patches/falco-uthash_h-install.patch b/tools/macos-setup-patches/falco-uthash_h-install.patch new file mode 100644 index 00000000..7e1f7855 --- /dev/null +++ b/tools/macos-setup-patches/falco-uthash_h-install.patch @@ -0,0 +1,9 @@ +--- falco-libs-0.14.1/cmake/modules/libscap.cmake.orig 2024-03-25 22:46:40 ++++ falco-libs-0.14.1/cmake/modules/libscap.cmake 2024-03-25 22:46:10 +@@ -139,5 +139,6 @@ + FILES_MATCHING PATTERN "*.h") + install(FILES ${PROJECT_BINARY_DIR}/libscap/scap_config.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${LIBS_PACKAGE_NAME}/libscap) + install(FILES ${PROJECT_BINARY_DIR}/libscap/scap_strl_config.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${LIBS_PACKAGE_NAME}/libscap) ++install(FILES ${PROJECT_BINARY_DIR}/uthash-prefix/src/uthash/src/uthash.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${LIBS_PACKAGE_NAME}/libscap) + install(FILES ${PROJECT_BINARY_DIR}/libscap/libscap.pc DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig) + endif() diff --git a/tools/macos-setup-patches/glib-pkgconfig.patch b/tools/macos-setup-patches/glib-pkgconfig.patch new file mode 100644 index 00000000..07761460 --- /dev/null +++ b/tools/macos-setup-patches/glib-pkgconfig.patch @@ -0,0 +1,10 @@ +--- gobject-2.0.pc.in.orig 2011-12-30 22:08:27.000000000 +0100 ++++ gobject-2.0.pc.in 2011-12-30 22:09:06.000000000 +0100 +@@ -7,6 +7,6 @@ + Description: GLib Type, Object, Parameter and Signal Library + Requires: glib-2.0 + Version: @VERSION@ +-Libs: -L${libdir} -lgobject-2.0 ++Libs: -L${libdir} -lgobject-2.0 -lffi + Libs.private: @LIBFFI_LIBS@ + Cflags: diff --git a/tools/macos-setup-patches/gnutls-pkgconfig.patch 
b/tools/macos-setup-patches/gnutls-pkgconfig.patch new file mode 100644 index 00000000..f0ad93ec --- /dev/null +++ b/tools/macos-setup-patches/gnutls-pkgconfig.patch @@ -0,0 +1,8 @@ +--- gnutls.pc.in.orig 2012-05-27 02:08:48.000000000 +0200 ++++ gnutls.pc.in 2012-05-27 02:11:39.000000000 +0200 +@@ -21,5 +21,4 @@ + Version: @VERSION@ + Libs: -L${libdir} -lgnutls + Libs.private: @LTLIBGCRYPT@ @LTLIBNETTLE@ @NETTLE_LIBS@ @GNUTLS_ZLIB_LIBS_PRIVATE@ +-@GNUTLS_REQUIRES_PRIVATE@ + Cflags: -I${includedir} diff --git a/tools/macos-setup-patches/qt-fix-pc-file b/tools/macos-setup-patches/qt-fix-pc-file new file mode 100755 index 00000000..fa9ba54b --- /dev/null +++ b/tools/macos-setup-patches/qt-fix-pc-file @@ -0,0 +1,24 @@ +#! /bin/sh +# +# Fix a Qt .pc file's flags. +# +# Wireshark - Network traffic analyzer +# By Gerald Combs +# Copyright 2014 Gerald Combs +# +# SPDX-License-Identifier: GPL-2.0-or-later +# +# Fix a single Qt .pc file to provide the right flags; a -F flag is +# needed to point to the directory under which the frameworks are +# placed, and a -I flag needs to point to the directory where +# the include files for the component in question are place in +# the framework's directory hierarchy, rather thany to where the +# include files *would* be if the component *weren't* distributed +# as a framework. +# +ed - "$1" < +# Copyright 2014 Gerald Combs +# +# SPDX-License-Identifier: GPL-2.0-or-later +# +# See bug QTBUG-35256 for the full painful story. Shorter version: +# the macOS Qt packages provide the Qt components as frameworks, but +# the .pc files don't generate the right CFLAGS/CXXFLAGS to make +# that work, so autoconf doesn't work correctly. 
+# +if [ "$#" != 1 ] +then + echo "Usage: qt-fix-pc-files " 1>&1 + exit 1 +fi +find "$1" -name "*.pc" -exec tools/macos-setup-patches/qt-fix-pc-file {} ";" diff --git a/tools/macos-setup-patches/snappy-signed.patch b/tools/macos-setup-patches/snappy-signed.patch new file mode 100644 index 00000000..0018fcc4 --- /dev/null +++ b/tools/macos-setup-patches/snappy-signed.patch @@ -0,0 +1,11 @@ +--- snappy.cc.orig 2023-09-14 01:04:05 ++++ snappy.cc 2023-09-14 01:04:28 +@@ -1290,7 +1290,7 @@ + DeferMemCopy(&deferred_src, &deferred_length, from, len); + } + } while (ip < ip_limit_min_slop && +- (op + deferred_length) < op_limit_min_slop); ++ static_cast(op + deferred_length) < op_limit_min_slop); + exit: + ip--; + assert(ip <= ip_limit); diff --git a/tools/macos-setup-patches/spandsp-configure-patch b/tools/macos-setup-patches/spandsp-configure-patch new file mode 100644 index 00000000..5a898f99 --- /dev/null +++ b/tools/macos-setup-patches/spandsp-configure-patch @@ -0,0 +1,53 @@ +*** configure.dist 2014-06-04 07:28:14.000000000 -0700 +--- configure 2017-08-07 00:16:39.000000000 -0700 +*************** +*** 19658,19664 **** + + case "${ax_cv_c_compiler_vendor}" in + gnu) +! COMP_VENDOR_CFLAGS="-std=gnu99 -ffast-math -Wall -Wunused-variable -Wunused-but-set-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS" + if test "$enable_avx" = "yes" ; then + COMP_VENDOR_CFLAGS="-mavx $COMP_VENDOR_CFLAGS" + fi +--- 19658,19664 ---- + + case "${ax_cv_c_compiler_vendor}" in + gnu) +! COMP_VENDOR_CFLAGS="-std=gnu99 -ffast-math -Wall -Wunused-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS" + if test "$enable_avx" = "yes" ; then + COMP_VENDOR_CFLAGS="-mavx $COMP_VENDOR_CFLAGS" + fi +*************** +*** 19733,19739 **** + + ;; + intel) +! 
COMP_VENDOR_CFLAGS="-std=c99 -D_POSIX_C_SOURCE=2 -D_GNU_SOURCE=1 -Wall -Wunused-variable -Wunused-but-set-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS" + if test "$enable_avx" = "yes" ; then + COMP_VENDOR_CFLAGS="-mavx $COMP_VENDOR_CFLAGS" + fi +--- 19733,19739 ---- + + ;; + intel) +! COMP_VENDOR_CFLAGS="-std=c99 -D_POSIX_C_SOURCE=2 -D_GNU_SOURCE=1 -Wall -Wunused-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS" + if test "$enable_avx" = "yes" ; then + COMP_VENDOR_CFLAGS="-mavx $COMP_VENDOR_CFLAGS" + fi +*************** +*** 19767,19773 **** + COMP_VENDOR_LDFLAGS= + ;; + *) +! COMP_VENDOR_CFLAGS="-std=c99 -Wall -Wunused-variable -Wunused-but-set-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS" + COMP_VENDOR_LDFLAGS= + ;; + esac +--- 19767,19773 ---- + COMP_VENDOR_LDFLAGS= + ;; + *) +! COMP_VENDOR_CFLAGS="-std=c99 -Wall -Wunused-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS" + COMP_VENDOR_LDFLAGS= + ;; + esac diff --git a/tools/macos-setup.sh b/tools/macos-setup.sh index 0017ffcf..13cb4547 100755 --- a/tools/macos-setup.sh +++ b/tools/macos-setup.sh @@ -10,29 +10,31 @@ # # SPDX-License-Identifier: GPL-2.0-or-later +set -e + shopt -s extglob # # Get the major version of Darwin, so we can check the major macOS # version. # -DARWIN_MAJOR_VERSION=`uname -r | sed 's/\([0-9]*\).*/\1/'` +DARWIN_MAJOR_VERSION=$(uname -r | sed 's/\([0-9]*\).*/\1/') # -# The minimum supported version of Qt is 5.9, so the minimum supported version -# of macOS is OS X 10.10 (Yosemite), aka Darwin 14.0. +# The minimum supported version of Qt is 5.11, so the minimum supported version +# of macOS is OS X 10.11 (El Capitan), aka Darwin 15.0. 
# -if [[ $DARWIN_MAJOR_VERSION -lt 14 ]]; then - echo "This script does not support any versions of macOS before Yosemite" 1>&2 +if [[ $DARWIN_MAJOR_VERSION -lt 15 ]]; then + echo "This script does not support any versions of macOS before El Capitan" 1>&2 exit 1 fi # # Get the processor architecture of Darwin. Currently supported: arm, i386 # -DARWIN_PROCESSOR_ARCH=`uname -p` +DARWIN_PROCESSOR_ARCH=$(uname -m) -if [ "$DARWIN_PROCESSOR_ARCH" != "arm" -a "$DARWIN_PROCESSOR_ARCH" != "i386" ]; then +if [ "$DARWIN_PROCESSOR_ARCH" != "arm64" ] && [ "$DARWIN_PROCESSOR_ARCH" != "x86_64" ]; then echo "This script does not support this processor architecture" 1>&2 exit 1 fi @@ -43,18 +45,19 @@ fi # # We use curl, but older versions of curl in older macOS releases can't -# handle some sites - including the xz site. +# handle some sites - including the xz site. We also use the --fail-with-body +# flag, which was added in curl 7.76.0. # -# If the version of curl in the system is older than 7.54.0, download +# If the version of curl in the system is older than 7.76.0, download # curl and install it. # -current_curl_version=`curl --version | sed -n 's/curl \([0-9.]*\) .*/\1/p'` -current_curl_major_version="`expr $current_curl_version : '\([0-9][0-9]*\).*'`" -current_curl_minor_version="`expr $current_curl_version : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`" +current_curl_version=$( curl --version | sed -n 's/curl \([0-9.]*\) .*/\1/p' ) +current_curl_major_version="$( expr "$current_curl_version" : '\([0-9][0-9]*\).*' )" +current_curl_minor_version="$(expr "$current_curl_version" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )" if [[ $current_curl_major_version -lt 7 || ($current_curl_major_version -eq 7 && $current_curl_minor_version -lt 54) ]]; then - CURL_VERSION=${CURL_VERSION-7.60.0} + CURL_VERSION=${CURL_VERSION-7.88.1} fi # @@ -72,17 +75,6 @@ fi # XZ_VERSION=5.2.5 -# -# Some packages need lzip to unpack their current source. 
-# -LZIP_VERSION=1.21 - -# -# The version of libPCRE on Catalina is insufficient to build glib due to -# missing UTF-8 support. -# -PCRE_VERSION=8.45 - # # CMake is required to do the build - and to build some of the # dependencies. @@ -94,29 +86,31 @@ CMAKE_VERSION=${CMAKE_VERSION-3.21.4} # claimed to build faster than make. # Comment it out if you don't want it. # -NINJA_VERSION=${NINJA_VERSION-1.10.2} +NINJA_VERSION=${NINJA_VERSION-1.12.1} +NINJA_SHA256=89a287444b5b3e98f88a945afa50ce937b8ffd1dcc59c555ad9b1baf855298c9 # # The following libraries and tools are required even to build only TShark. # -GETTEXT_VERSION=0.21 -GLIB_VERSION=2.76.6 +GETTEXT_VERSION=0.22.5 +GLIB_VERSION=2.80.3 if [ "$GLIB_VERSION" ]; then - GLIB_MAJOR_VERSION="`expr $GLIB_VERSION : '\([0-9][0-9]*\).*'`" - GLIB_MINOR_VERSION="`expr $GLIB_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`" - GLIB_DOTDOT_VERSION="`expr $GLIB_VERSION : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*'`" - GLIB_MAJOR_MINOR_VERSION=$GLIB_MAJOR_VERSION.$GLIB_MINOR_VERSION - GLIB_MAJOR_MINOR_DOTDOT_VERSION=$GLIB_MAJOR_VERSION.$GLIB_MINOR_VERSION.$GLIB_DOTDOT_VERSION + GLIB_MAJOR_VERSION="$( expr $GLIB_VERSION : '\([0-9][0-9]*\).*' )" + GLIB_MINOR_VERSION="$( expr $GLIB_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )" +# Unused? +# GLIB_DOTDOT_VERSION="$( expr $GLIB_VERSION : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*' )" +# GLIB_MAJOR_MINOR_VERSION=$GLIB_MAJOR_VERSION.$GLIB_MINOR_VERSION +# GLIB_MAJOR_MINOR_DOTDOT_VERSION=$GLIB_MAJOR_VERSION.$GLIB_MINOR_VERSION.$GLIB_DOTDOT_VERSION fi PKG_CONFIG_VERSION=0.29.2 # # libgpg-error is required for libgcrypt. # -LIBGPG_ERROR_VERSION=1.39 +LIBGPG_ERROR_VERSION=1.47 # # libgcrypt is required. # -LIBGCRYPT_VERSION=1.8.7 +LIBGCRYPT_VERSION=1.10.2 # # libpcre2 is required. 
# @@ -135,9 +129,9 @@ PCRE2_VERSION=10.39 QT_VERSION=${QT_VERSION-6.2.4} if [ "$QT_VERSION" ]; then - QT_MAJOR_VERSION="`expr $QT_VERSION : '\([0-9][0-9]*\).*'`" - QT_MINOR_VERSION="`expr $QT_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`" - QT_DOTDOT_VERSION="`expr $QT_VERSION : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*'`" + QT_MAJOR_VERSION="$( expr "$QT_VERSION" : '\([0-9][0-9]*\).*' )" + QT_MINOR_VERSION="$( expr "$QT_VERSION" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )" + QT_DOTDOT_VERSION="$( expr "$QT_VERSION" : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*' )" QT_MAJOR_MINOR_VERSION=$QT_MAJOR_VERSION.$QT_MINOR_VERSION QT_MAJOR_MINOR_DOTDOT_VERSION=$QT_MAJOR_VERSION.$QT_MINOR_VERSION.$QT_DOTDOT_VERSION fi @@ -148,15 +142,16 @@ fi # the optional libraries are required by other optional libraries. # LIBSMI_VERSION=0.4.8 -GNUTLS_VERSION=3.7.8 +GNUTLS_VERSION=3.8.4 +GNUTLS_SHA256=2bea4e154794f3f00180fa2a5c51fe8b005ac7a31cd58bd44cdfa7f36ebc3a9b if [ "$GNUTLS_VERSION" ]; then # # We'll be building GnuTLS, so we may need some additional libraries. # We assume GnuTLS can work with Nettle; newer versions *only* use # Nettle, not libgcrypt. 
# - GNUTLS_MAJOR_VERSION="`expr $GNUTLS_VERSION : '\([0-9][0-9]*\).*'`" - GNUTLS_MINOR_VERSION="`expr $GNUTLS_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`" + GNUTLS_MAJOR_VERSION="$( expr $GNUTLS_VERSION : '\([0-9][0-9]*\).*' )" + GNUTLS_MINOR_VERSION="$( expr $GNUTLS_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )" NETTLE_VERSION=3.9.1 # @@ -166,27 +161,28 @@ if [ "$GNUTLS_VERSION" ]; then # # And p11-kit - P11KIT_VERSION=0.25.0 + P11KIT_VERSION=0.25.3 # Which requires libtasn1 LIBTASN1_VERSION=4.19.0 fi -# Use 5.2.4, not 5.3, for now; lua_bitop.c hasn't been ported to 5.3 -# yet, and we need to check for compatibility issues (we'd want Lua -# scripts to work with 5.1, 5.2, and 5.3, as long as they only use Lua -# features present in all three versions) -LUA_VERSION=5.2.4 -SNAPPY_VERSION=1.1.10 -ZSTD_VERSION=1.5.5 +# lua_bitop.c has been ported to 5.3 and 5.4 so use the latest release. +# We may still need to check for compatibility issues (we'd want Lua +# scripts to work with 5.1 through 5.4, as long as they only use Lua +# features present in all versions) +LUA_VERSION=5.4.6 +SNAPPY_VERSION=1.2.1 +ZSTD_VERSION=1.5.6 +ZLIBNG_VERSION=2.1.6 LIBXML2_VERSION=2.11.5 LZ4_VERSION=1.9.4 SBC_VERSION=2.0 -CARES_VERSION=1.19.1 +CARES_VERSION=1.31.0 LIBSSH_VERSION=0.10.5 # mmdbresolve -MAXMINDDB_VERSION=1.4.3 -NGHTTP2_VERSION=1.56.0 -NGHTTP3_VERSION=0.15.0 +MAXMINDDB_VERSION=1.9.1 +NGHTTP2_VERSION=1.62.1 +NGHTTP3_VERSION=1.1.0 SPANDSP_VERSION=0.0.6 SPEEXDSP_VERSION=1.2.1 if [ "$SPANDSP_VERSION" ]; then @@ -198,8 +194,19 @@ fi BCG729_VERSION=1.1.1 # libilbc 3.0.0 & later link with absiel, which is released under Apache 2.0 ILBC_VERSION=2.0.2 +OPENCORE_AMR_VERSION=0.1.6 +OPENCORE_AMR_SHA256=483eb4061088e2b34b358e47540b5d495a96cd468e361050fae615b1809dc4a1 OPUS_VERSION=1.4 +# Falco libs (libsinsp and libscap) and their dependencies. Unset for now. 
+#FALCO_LIBS_VERSION=0.17.1 +if [ "$FALCO_LIBS_VERSION" ] ; then + JSONCPP_VERSION=1.9.5 + ONETBB_VERSION=2021.11.0 + # 2023-06-01 and later require Abseil. + RE2_VERSION=2022-06-01 +fi + # # Is /usr/bin/python3 a working version of Python? It may be, as it # might be a wrapper that runs the Python 3 that's part of Xcode. @@ -214,20 +221,27 @@ else # # No - install a Python package. # - PYTHON3_VERSION=3.9.5 + PYTHON3_VERSION=3.12.1 fi BROTLI_VERSION=1.0.9 # minizip +MINIZIPNG_VERSION=4.0.7 ZLIB_VERSION=1.3 # Uncomment to enable automatic updates using Sparkle -#SPARKLE_VERSION=2.1.0 +#SPARKLE_VERSION=2.2.2 # # Asciidoctor is required to build the documentation. # +# As the Asciidoctor Homebrew formula shows, managing all of the various +# dependencies can become quite hairy: +# https://github.com/Homebrew/homebrew-core/blob/master/Formula/a/asciidoctor.rb +# Maybe we should install a JRE and use AsciidoctorJ instead? ASCIIDOCTOR_VERSION=${ASCIIDOCTOR_VERSION-2.0.16} ASCIIDOCTORPDF_VERSION=${ASCIIDOCTORPDF_VERSION-1.6.1} +# css_parser 1.13 and later require Ruby 2.7 +CSS_PARSER_VERSION=${CSS_PARSER_VERSION-1.12.0} # # GNU autotools. They're not supplied with the macOS versions we # support, and we currently use them for minizip. @@ -237,35 +251,35 @@ AUTOMAKE_VERSION=1.16.5 LIBTOOL_VERSION=2.4.6 install_curl() { - if [ "$CURL_VERSION" -a ! -f curl-$CURL_VERSION-done ] ; then + if [ "$CURL_VERSION" ] && [ ! 
-f "curl-$CURL_VERSION-done" ] ; then echo "Downloading, building, and installing curl:" - [ -f curl-$CURL_VERSION.tar.bz2 ] || curl -L -O https://curl.haxx.se/download/curl-$CURL_VERSION.tar.bz2 || exit 1 + [ -f "curl-$CURL_VERSION.tar.bz2" ] || curl --fail --location --remote-name "https://curl.haxx.se/download/curl-$CURL_VERSION.tar.bz2" $no_build && echo "Skipping installation" && return - bzcat curl-$CURL_VERSION.tar.bz2 | tar xf - || exit 1 - cd curl-$CURL_VERSION - ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + bzcat "curl-$CURL_VERSION.tar.bz2" | tar xf - + cd "curl-$CURL_VERSION" + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. - touch curl-$CURL_VERSION-done + touch "curl-$CURL_VERSION-done" fi } uninstall_curl() { - if [ ! -z "$installed_curl_version" ] ; then + if [ -n "$installed_curl_version" ] ; then echo "Uninstalling curl:" - cd curl-$installed_curl_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + cd "curl-$installed_curl_version" + $DO_MAKE_UNINSTALL + make distclean cd .. - rm curl-$installed_curl_version-done + rm "curl-$installed_curl_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. # - rm -rf curl-$installed_curl_version - rm -rf curl-$installed_curl_version.tar.bz2 + rm -rf "curl-$installed_curl_version" + rm -rf "curl-$installed_curl_version.tar.bz2" fi installed_curl_version="" @@ -273,112 +287,83 @@ uninstall_curl() { } install_xz() { - if [ "$XZ_VERSION" -a ! -f xz-$XZ_VERSION-done ] ; then + if [ "$XZ_VERSION" ] && [ ! 
-f xz-$XZ_VERSION-done ] ; then echo "Downloading, building, and installing xz:" - [ -f xz-$XZ_VERSION.tar.bz2 ] || curl -L -O https://tukaani.org/xz/xz-$XZ_VERSION.tar.bz2 || exit 1 + [ -f xz-$XZ_VERSION.tar.bz2 ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://tukaani.org/xz/xz-$XZ_VERSION.tar.bz2 $no_build && echo "Skipping installation" && return - bzcat xz-$XZ_VERSION.tar.bz2 | tar xf - || exit 1 + bzcat xz-$XZ_VERSION.tar.bz2 | tar xf - cd xz-$XZ_VERSION # # This builds and installs liblzma, which libxml2 uses, and - # Wireshark uses liblzma, so we need to build this with + # Wireshark uses libxml2, so we need to build this with # all the minimum-deployment-version and SDK stuff. # - CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch xz-$XZ_VERSION-done fi } uninstall_xz() { - if [ ! -z "$installed_xz_version" ] ; then + if [ -n "$installed_xz_version" ] ; then echo "Uninstalling xz:" - cd xz-$installed_xz_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + cd "xz-$installed_xz_version" + $DO_MAKE_UNINSTALL + make distclean cd .. - rm xz-$installed_xz_version-done + rm "xz-$installed_xz_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. # - rm -rf xz-$installed_xz_version - rm -rf xz-$installed_xz_version.tar.bz2 + rm -rf "xz-$installed_xz_version" + rm -rf "xz-$installed_xz_version.tar.bz2" fi installed_xz_version="" fi } -install_lzip() { - if [ "$LZIP_VERSION" -a ! 
-f lzip-$LZIP_VERSION-done ] ; then - echo "Downloading, building, and installing lzip:" - [ -f lzip-$LZIP_VERSION.tar.gz ] || curl -L -O https://download.savannah.gnu.org/releases/lzip/lzip-$LZIP_VERSION.tar.gz || exit 1 - $no_build && echo "Skipping installation" && return - gzcat lzip-$LZIP_VERSION.tar.gz | tar xf - || exit 1 - cd lzip-$LZIP_VERSION - ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 - cd .. - touch lzip-$LZIP_VERSION-done - fi -} - uninstall_lzip() { - if [ ! -z "$installed_lzip_version" ] ; then + if [ -n "$installed_lzip_version" ] ; then echo "Uninstalling lzip:" - cd lzip-$installed_lzip_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + cd "lzip-$installed_lzip_version" + $DO_MAKE_UNINSTALL + make distclean cd .. - rm lzip-$installed_lzip_version-done + rm "lzip-$installed_lzip_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. # - rm -rf lzip-$installed_lzip_version - rm -rf lzip-$installed_lzip_version.tar.gz + rm -rf "lzip-$installed_lzip_version" + rm -rf "lzip-$installed_lzip_version.tar.gz" fi installed_lzip_version="" fi } -install_pcre() { - if [ "$PCRE_VERSION" -a ! -f pcre-$PCRE_VERSION-done ] ; then - echo "Downloading, building, and installing pcre:" - [ -f pcre-$PCRE_VERSION.tar.bz2 ] || curl -L -O https://sourceforge.net/projects/pcre/files/pcre/$PCRE_VERSION/pcre-$PCRE_VERSION.tar.bz2 || exit 1 - $no_build && echo "Skipping installation" && return - bzcat pcre-$PCRE_VERSION.tar.bz2 | tar xf - || exit 1 - cd pcre-$PCRE_VERSION - ./configure --enable-unicode-properties || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 - cd .. - touch pcre-$PCRE_VERSION-done - fi -} - uninstall_pcre() { - if [ ! 
-z "$installed_pcre_version" ] ; then - echo "Uninstalling pcre:" - cd pcre-$installed_pcre_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + if [ -n "$installed_pcre_version" ] ; then + echo "Uninstalling leftover pcre:" + cd "pcre-$installed_pcre_version" + $DO_MAKE_UNINSTALL + make distclean cd .. - rm pcre-$installed_pcre_version-done + rm "pcre-$installed_pcre_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. # - rm -rf pcre-$installed_pcre_version - rm -rf pcre-$installed_pcre_version.tar.bz2 + rm -rf "pcre-$installed_pcre_version" + rm -rf "pcre-$installed_pcre_version.tar.bz2" fi installed_pcre_version="" @@ -386,20 +371,19 @@ uninstall_pcre() { } install_pcre2() { - if [ "$PCRE2_VERSION" -a ! -f "pcre2-$PCRE2_VERSION-done" ] ; then + if [ "$PCRE2_VERSION" ] && [ ! -f "pcre2-$PCRE2_VERSION-done" ] ; then echo "Downloading, building, and installing pcre2:" - [ -f "pcre2-$PCRE2_VERSION.tar.bz2" ] || curl -L -O "https://github.com/PhilipHazel/pcre2/releases/download/pcre2-$PCRE2_VERSION/pcre2-10.39.tar.bz2" || exit 1 + [ -f "pcre2-$PCRE2_VERSION.tar.bz2" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://github.com/PhilipHazel/pcre2/releases/download/pcre2-$PCRE2_VERSION/pcre2-10.39.tar.bz2" $no_build && echo "Skipping installation" && return - bzcat "pcre2-$PCRE2_VERSION.tar.bz2" | tar xf - || exit 1 + bzcat "pcre2-$PCRE2_VERSION.tar.bz2" | tar xf - cd "pcre2-$PCRE2_VERSION" mkdir build_dir cd build_dir # https://github.com/Homebrew/homebrew-core/blob/master/Formula/pcre2.rb # https://github.com/microsoft/vcpkg/blob/master/ports/pcre2/portfile.cmake - MACOSX_DEPLOYMENT_TARGET=$min_osx_target SDKROOT="$SDKPATH" \ - $DO_CMAKE -DBUILD_STATIC_LIBS=OFF -DBUILD_SHARED_LIBS=ON -DPCRE2_SUPPORT_JIT=ON -DPCRE2_SUPPORT_UNICODE=ON .. 
|| exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + "${DO_CMAKE[@]}" -DBUILD_STATIC_LIBS=OFF -DBUILD_SHARED_LIBS=ON -DPCRE2_SUPPORT_JIT=ON -DPCRE2_SUPPORT_UNICODE=ON .. + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd ../.. touch "pcre2-$PCRE2_VERSION-done" fi @@ -412,7 +396,7 @@ uninstall_pcre2() { while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat "pcre2-$installed_pcre2_version/build_dir/install_manifest.txt"; echo) rm "pcre2-$installed_pcre2_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. # @@ -427,20 +411,20 @@ uninstall_pcre2() { install_autoconf() { if [ "$AUTOCONF_VERSION" -a ! -f autoconf-$AUTOCONF_VERSION-done ] ; then echo "Downloading, building and installing GNU autoconf..." - [ -f autoconf-$AUTOCONF_VERSION.tar.xz ] || curl -L -O ftp://ftp.gnu.org/gnu/autoconf/autoconf-$AUTOCONF_VERSION.tar.xz || exit 1 + [ -f autoconf-$AUTOCONF_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://ftp.gnu.org/gnu/autoconf/autoconf-$AUTOCONF_VERSION.tar.xz $no_build && echo "Skipping installation" && return - xzcat autoconf-$AUTOCONF_VERSION.tar.xz | tar xf - || exit 1 + xzcat autoconf-$AUTOCONF_VERSION.tar.xz | tar xf - cd autoconf-$AUTOCONF_VERSION - ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch autoconf-$AUTOCONF_VERSION-done fi } uninstall_autoconf() { - if [ ! -z "$installed_autoconf_version" ] ; then + if [ -n "$installed_autoconf_version" ] ; then # # automake and libtool depend on this, so uninstall them. # @@ -449,8 +433,8 @@ uninstall_autoconf() { echo "Uninstalling GNU autoconf:" cd autoconf-$installed_autoconf_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. 
rm autoconf-$installed_autoconf_version-done @@ -469,20 +453,20 @@ uninstall_autoconf() { install_automake() { if [ "$AUTOMAKE_VERSION" -a ! -f automake-$AUTOMAKE_VERSION-done ] ; then echo "Downloading, building and installing GNU automake..." - [ -f automake-$AUTOMAKE_VERSION.tar.xz ] || curl -L -O ftp://ftp.gnu.org/gnu/automake/automake-$AUTOMAKE_VERSION.tar.xz || exit 1 + [ -f automake-$AUTOMAKE_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://ftp.gnu.org/gnu/automake/automake-$AUTOMAKE_VERSION.tar.xz $no_build && echo "Skipping installation" && return - xzcat automake-$AUTOMAKE_VERSION.tar.xz | tar xf - || exit 1 + xzcat automake-$AUTOMAKE_VERSION.tar.xz | tar xf - cd automake-$AUTOMAKE_VERSION - ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch automake-$AUTOMAKE_VERSION-done fi } uninstall_automake() { - if [ ! -z "$installed_automake_version" ] ; then + if [ -n "$installed_automake_version" ] ; then # # libtool depends on this(?), so uninstall it. # @@ -490,8 +474,8 @@ uninstall_automake() { echo "Uninstalling GNU automake:" cd automake-$installed_automake_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm automake-$installed_automake_version-done @@ -510,26 +494,26 @@ uninstall_automake() { install_libtool() { if [ "$LIBTOOL_VERSION" -a ! -f libtool-$LIBTOOL_VERSION-done ] ; then echo "Downloading, building and installing GNU libtool..." 
- [ -f libtool-$LIBTOOL_VERSION.tar.xz ] || curl -L -O ftp://ftp.gnu.org/gnu/libtool/libtool-$LIBTOOL_VERSION.tar.xz || exit 1 + [ -f libtool-$LIBTOOL_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://ftp.gnu.org/gnu/libtool/libtool-$LIBTOOL_VERSION.tar.xz $no_build && echo "Skipping installation" && return - xzcat libtool-$LIBTOOL_VERSION.tar.xz | tar xf - || exit 1 + xzcat libtool-$LIBTOOL_VERSION.tar.xz | tar xf - cd libtool-$LIBTOOL_VERSION - ./configure --program-prefix=g || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + ./configure "${CONFIGURE_OPTS[@]}" --program-prefix=g + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch libtool-$LIBTOOL_VERSION-done fi } uninstall_libtool() { - if [ ! -z "$installed_libtool_version" ] ; then + if [ -n "$installed_libtool_version" ] ; then echo "Uninstalling GNU libtool:" cd libtool-$installed_libtool_version - $DO_MV /usr/local/bin/glibtool /usr/local/bin/libtool - $DO_MV /usr/local/bin/glibtoolize /usr/local/bin/libtoolize - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MV "$installation_prefix/bin/glibtool" "$installation_prefix/bin/libtool" + $DO_MV "$installation_prefix/bin/glibtoolize" "$installation_prefix/bin/libtoolize" + $DO_MAKE_UNINSTALL + make distclean cd .. rm libtool-$installed_libtool_version-done @@ -546,27 +530,28 @@ uninstall_libtool() { } install_ninja() { - if [ "$NINJA_VERSION" -a ! -f ninja-$NINJA_VERSION-done ] ; then + if [ "$NINJA_VERSION" ] && [ ! -f "ninja-$NINJA_VERSION-done" ] ; then echo "Downloading and installing Ninja:" # # Download the zipball, unpack it, and move the binary to - # /usr/local/bin. + # $installation_prefix/bin. 
# - [ -f ninja-mac-v$NINJA_VERSION.zip ] || curl -L -o ninja-mac-v$NINJA_VERSION.zip https://github.com/ninja-build/ninja/releases/download/v$NINJA_VERSION/ninja-mac.zip || exit 1 + [ -f "ninja-mac-v$NINJA_VERSION.zip" ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" "ninja-mac-v$NINJA_VERSION.zip" https://github.com/ninja-build/ninja/releases/download/v$NINJA_VERSION/ninja-mac.zip + echo "$NINJA_SHA256 ninja-mac-v$NINJA_VERSION.zip" | shasum --algorithm 256 --check $no_build && echo "Skipping installation" && return - unzip ninja-mac-v$NINJA_VERSION.zip - sudo mv ninja /usr/local/bin - touch ninja-$NINJA_VERSION-done + unzip "ninja-mac-v$NINJA_VERSION.zip" + sudo mv ninja "$installation_prefix/bin" + touch "ninja-$NINJA_VERSION-done" fi } uninstall_ninja() { - if [ ! -z "$installed_ninja_version" ]; then + if [ -n "$installed_ninja_version" ]; then echo "Uninstalling Ninja:" - sudo rm /usr/local/bin/ninja - rm ninja-$installed_ninja_version-done - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then - rm -f ninja-mac-v$installed_ninja_version.zip + $DO_RM "$installation_prefix/bin/ninja" + rm "ninja-$installed_ninja_version-done" + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then + rm -f "ninja-mac-v$installed_ninja_version.zip" fi installed_ninja_version="" @@ -576,13 +561,14 @@ uninstall_ninja() { install_asciidoctor() { if [ ! -f asciidoctor-${ASCIIDOCTOR_VERSION}-done ]; then echo "Downloading and installing Asciidoctor:" + $no_build && echo "Skipping installation" && return sudo gem install -V asciidoctor --version "=${ASCIIDOCTOR_VERSION}" touch asciidoctor-${ASCIIDOCTOR_VERSION}-done fi } uninstall_asciidoctor() { - if [ ! 
-z "$installed_asciidoctor_version" ]; then + if [ -n "$installed_asciidoctor_version" ]; then echo "Uninstalling Asciidoctor:" sudo gem uninstall -V asciidoctor --version "=${installed_asciidoctor_version}" rm asciidoctor-$installed_asciidoctor_version-done @@ -604,13 +590,15 @@ install_asciidoctorpdf() { ## record them for uninstallation ## ttfunk, pdf-core, prawn, prawn-table, Ascii85, ruby-rc4, hashery, afm, pdf-reader, prawn-templates, public_suffix, addressable, css_parser, prawn-svg, prawn-icon, safe_yaml, thread_safe, polyglot, treetop, asciidoctor-pdf echo "Downloading and installing Asciidoctor-pdf:" + $no_build && echo "Skipping installation" && return + sudo gem install -V css_parser --version "=${CSS_PARSER_VERSION}" sudo gem install -V asciidoctor-pdf --version "=${ASCIIDOCTORPDF_VERSION}" touch asciidoctorpdf-${ASCIIDOCTORPDF_VERSION}-done fi } uninstall_asciidoctorpdf() { - if [ ! -z "$installed_asciidoctorpdf_version" ]; then + if [ -n "$installed_asciidoctorpdf_version" ]; then echo "Uninstalling Asciidoctor:" sudo gem uninstall -V asciidoctor-pdf --version "=${installed_asciidoctorpdf_version}" ## XXX uninstall dependencies @@ -630,8 +618,9 @@ uninstall_asciidoctorpdf() { install_cmake() { if [ ! -f cmake-$CMAKE_VERSION-done ]; then echo "Downloading and installing CMake:" - CMAKE_MAJOR_VERSION="`expr $CMAKE_VERSION : '\([0-9][0-9]*\).*'`" - CMAKE_MINOR_VERSION="`expr $CMAKE_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`" + $no_build && echo "Skipping installation" && return + CMAKE_MAJOR_VERSION="$( expr "$CMAKE_VERSION" : '\([0-9][0-9]*\).*' )" + CMAKE_MINOR_VERSION="$( expr "$CMAKE_VERSION" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )" CMAKE_MAJOR_MINOR_VERSION=$CMAKE_MAJOR_VERSION.$CMAKE_MINOR_VERSION # @@ -654,7 +643,7 @@ install_cmake() { # 3.19.3 and later have a macos-universal DMG for 10.13 and later, # and a macos10.10-universal DMG for 10.10 and later. 
# - if [ "$CMAKE_MINOR_VERSION" -lt 5 ]; then + if [ "$CMAKE_MINOR_VERSION" -lt 10 ]; then echo "CMake $CMAKE_VERSION" is too old 1>&2 elif [ "$CMAKE_MINOR_VERSION" -lt 19 -o \ "$CMAKE_VERSION" = 3.19.0 -o \ @@ -666,17 +655,17 @@ install_cmake() { else type="macos10.0-universal" fi - [ -f cmake-$CMAKE_VERSION-$type.dmg ] || curl -L -O https://cmake.org/files/v$CMAKE_MAJOR_MINOR_VERSION/cmake-$CMAKE_VERSION-$type.dmg || exit 1 + [ -f cmake-$CMAKE_VERSION-$type.dmg ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://cmake.org/files/v$CMAKE_MAJOR_MINOR_VERSION/cmake-$CMAKE_VERSION-$type.dmg $no_build && echo "Skipping installation" && return - sudo hdiutil attach cmake-$CMAKE_VERSION-$type.dmg || exit 1 - sudo ditto /Volumes/cmake-$CMAKE_VERSION-$type/CMake.app /Applications/CMake.app || exit 1 + sudo hdiutil attach cmake-$CMAKE_VERSION-$type.dmg + sudo ditto /Volumes/cmake-$CMAKE_VERSION-$type/CMake.app /Applications/CMake.app # - # Plant the appropriate symbolic links in /usr/local/bin. + # Plant the appropriate symbolic links in $installation_prefix/bin. # It's a drag-install, so there's no installer to make them, # and the CMake code to put them in place is lame, as # - # 1) it defaults to /usr/bin, not /usr/local/bin; + # 1) it defaults to /usr/bin, not $installation_prefix/bin; # 2) it doesn't request the necessary root privileges; # 3) it can't be run from the command line; # @@ -684,7 +673,7 @@ install_cmake() { # for i in ccmake cmake cmake-gui cmakexbuild cpack ctest do - sudo ln -s /Applications/CMake.app/Contents/bin/$i /usr/local/bin/$i + sudo ln -s /Applications/CMake.app/Contents/bin/$i "$installation_prefix/bin/$i" done sudo hdiutil detach /Volumes/cmake-$CMAKE_VERSION-$type ;; @@ -697,9 +686,9 @@ install_cmake() { } uninstall_cmake() { - if [ ! 
-z "$installed_cmake_version" ]; then + if [ -n "$installed_cmake_version" ]; then echo "Uninstalling CMake:" - installed_cmake_major_version="`expr $installed_cmake_version : '\([0-9][0-9]*\).*'`" + installed_cmake_major_version="$( expr "$installed_cmake_version" : '\([0-9][0-9]*\).*' )" case "$installed_cmake_major_version" in 0|1|2) @@ -710,7 +699,7 @@ uninstall_cmake() { sudo rm -rf /Applications/CMake.app for i in ccmake cmake cmake-gui cmakexbuild cpack ctest do - sudo rm -f /usr/local/bin/$i + sudo rm -f "$installation_prefix/bin/$i" done rm cmake-$installed_cmake_version-done ;; @@ -739,6 +728,7 @@ install_meson() { # We have it. : else + $no_build && echo "Skipping installation" && return sudo pip3 install meson touch meson-done fi @@ -758,7 +748,7 @@ install_pytest() { # # Install pytest with pip3 if we don't have it already. # - if python3 -m pytest --version >/dev/null 2>&1 + if python3 -m pytest --version &> /dev/null || pytest --version &> /dev/null then # We have it. : @@ -781,122 +771,21 @@ uninstall_pytest() { install_gettext() { if [ ! -f gettext-$GETTEXT_VERSION-done ] ; then echo "Downloading, building, and installing GNU gettext:" - [ -f gettext-$GETTEXT_VERSION.tar.gz ] || curl -L -O https://ftp.gnu.org/pub/gnu/gettext/gettext-$GETTEXT_VERSION.tar.gz || exit 1 + [ -f gettext-$GETTEXT_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://ftp.gnu.org/pub/gnu/gettext/gettext-$GETTEXT_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat gettext-$GETTEXT_VERSION.tar.gz | tar xf - || exit 1 + gzcat gettext-$GETTEXT_VERSION.tar.gz | tar xf - cd gettext-$GETTEXT_VERSION - - # - # This is annoying. - # - # GNU gettext's configuration script checks for the presence of an - # implementation of iconv(). 
Not only does it check whether iconv() - # is available, *but* it checks for certain behavior *not* specified - # by POSIX that the GNU implementation provides, namely that an - # attempt to convert the UTF-8 for the EURO SYMBOL chaaracter to - # ISO 8859-1 results in an error. - # - # macOS, prior to Sierra, provided the GNU iconv library (as it's - # a POSIX API). - # - # Sierra appears to have picked up an implementation from FreeBSD - # (that implementation originated with the CITRUS project: - # - # http://citrus.bsdclub.org - # - # with additional work done to integrate it into NetBSD, and then - # adopted by FreeBSD with further work done). - # - # That implementation does *NOT* return an error in that case; instead, - # it transliterates the EURO SYMBOL to "EUR". - # - # Both behaviors conform to POSIX. - # - # This causes GNU gettext's configure script to conclude that it - # should not say iconv() is available. That, unfortunately, causes - # the build to fail with a linking error when trying to build - # libtextstyle (a library for which we have no use, that is offered - # as a separate library by the GNU project: - # - # https://www.gnu.org/software/gettext/libtextstyle/manual/libtextstyle.html - # - # and that is presumably bundled in GNU gettext because some gettext - # tool depends on it). The failure appears to be due to: - # - # libtextstyle's exported symbols file is generated from a - # template and a script that passes through only symbols - # that appear in a header file that declares the symbol - # as extern; - # - # one such header file declares iconv_ostream_create, but only - # if HAVE_ICONV is defined. 
- # - # the source file that defines iconv_ostream_create does so - # only if HAVE_ICONV is defined; - # - # the aforementioned script pays *NO ATTENTION* to #ifdefs, - # so it will include iconv_ostream_create in the list of - # symbols to export regardless of whether a working iconv() - # was found; - # - # the linker failing because it was told to export a symbol - # that doesn't exist. - # - # This is a collection of multiple messes: - # - # 1) not all versions of iconv() defaulting to "return an error - # if the target character set doesn't have a character that - # corresponds to the source character" and not offering a way - # to force that behavior; - # - # 2) either some parts of GNU gettext - and libraries bundled - # with it, for some mysterious reason - depending on the GNU - # behavior rather than assuming only what POSIX specifies, and - # the configure script checking for the GNU behavior and not - # setting HAVE_ICONV if it's not found; - # - # 3) the process for building the exported symbols file not - # removing symbols that won't exist in the build due to - # a "working" iconv() not being found; - # - # 4) the file that would define iconv_ostream_create() not - # defining as an always-failing stub if HAVE_ICONV isn't - # defined; - # - # 5) macOS's linker failing if a symbol is specified in an - # exported symbols file but not found, while other linkers - # just ignore it? (I add this because I'm a bit surprised - # that this has not been fixed, as I suspect it would fail - # on FreeBSD and possibly NetBSD as well, as I think their - # iconv()s also default to transliterating rather than failing - # if an input character has no corresponding character in - # the output encoding.) 
- # - # The Homebrew folks are aware of this and have reported it to - # Apple as a "feedback", for what that's worth: - # - # https://github.com/Homebrew/homebrew-core/commit/af3b4da5a096db3d9ee885e99ed29b33dec1f1c4 - # - # We adopt their fix, which is to run the configure script with - # "am_cv_func_iconv_works=y" as one of the arguments if it's - # running on Sonoma; in at least one test, doing so on Ventura - # caused the build to fail. - # - if [[ $DARWIN_MAJOR_VERSION -ge 23 ]]; then - workaround_arg="am_cv_func_iconv_works=y" - else - workaround_arg= - fi - CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure $workaround_arg || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch gettext-$GETTEXT_VERSION-done fi } uninstall_gettext() { - if [ ! -z "$installed_gettext_version" ] ; then + if [ -n "$installed_gettext_version" ] ; then # # GLib depends on this, so uninstall it. # @@ -904,8 +793,8 @@ uninstall_gettext() { echo "Uninstalling GNU gettext:" cd gettext-$installed_gettext_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm gettext-$installed_gettext_version-done @@ -924,24 +813,24 @@ uninstall_gettext() { install_pkg_config() { if [ ! 
-f pkg-config-$PKG_CONFIG_VERSION-done ] ; then echo "Downloading, building, and installing pkg-config:" - [ -f pkg-config-$PKG_CONFIG_VERSION.tar.gz ] || curl -L -O https://pkgconfig.freedesktop.org/releases/pkg-config-$PKG_CONFIG_VERSION.tar.gz || exit 1 + [ -f pkg-config-$PKG_CONFIG_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://pkgconfig.freedesktop.org/releases/pkg-config-$PKG_CONFIG_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat pkg-config-$PKG_CONFIG_VERSION.tar.gz | tar xf - || exit 1 + gzcat pkg-config-$PKG_CONFIG_VERSION.tar.gz | tar xf - cd pkg-config-$PKG_CONFIG_VERSION - ./configure --with-internal-glib || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS -Wno-int-conversion" ./configure "${CONFIGURE_OPTS[@]}" --with-internal-glib + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch pkg-config-$PKG_CONFIG_VERSION-done fi } uninstall_pkg_config() { - if [ ! -z "$installed_pkg_config_version" ] ; then + if [ -n "$installed_pkg_config_version" ] ; then echo "Uninstalling pkg-config:" cd pkg-config-$installed_pkg_config_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm pkg-config-$installed_pkg_config_version-done @@ -960,13 +849,13 @@ uninstall_pkg_config() { install_glib() { if [ ! -f glib-$GLIB_VERSION-done ] ; then echo "Downloading, building, and installing GLib:" - glib_dir=`expr $GLIB_VERSION : '\([0-9][0-9]*\.[0-9][0-9]*\).*'` + glib_dir=$( expr "$GLIB_VERSION" : '\([0-9][0-9]*\.[0-9][0-9]*\).*' ) # # Starting with GLib 2.28.8, xz-compressed tarballs are available. 
# - [ -f glib-$GLIB_VERSION.tar.xz ] || curl -L -O https://download.gnome.org/sources/glib/$glib_dir/glib-$GLIB_VERSION.tar.xz || exit 1 + [ -f glib-$GLIB_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://download.gnome.org/sources/glib/$glib_dir/glib-$GLIB_VERSION.tar.xz" $no_build && echo "Skipping installation" && return - xzcat glib-$GLIB_VERSION.tar.xz | tar xf - || exit 1 + xzcat glib-$GLIB_VERSION.tar.xz | tar xf - cd glib-$GLIB_VERSION # # First, determine where the system include files are. @@ -979,7 +868,7 @@ install_glib() { # # We need this for several things we do later. # - includedir=`SDKROOT="$SDKPATH" xcrun --show-sdk-path 2>/dev/null`/usr/include + includedir=$( SDKROOT="$SDKPATH" xcrun --show-sdk-path 2>/dev/null )/usr/include # # GLib's configuration procedure, whether autotools-based or # Meson-based, really likes to use pkg-config to find libraries, @@ -993,7 +882,7 @@ install_glib() { # # So, if we have a system-provided libffi, but pkg-config # doesn't find libffi, we construct a .pc file for that libffi, - # and install it in /usr/local/lib/pkgconfig. + # and install it in $installation_prefix/lib/pkgconfig. # # First, check whether pkg-config finds libffi but thinks its # header files are in a non-existent directory. That probaby @@ -1008,17 +897,17 @@ install_glib() { if pkg-config libffi ; then # We have a .pc file for libffi; what does it say the # include directory is? - incldir=`pkg-config --variable=includedir libffi` - if [ ! -z "$incldir" -a ! -d "$incldir" ] ; then + incldir=$( pkg-config --variable=includedir libffi ) + if [ -n "$incldir" -a ! -d "$incldir" ] ; then # Bogus - remove it, assuming - $DO_RM /usr/local/lib/pkgconfig/libffi.pc + $DO_RM "$installation_prefix/lib/pkgconfig/libffi.pc" fi fi if pkg-config libffi ; then # It found libffi; no need to install a .pc file, and we # don't want to overwrite what's there already. : - elif [ ! -e $includedir/ffi/ffi.h ] ; then + elif [ ! 
-e "$includedir"/ffi/ffi.h ] ; then # We don't appear to have libffi as part of the system, so # let the configuration process figure out what to do. # @@ -1054,7 +943,7 @@ install_glib() { # to the standard output, but running the last process in # the pipeline as root won't allow the shell that's # *running* it to open the .pc file if we don't have write - # permission on /usr/local/lib/pkgconfig, so we need a + # permission on $installation_prefix/lib/pkgconfig, so we need a # program that creates a file and then reads from the # standard input and writes to that file. UN*Xes have a # program that does that; it's called "tee". :-) @@ -1062,7 +951,7 @@ install_glib() { # However, it *also* writes the file to the standard output, # so we redirect that to /dev/null when we run it. # - cat <<"EOF" | sed "s;@INCLUDEDIR@;$includedir;" | $DO_TEE_TO_PC_FILE /usr/local/lib/pkgconfig/libffi.pc >/dev/null + cat <<"EOF" | sed "s;@INCLUDEDIR@;$includedir;" | $DO_TEE_TO_PC_FILE "$installation_prefix/lib/pkgconfig/libffi.pc" >/dev/null prefix=/usr libdir=${prefix}/lib includedir=@INCLUDEDIR@ @@ -1087,11 +976,11 @@ EOF *) case $GLIB_MINOR_VERSION in - [0-9]|1[0-9]|2[0-9]|3[0-7]) + [0-9]|1[0-9]|2[0-9]|3[0-9]|4[0-9]) echo "GLib $GLIB_VERSION" is too old 1>&2 ;; - 3[8-9]|4[0-9]|5[0-8]) + 5[0-8]) if [ ! 
-f ./configure ]; then LIBTOOLIZE=glibtoolize ./autogen.sh fi @@ -1111,15 +1000,17 @@ EOF # # https://bugzilla.gnome.org/show_bug.cgi?id=691608#c25 # - if grep -qs '#define.*MACOSX' $includedir/ffi/fficonfig.h + if grep -qs '#define.*MACOSX' "$includedir/ffi/fficonfig.h" then # It's defined, nothing to do - CFLAGS="$CFLAGS -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 + CFLAGS="$CFLAGS -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" else - CFLAGS="$CFLAGS -DMACOSX -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -DMACOSX -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 + CFLAGS="$CFLAGS -DMACOSX -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -DMACOSX -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" fi - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL ;; 59|[6-9][0-9]|[1-9][0-9][0-9]) @@ -1132,9 +1023,10 @@ EOF # # https://gitlab.gnome.org/GNOME/glib/-/issues/2902 # - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" $MESON -Dtests=false _build || exit 1 - ninja $MAKE_BUILD_OPTS -C _build || exit 1 - $DO_NINJA_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + $MESON -Dprefix="$installation_prefix" -Dtests=false _build + ninja -C _build + $DO_NINJA_INSTALL ;; *) echo "Glib's 
put out 1000 2.x releases?" 1>&2 @@ -1148,28 +1040,28 @@ EOF } uninstall_glib() { - if [ ! -z "$installed_glib_version" ] ; then + if [ -n "$installed_glib_version" ] ; then echo "Uninstalling GLib:" - cd glib-$installed_glib_version - installed_glib_major_version="`expr $installed_glib_version : '\([0-9][0-9]*\).*'`" - installed_glib_minor_version="`expr $installed_glib_version : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`" - installed_glib_dotdot_version="`expr $installed_glib_version : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*'`" - installed_glib_major_minor_version=$installed_glib_major_version.$installed_glib_minor_version - installed_glib_major_minor_dotdot_version=$installed_glib_major_version.$installed_glib_minor_version.$installed_glib_dotdot_version + cd "glib-$installed_glib_version" + installed_glib_major_version="$( expr "$installed_glib_version" : '\([0-9][0-9]*\).*' )" + installed_glib_minor_version="$( expr "$installed_glib_version" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )" + # installed_glib_dotdot_version="$( expr $installed_glib_version : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*' )" + # installed_glib_major_minor_version=$installed_glib_major_version.$installed_glib_minor_version + # installed_glib_major_minor_dotdot_version=$installed_glib_major_version.$installed_glib_minor_version.$installed_glib_dotdot_version # # GLib 2.59.1 and later use Meson+Ninja as the build system. # case $installed_glib_major_version in 1) - $DO_MAKE_UNINSTALL || exit 1 + $DO_MAKE_UNINSTALL # # This appears to delete dependencies out from under other # Makefiles in the tree, causing it to fail. 
At least until # that gets fixed, if it ever gets fixed, we just ignore the # exit status of "make distclean" # - # make distclean || exit 1 + # make distclean make distclean || echo "Ignoring make distclean failure" 1>&2 ;; @@ -1177,14 +1069,14 @@ uninstall_glib() { case $installed_glib_minor_version in [0-9]|1[0-9]|2[0-9]|3[0-9]|4[0-9]|5[0-8]) - $DO_MAKE_UNINSTALL || exit 1 + $DO_MAKE_UNINSTALL # # This appears to delete dependencies out from under other # Makefiles in the tree, causing it to fail. At least until # that gets fixed, if it ever gets fixed, we just ignore the # exit status of "make distclean" # - # make distclean || exit 1 + # make distclean make distclean || echo "Ignoring make distclean failure" 1>&2 ;; @@ -1194,7 +1086,7 @@ uninstall_glib() { # supports it, and I'm too lazy to add a dot-dot # version check. # - $DO_NINJA_UNINSTALL || exit 1 + $DO_NINJA_UNINSTALL # # For Meson+Ninja, we do the build in an _build # subdirectory, so the equivalent of "make distclean" @@ -1250,11 +1142,11 @@ install_qt() { 5) case $QT_MINOR_VERSION in - 0|1|2|3|4|5|6|7|8) + 0|1|2|3|4|5|6|7|8|9|10) echo "Qt $QT_VERSION" is too old 1>&2 ;; - 9|10|11|12|13|14) + 11|12|13|14) QT_VOLUME=qt-opensource-mac-x64-$QT_VERSION ;; *) @@ -1262,9 +1154,9 @@ install_qt() { ;; esac - [ -f $QT_VOLUME.dmg ] || curl -L -O https://download.qt.io/archive/qt/$QT_MAJOR_MINOR_VERSION/$QT_MAJOR_MINOR_DOTDOT_VERSION/$QT_VOLUME.dmg || exit 1 + [ -f $QT_VOLUME.dmg ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://download.qt.io/archive/qt/$QT_MAJOR_MINOR_VERSION/$QT_MAJOR_MINOR_DOTDOT_VERSION/$QT_VOLUME.dmg $no_build && echo "Skipping installation" && return - sudo hdiutil attach $QT_VOLUME.dmg || exit 1 + sudo hdiutil attach $QT_VOLUME.dmg # # Run the installer executable directly, so that we wait for @@ -1282,7 +1174,7 @@ install_qt() { } uninstall_qt() { - if [ ! 
-z "$installed_qt_version" ] ; then + if [ -n "$installed_qt_version" ] ; then echo "Uninstalling Qt:" rm -rf $HOME/Qt$installed_qt_version rm qt-$installed_qt_version-done @@ -1297,9 +1189,9 @@ uninstall_qt() { # 5.3 - 5.8: qt-opensource-mac-x64-clang-{version}.dmg # 5.9 - 5.14: qt-opensource-mac-x64-{version}.dmg # - installed_qt_major_version="`expr $installed_qt_version : '\([0-9][0-9]*\).*'`" - installed_qt_minor_version="`expr $installed_qt_version : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`" - installed_qt_dotdot_version="`expr $installed_qt_version : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*'`" + installed_qt_major_version="$( expr "$installed_qt_version" : '\([0-9][0-9]*\).*' )" + installed_qt_minor_version="$( expr "$installed_qt_version" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )" + # installed_qt_dotdot_version="$( expr "$installed_qt_version" : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*' )" case $installed_qt_major_version in 1|2|3|4) @@ -1309,14 +1201,10 @@ uninstall_qt() { 5*) case $installed_qt_minor_version in - 0|1|2|3|4|5) + 0|1|2|3|4|5|6|7|8) echo "Qt $installed_qt_version" is too old 1>&2 ;; - 6|7|8) - installed_qt_volume=qt-opensource-mac-x64-clang-$installed_qt_version.dmg - ;; - 9|10|11|12|13|14) installed_qt_volume=qt-opensource-mac-x64-$installed_qt_version.dmg ;; @@ -1332,24 +1220,24 @@ uninstall_qt() { install_libsmi() { if [ "$LIBSMI_VERSION" -a ! 
-f libsmi-$LIBSMI_VERSION-done ] ; then echo "Downloading, building, and installing libsmi:" - [ -f libsmi-$LIBSMI_VERSION.tar.gz ] || curl -L -O https://www.ibr.cs.tu-bs.de/projects/libsmi/download/libsmi-$LIBSMI_VERSION.tar.gz || exit 1 + [ -f libsmi-$LIBSMI_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.ibr.cs.tu-bs.de/projects/libsmi/download/libsmi-$LIBSMI_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat libsmi-$LIBSMI_VERSION.tar.gz | tar xf - || exit 1 + gzcat libsmi-$LIBSMI_VERSION.tar.gz | tar xf - cd libsmi-$LIBSMI_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch libsmi-$LIBSMI_VERSION-done fi } uninstall_libsmi() { - if [ ! -z "$installed_libsmi_version" ] ; then + if [ -n "$installed_libsmi_version" ] ; then echo "Uninstalling libsmi:" cd libsmi-$installed_libsmi_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm libsmi-$installed_libsmi_version-done @@ -1368,20 +1256,20 @@ uninstall_libsmi() { install_libgpg_error() { if [ "$LIBGPG_ERROR_VERSION" -a ! 
-f libgpg-error-$LIBGPG_ERROR_VERSION-done ] ; then echo "Downloading, building, and installing libgpg-error:" - [ -f libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 ] || curl -L -O https://www.gnupg.org/ftp/gcrypt/libgpg-error/libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 || exit 1 + [ -f libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.gnupg.org/ftp/gcrypt/libgpg-error/libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 $no_build && echo "Skipping installation" && return - bzcat libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 | tar xf - || exit 1 + bzcat libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 | tar xf - cd libgpg-error-$LIBGPG_ERROR_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch libgpg-error-$LIBGPG_ERROR_VERSION-done fi } uninstall_libgpg_error() { - if [ ! -z "$installed_libgpg_error_version" ] ; then + if [ -n "$installed_libgpg_error_version" ] ; then # # libgcrypt depends on this, so uninstall it. # @@ -1389,8 +1277,8 @@ uninstall_libgpg_error() { echo "Uninstalling libgpg-error:" cd libgpg-error-$installed_libgpg_error_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. 
rm libgpg-error-$installed_libgpg_error_version-done @@ -1418,9 +1306,9 @@ install_libgcrypt() { fi echo "Downloading, building, and installing libgcrypt:" - [ -f libgcrypt-$LIBGCRYPT_VERSION.tar.gz ] || curl -L -O https://www.gnupg.org/ftp/gcrypt/libgcrypt/libgcrypt-$LIBGCRYPT_VERSION.tar.gz || exit 1 + [ -f libgcrypt-$LIBGCRYPT_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.gnupg.org/ftp/gcrypt/libgcrypt/libgcrypt-$LIBGCRYPT_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat libgcrypt-$LIBGCRYPT_VERSION.tar.gz | tar xf - || exit 1 + gzcat libgcrypt-$LIBGCRYPT_VERSION.tar.gz | tar xf - cd libgcrypt-$LIBGCRYPT_VERSION # # The assembler language code is not compatible with the macOS @@ -1431,20 +1319,26 @@ install_libgcrypt() { # # https://lists.freebsd.org/pipermail/freebsd-ports-bugs/2010-October/198809.html # - CFLAGS="$CFLAGS -std=gnu89 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --disable-asm || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + # We specify "unix" as the random number generator so that we + # don't try to use getentropy, because random/rndgetentropy.c + # *REQUIRES* Linux getrandom(), which we don't have. (This should + # not matter, as we only use this for decryption, as far as I know.) + # + CFLAGS="$CFLAGS -std=gnu89 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" --disable-asm --enable-random=unix + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch libgcrypt-$LIBGCRYPT_VERSION-done fi } uninstall_libgcrypt() { - if [ ! 
-z "$installed_libgcrypt_version" ] ; then + if [ -n "$installed_libgcrypt_version" ] ; then echo "Uninstalling libgcrypt:" cd libgcrypt-$installed_libgcrypt_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm libgcrypt-$installed_libgcrypt_version-done @@ -1461,63 +1355,64 @@ uninstall_libgcrypt() { } install_gmp() { - if [ "$GMP_VERSION" -a ! -f gmp-$GMP_VERSION-done ] ; then + if [ "$GMP_VERSION" ] && [ ! -f "gmp-$GMP_VERSION-done" ] ; then echo "Downloading, building, and installing GMP:" - [ -f gmp-$GMP_VERSION.tar.lz ] || curl -L -O https://gmplib.org/download/gmp/gmp-$GMP_VERSION.tar.lz || exit 1 + [ -f "gmp-$GMP_VERSION.tar.xz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://gmplib.org/download/gmp/gmp-$GMP_VERSION.tar.xz $no_build && echo "Skipping installation" && return - lzip -c -d gmp-$GMP_VERSION.tar.lz | tar xf - || exit 1 - cd gmp-$GMP_VERSION + xzcat "gmp-$GMP_VERSION.tar.xz" | tar xf - + cd "gmp-$GMP_VERSION" # # Create a fat binary: https://gmplib.org/manual/Notes-for-Package-Builds.html # # According to # # https://www.mail-archive.com/gmp-bugs@gmplib.org/msg01492.html - # + # # and other pages, the Shiny New Linker in Xcode 15 causes this # build to fail with "ld: branch8 out of range 384833 in # ___gmpn_add_nc_x86_64"; linking with -ld64 is a workaround. # # For now, link with -ld64 on Xcode 15 and later. 
# - XCODE_VERSION=`xcodebuild -version | sed -n 's;Xcode \(.*\);\1;p'` - XCODE_MAJOR_VERSION="`expr $XCODE_VERSION : '\([0-9][0-9]*\).*'`" - XCODE_MINOR_VERSION="`expr $XCODE_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`" - XCODE_DOTDOT_VERSION="`expr $XCODE_VERSION : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*'`" + XCODE_VERSION=$( xcodebuild -version | sed -n 's;Xcode \(.*\);\1;p' ) + XCODE_MAJOR_VERSION="$( expr "$XCODE_VERSION" : '\([0-9][0-9]*\).*' )" + # XCODE_MINOR_VERSION="$( expr $XCODE_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )" + # XCODE_DOTDOT_VERSION="$( expr $XCODE_VERSION : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*' )" if [ "$XCODE_MAJOR_VERSION" -ge 15 ] then LD64_FLAG="-ld64" else LD64_FLAG="" fi - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS $LD64_FLAG" ./configure --enable-fat || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS $LD64_FLAG" \ + ./configure "${CONFIGURE_OPTS[@]}" --enable-fat + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. - touch gmp-$GMP_VERSION-done + touch "gmp-$GMP_VERSION-done" fi } uninstall_gmp() { - if [ ! -z "$installed_gmp_version" ] ; then + if [ -n "$installed_gmp_version" ] ; then # # Nettle depends on this, so uninstall it. # uninstall_nettle "$@" echo "Uninstalling GMP:" - cd gmp-$installed_gmp_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + cd "gmp-$installed_gmp_version" + $DO_MAKE_UNINSTALL + make distclean cd .. - rm gmp-$installed_gmp_version-done + rm "gmp-$installed_gmp_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. 
# - rm -rf gmp-$installed_gmp_version - rm -rf gmp-$installed_gmp_version.tar.lz + rm -rf "gmp-$installed_gmp_version" + rm -rf "gmp-$installed_gmp_version.tar.xz" fi installed_gmp_version="" @@ -1525,40 +1420,41 @@ uninstall_gmp() { } install_libtasn1() { - if [ "$LIBTASN1_VERSION" -a ! -f libtasn1-$LIBTASN1_VERSION-done ] ; then + if [ "$LIBTASN1_VERSION" ] && [ ! -f "libtasn1-$LIBTASN1_VERSION-done" ] ; then echo "Downloading, building, and installing libtasn1:" - [ -f libtasn1-$LIBTASN1_VERSION.tar.gz ] || curl -L -O https://ftpmirror.gnu.org/libtasn1/libtasn1-$LIBTASN1_VERSION.tar.gz || exit 1 + [ -f "libtasn1-$LIBTASN1_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://ftp.gnu.org/gnu/libtasn1/libtasn1-$LIBTASN1_VERSION.tar.gz" $no_build && echo "Skipping installation" && return - gzcat libtasn1-$LIBTASN1_VERSION.tar.gz | tar xf - || exit 1 - cd libtasn1-$LIBTASN1_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + gzcat "libtasn1-$LIBTASN1_VERSION.tar.gz" | tar xf - + cd "libtasn1-$LIBTASN1_VERSION" + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. - touch libtasn1-$LIBTASN1_VERSION-done + touch "libtasn1-$LIBTASN1_VERSION-done" fi } uninstall_libtasn1() { - if [ ! -z "$installed_libtasn1_version" ] ; then + if [ -n "$installed_libtasn1_version" ] ; then # # p11-kit depends on this, so uninstall it. # uninstall_p11_kit "$@" echo "Uninstalling libtasn1:" - cd libtasn1-$installed_libtasn1_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + cd "libtasn1-$installed_libtasn1_version" + $DO_MAKE_UNINSTALL + make distclean cd .. 
- rm libtasn1-$installed_libtasn1_version-done + rm "libtasn1-$installed_libtasn1_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. # - rm -rf libtasn1-$installed_libtasn1_version - rm -rf libtasn1-$installed_libtasn1_version.tar.gz + rm -rf "libtasn1-$installed_libtasn1_version" + rm -rf "libtasn1-$installed_libtasn1_version.tar.gz" fi installed_libtasn1_version="" @@ -1566,12 +1462,12 @@ uninstall_libtasn1() { } install_p11_kit() { - if [ "$P11KIT_VERSION" -a ! -f p11-kit-$P11KIT_VERSION-done ] ; then + if [ "$P11KIT_VERSION" ] && [ ! -f "p11-kit-$P11KIT_VERSION-done" ] ; then echo "Downloading, building, and installing p11-kit:" - [ -f p11-kit-$P11KIT_VERSION.tar.xz ] || curl -L -O https://github.com/p11-glue/p11-kit/releases/download/$P11KIT_VERSION/p11-kit-$P11KIT_VERSION.tar.xz || exit 1 + [ -f "p11-kit-$P11KIT_VERSION.tar.xz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://github.com/p11-glue/p11-kit/releases/download/$P11KIT_VERSION/p11-kit-$P11KIT_VERSION.tar.xz" $no_build && echo "Skipping installation" && return - xzcat p11-kit-$P11KIT_VERSION.tar.xz | tar xf - || exit 1 - cd p11-kit-$P11KIT_VERSION + xzcat "p11-kit-$P11KIT_VERSION.tar.xz" | tar xf - + cd "p11-kit-$P11KIT_VERSION" # # Prior to Catalina, the libffi that's supplied with macOS # doesn't support ffi_closure_alloc() or ffi_prep_closure_loc(), @@ -1587,34 +1483,35 @@ install_p11_kit() { # but it's not clear that this matters to us, so we just # configure p11-kit not to use libffi. 
# - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -L/usr/local/lib" LIBS=-lintl ./configure --without-libffi --without-trust-paths || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LIBS=-lintl \ + ./configure "${CONFIGURE_OPTS[@]}" --without-libffi --without-trust-paths + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. - touch p11-kit-$P11KIT_VERSION-done + touch "p11-kit-$P11KIT_VERSION-done" fi } uninstall_p11_kit() { - if [ ! -z "$installed_p11_kit_version" ] ; then + if [ -n "$installed_p11_kit_version" ] ; then # # Nettle depends on this, so uninstall it. # uninstall_nettle "$@" echo "Uninstalling p11-kit:" - cd p11-kit-$installed_p11_kit_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + cd "p11-kit-$installed_p11_kit_version" + $DO_MAKE_UNINSTALL + make distclean cd .. - rm p11-kit-$installed_p11_kit_version-done + rm "p11-kit-$installed_p11_kit_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. # - rm -rf p11-kit-$installed_p11_kit_version - rm -rf p11-kit-$installed_p11_kit_version.tar.xz + rm -rf "p11-kit-$installed_p11_kit_version" + rm -rf "p11-kit-$installed_p11_kit_version.tar.xz" fi installed_p11_kit_version="" @@ -1622,40 +1519,41 @@ uninstall_p11_kit() { } install_nettle() { - if [ "$NETTLE_VERSION" -a ! -f nettle-$NETTLE_VERSION-done ] ; then + if [ "$NETTLE_VERSION" ] && [ ! 
-f "nettle-$NETTLE_VERSION-done" ] ; then echo "Downloading, building, and installing Nettle:" - [ -f nettle-$NETTLE_VERSION.tar.gz ] || curl -L -O https://ftp.gnu.org/gnu/nettle/nettle-$NETTLE_VERSION.tar.gz || exit 1 + [ -f "nettle-$NETTLE_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://ftp.gnu.org/gnu/nettle/nettle-$NETTLE_VERSION.tar.gz" $no_build && echo "Skipping installation" && return - gzcat nettle-$NETTLE_VERSION.tar.gz | tar xf - || exit 1 - cd nettle-$NETTLE_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -I/usr/local/include" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -L/usr/local/lib" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + gzcat "nettle-$NETTLE_VERSION.tar.gz" | tar xf - + cd "nettle-$NETTLE_VERSION" + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. - touch nettle-$NETTLE_VERSION-done + touch "nettle-$NETTLE_VERSION-done" fi } uninstall_nettle() { - if [ ! -z "$installed_nettle_version" ] ; then + if [ -n "$installed_nettle_version" ] ; then # # GnuTLS depends on this, so uninstall it. # uninstall_gnutls "$@" echo "Uninstalling Nettle:" - cd nettle-$installed_nettle_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + cd "nettle-$installed_nettle_version" + $DO_MAKE_UNINSTALL + make distclean cd .. - rm nettle-$installed_nettle_version-done + rm "nettle-$installed_nettle_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. 
# - rm -rf nettle-$installed_nettle_version - rm -rf nettle-$installed_nettle_version.tar.gz + rm -rf "nettle-$installed_nettle_version" + rm -rf "nettle-$installed_nettle_version.tar.gz" fi installed_nettle_version="" @@ -1663,55 +1561,46 @@ uninstall_nettle() { } install_gnutls() { - if [ "$GNUTLS_VERSION" -a ! -f gnutls-$GNUTLS_VERSION-done ] ; then + if [ "$GNUTLS_VERSION" ] && [ ! -f "gnutls-$GNUTLS_VERSION-done" ] ; then # # GnuTLS requires Nettle. # - if [ -z $NETTLE_VERSION ] + if [ -z "$NETTLE_VERSION" ] then echo "GnuTLS requires Nettle, but you didn't install Nettle" 1>&2 exit 1 fi echo "Downloading, building, and installing GnuTLS:" - if [[ $GNUTLS_MAJOR_VERSION -ge 3 ]] - then - # - # Starting with GnuTLS 3.x, the tarballs are compressed with - # xz rather than bzip2. - # - [ -f gnutls-$GNUTLS_VERSION.tar.xz ] || curl -L -O https://www.gnupg.org/ftp/gcrypt/gnutls/v$GNUTLS_MAJOR_VERSION.$GNUTLS_MINOR_VERSION/gnutls-$GNUTLS_VERSION.tar.xz || exit 1 - $no_build && echo "Skipping installation" && return - xzcat gnutls-$GNUTLS_VERSION.tar.xz | tar xf - || exit 1 - else - [ -f gnutls-$GNUTLS_VERSION.tar.bz2 ] || curl -L -O https://www.gnupg.org/ftp/gcrypt/gnutls/v$GNUTLS_MAJOR_VERSION.$GNUTLS_MINOR_VERSION/gnutls-$GNUTLS_VERSION.tar.bz2 || exit 1 - $no_build && echo "Skipping installation" && return - bzcat gnutls-$GNUTLS_VERSION.tar.bz2 | tar xf - || exit 1 - fi + [ -f gnutls-$GNUTLS_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://www.gnupg.org/ftp/gcrypt/gnutls/v$GNUTLS_MAJOR_VERSION.$GNUTLS_MINOR_VERSION/gnutls-$GNUTLS_VERSION.tar.xz" + echo "$GNUTLS_SHA256 gnutls-$GNUTLS_VERSION.tar.xz" | shasum --algorithm 256 --check + $no_build && echo "Skipping installation" && return + tar -xf gnutls-$GNUTLS_VERSION.tar.xz cd gnutls-$GNUTLS_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -I /usr/local/include" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -I/usr/local/include/" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS 
-L/usr/local/lib" ./configure --with-included-unistring --disable-guile || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" --with-included-unistring --disable-guile + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch gnutls-$GNUTLS_VERSION-done fi } uninstall_gnutls() { - if [ ! -z "$installed_gnutls_version" ] ; then + if [ -n "$installed_gnutls_version" ] ; then echo "Uninstalling GnuTLS:" - cd gnutls-$installed_gnutls_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + cd "gnutls-$installed_gnutls_version" + $DO_MAKE_UNINSTALL + make distclean cd .. - rm gnutls-$installed_gnutls_version-done + rm "gnutls-$installed_gnutls_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. # - rm -rf gnutls-$installed_gnutls_version - rm -rf gnutls-$installed_gnutls_version.tar.bz2 + rm -rf "gnutls-$installed_gnutls_version" + rm -rf "gnutls-$installed_gnutls_version.tar.bz2" fi installed_gnutls_version="" @@ -1721,31 +1610,31 @@ uninstall_gnutls() { install_lua() { if [ "$LUA_VERSION" -a !
-f lua-$LUA_VERSION-done ] ; then echo "Downloading, building, and installing Lua:" - [ -f lua-$LUA_VERSION.tar.gz ] || curl -L -O https://www.lua.org/ftp/lua-$LUA_VERSION.tar.gz || exit 1 + [ -f lua-$LUA_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.lua.org/ftp/lua-$LUA_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat lua-$LUA_VERSION.tar.gz | tar xf - || exit 1 + gzcat lua-$LUA_VERSION.tar.gz | tar xf - cd lua-$LUA_VERSION - make MYCFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" MYLDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" $MAKE_BUILD_OPTS macosx || exit 1 - $DO_MAKE_INSTALL || exit 1 + make INSTALL_TOP="$installation_prefix" MYCFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" MYLDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" macosx + $DO_MAKE INSTALL_TOP="$installation_prefix" install cd .. touch lua-$LUA_VERSION-done fi } uninstall_lua() { - if [ ! -z "$installed_lua_version" ] ; then + if [ -n "$installed_lua_version" ] ; then echo "Uninstalling Lua:" # # Lua has no "make uninstall", so just remove stuff manually. # There's no configure script, so there's no need for # "make distclean", either; just do "make clean". # - (cd /usr/local/bin; $DO_RM -f lua luac) - (cd /usr/local/include; $DO_RM -f lua.h luaconf.h lualib.h lauxlib.h lua.hpp) - (cd /usr/local/lib; $DO_RM -f liblua.a) - (cd /usr/local/man/man1; $DO_RM -f lua.1 luac.1) + (cd "$installation_prefix/bin"; $DO_RM -f lua luac) + (cd "$installation_prefix/include"; $DO_RM -f lua.h luaconf.h lualib.h lauxlib.h lua.hpp) + (cd "$installation_prefix/lib"; $DO_RM -f liblua.a) + (cd "$installation_prefix/man/man1"; $DO_RM -f lua.1 luac.1) cd lua-$installed_lua_version - make clean || exit 1 + make clean cd .. rm lua-$installed_lua_version-done @@ -1764,13 +1653,13 @@ uninstall_lua() { install_snappy() { if [ "$SNAPPY_VERSION" -a ! 
-f snappy-$SNAPPY_VERSION-done ] ; then echo "Downloading, building, and installing snappy:" - [ -f snappy-$SNAPPY_VERSION.tar.gz ] || curl -L -o snappy-$SNAPPY_VERSION.tar.gz https://github.com/google/snappy/archive/$SNAPPY_VERSION.tar.gz || exit 1 + [ -f snappy-$SNAPPY_VERSION.tar.gz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" snappy-$SNAPPY_VERSION.tar.gz https://github.com/google/snappy/archive/$SNAPPY_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat snappy-$SNAPPY_VERSION.tar.gz | tar xf - || exit 1 + gzcat snappy-$SNAPPY_VERSION.tar.gz | tar xf - cd snappy-$SNAPPY_VERSION if [ "$SNAPPY_VERSION" = "1.1.10" ] ; then # This patch corresponds to https://github.com/google/snappy/commit/27f34a580be4a3becf5f8c0cba13433f53c21337 - patch -p0 <${topdir}/macosx-support-lib-patches/snappy-signed.patch || exit 1 + patch -p0 < "${topdir}/tools/macos-setup-patches/snappy-signed.patch" fi mkdir build_dir cd build_dir @@ -1781,44 +1670,44 @@ install_snappy() { # will carry that dependency with it, so linking with it should # Just Work. # - MACOSX_DEPLOYMENT_TARGET=$min_osx_target SDKROOT="$SDKPATH" $DO_CMAKE -DBUILD_SHARED_LIBS=YES -DSNAPPY_BUILD_BENCHMARKS=NO -DSNAPPY_BUILD_TESTS=NO ../ || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + "${DO_CMAKE[@]}" -DBUILD_SHARED_LIBS=YES -DSNAPPY_BUILD_BENCHMARKS=NO -DSNAPPY_BUILD_TESTS=NO .. + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd ../.. touch snappy-$SNAPPY_VERSION-done fi } uninstall_snappy() { - if [ ! -z "$installed_snappy_version" ] ; then + if [ -n "$installed_snappy_version" ] ; then echo "Uninstalling snappy:" cd snappy-$installed_snappy_version # # snappy uses cmake and doesn't support "make uninstall"; # just remove what we know it installs. 
# - # $DO_MAKE_UNINSTALL || exit 1 + # $DO_MAKE_UNINSTALL if [ -s build_dir/install_manifest.txt ] ; then while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat build_dir/install_manifest.txt; echo) else - $DO_RM -f /usr/local/lib/libsnappy.1.1.8.dylib \ - /usr/local/lib/libsnappy.1.dylib \ - /usr/local/lib/libsnappy.dylib \ - /usr/local/include/snappy-c.h \ - /usr/local/include/snappy-sinksource.h \ - /usr/local/include/snappy-stubs-public.h \ - /usr/local/include/snappy.h \ - /usr/local/lib/cmake/Snappy/SnappyConfig.cmake \ - /usr/local/lib/cmake/Snappy/SnappyConfigVersion.cmake \ - /usr/local/lib/cmake/Snappy/SnappyTargets-noconfig.cmake \ - /usr/local/lib/cmake/Snappy/SnappyTargets.cmake || exit 1 + $DO_RM -f "$installation_prefix/lib/libsnappy.1.1.8.dylib" \ + "$installation_prefix/lib/libsnappy.1.dylib" \ + "$installation_prefix/lib/libsnappy.dylib" \ + "$installation_prefix/include/snappy-c.h" \ + "$installation_prefix/include/snappy-sinksource.h" \ + "$installation_prefix/include/snappy-stubs-public.h" \ + "$installation_prefix/include/snappy.h" \ + "$installation_prefix/lib/cmake/Snappy/SnappyConfig.cmake" \ + "$installation_prefix/lib/cmake/Snappy/SnappyConfigVersion.cmake" \ + "$installation_prefix/lib/cmake/Snappy/SnappyTargets-noconfig.cmake" \ + "$installation_prefix/lib/cmake/Snappy/SnappyTargets.cmake" fi # # snappy uses cmake and doesn't support "make distclean"; #.just remove the entire build directory. # - # make distclean || exit 1 - rm -rf build_dir || exit 1 + # make distclean + rm -rf build_dir cd .. rm snappy-$installed_snappy_version-done @@ -1835,75 +1724,119 @@ uninstall_snappy() { } install_zstd() { - if [ "$ZSTD_VERSION" -a ! -f zstd-$ZSTD_VERSION-done ] ; then + if [ "$ZSTD_VERSION" ] && [ ! 
-f zstd-$ZSTD_VERSION-done ] ; then echo "Downloading, building, and installing zstd:" - [ -f zstd-$ZSTD_VERSION.tar.gz ] || curl -L -O https://github.com/facebook/zstd/releases/download/v$ZSTD_VERSION/zstd-$ZSTD_VERSION.tar.gz || exit 1 + [ -f zstd-$ZSTD_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/facebook/zstd/releases/download/v$ZSTD_VERSION/zstd-$ZSTD_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat zstd-$ZSTD_VERSION.tar.gz | tar xf - || exit 1 + gzcat zstd-$ZSTD_VERSION.tar.gz | tar xf - cd zstd-$ZSTD_VERSION - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + # We shouldn't have to specify DESTDIR. + # https://github.com/facebook/zstd/issues/3146 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + make PREFIX="$installation_prefix" DESTDIR="$installation_prefix" "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE PREFIX="$installation_prefix" install cd .. touch zstd-$ZSTD_VERSION-done fi } uninstall_zstd() { - if [ ! -z "$installed_zstd_version" ] ; then + if [ -n "$installed_zstd_version" ] ; then echo "Uninstalling zstd:" - cd zstd-$installed_zstd_version - $DO_MAKE_UNINSTALL || exit 1 + cd "zstd-$installed_zstd_version" + $DO_MAKE_UNINSTALL # # zstd has no configure script, so there's no need for # "make distclean", and the Makefile supplied with it # has no "make distclean" rule; just do "make clean". # - make clean || exit 1 + make clean cd .. - rm zstd-$installed_zstd_version-done + rm "zstd-$installed_zstd_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. 
# - rm -rf zstd-$installed_zstd_version - rm -rf zstd-$installed_zstd_version.tar.gz + rm -rf "zstd-$installed_zstd_version" + rm -rf "zstd-$installed_zstd_version.tar.gz" fi installed_zstd_version="" fi } +#$ZLIBNG_VERSION +install_zlibng() { + if [ "$ZLIBNG_VERSION" ] && [ ! -f zlib-ng-$ZLIBNG_VERSION-done ] ; then + echo "Downloading, building, and installing zlib-ng:" + [ -f $ZLIBNG_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/zlib-ng/zlib-ng/archive/refs/tags/$ZLIBNG_VERSION.tar.gz + $no_build && echo "Skipping installation" && return + gzcat $ZLIBNG_VERSION.tar.gz | tar xf - + cd zlib-ng-$ZLIBNG_VERSION + mkdir build + cd build + "${DO_CMAKE[@]}" .. + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL + cd ../.. + touch zlib-ng-$ZLIBNG_VERSION-done + fi +} + +uninstall_zlibng() { + if [ -n "$installed_zlibng_version" ] ; then + echo "Uninstalling zlibng:" + cd "zlib-ng-$installed_zlibng_version" + $DO_MAKE_UNINSTALL + # + # XXX not sure what to do here... + # + make clean + cd .. + rm "zlib-ng-$installed_zlibng_version-done" + + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then + # + # Get rid of the previously downloaded and unpacked version. + # + rm -rf "zlib-ng-$installed_zlibng_version" + rm -rf "zlib-ng-$installed_zlibng_version.tar.gz" + fi + installed_zlibng_version="" + fi +} install_libxml2() { if [ "$LIBXML2_VERSION" -a !
-f libxml2-$LIBXML2_VERSION-done ] ; then echo "Downloading, building, and installing libxml2:" - LIBXML2_MAJOR_VERSION="`expr $LIBXML2_VERSION : '\([0-9][0-9]*\).*'`" - LIBXML2_MINOR_VERSION="`expr $LIBXML2_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`" + LIBXML2_MAJOR_VERSION="$( expr "$LIBXML2_VERSION" : '\([0-9][0-9]*\).*' )" + LIBXML2_MINOR_VERSION="$( expr "$LIBXML2_VERSION" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )" LIBXML2_MAJOR_MINOR_VERSION=$LIBXML2_MAJOR_VERSION.$LIBXML2_MINOR_VERSION - [ -f libxml2-$LIBXML2_VERSION.tar.gz ] || curl -L -O https://download.gnome.org/sources/libxml2/$LIBXML2_MAJOR_MINOR_VERSION/libxml2-$LIBXML2_VERSION.tar.xz || exit 1 + [ -f libxml2-$LIBXML2_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://download.gnome.org/sources/libxml2/$LIBXML2_MAJOR_MINOR_VERSION/libxml2-$LIBXML2_VERSION.tar.xz $no_build && echo "Skipping installation" && return - xzcat libxml2-$LIBXML2_VERSION.tar.xz | tar xf - || exit 1 - cd libxml2-$LIBXML2_VERSION + xzcat libxml2-$LIBXML2_VERSION.tar.xz | tar xf - + cd "libxml2-$LIBXML2_VERSION" # # At least on macOS 12.0.1 with Xcode 13.1, when we build # libxml2, the linker complains that we don't have the right # to link with the Python framework, so don't build with # Python. # - CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --without-python || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" --without-python + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch libxml2-$LIBXML2_VERSION-done fi } uninstall_libxml2() { - if [ ! 
-z "$installed_libxml2_version" ] ; then + if [ -n "$installed_libxml2_version" ] ; then echo "Uninstalling libxml2:" cd libxml2-$installed_libxml2_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm libxml2-$installed_libxml2_version-done @@ -1920,7 +1853,7 @@ uninstall_libxml2() { } install_lz4() { - if [ "$LZ4_VERSION" -a ! -f lz4-$LZ4_VERSION-done ] ; then + if [ "$LZ4_VERSION" ] && [ ! -f lz4-$LZ4_VERSION-done ] ; then echo "Downloading, building, and installing lz4:" # # lz4 switched from sequentially numbered releases, named rN, @@ -1945,12 +1878,12 @@ install_lz4() { # if [[ "$LZ4_VERSION" == r* ]] then - [ -f lz4-$LZ4_VERSION.tar.gz ] || curl -L -o lz4-$LZ4_VERSION.tar.gz https://github.com/lz4/lz4/archive/$LZ4_VERSION.tar.gz || exit 1 + [ -f lz4-$LZ4_VERSION.tar.gz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" lz4-$LZ4_VERSION.tar.gz https://github.com/lz4/lz4/archive/$LZ4_VERSION.tar.gz else - [ -f lz4-$LZ4_VERSION.tar.gz ] || curl -L -o lz4-$LZ4_VERSION.tar.gz https://github.com/lz4/lz4/archive/v$LZ4_VERSION.tar.gz || exit 1 + [ -f lz4-$LZ4_VERSION.tar.gz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" lz4-$LZ4_VERSION.tar.gz https://github.com/lz4/lz4/archive/v$LZ4_VERSION.tar.gz fi $no_build && echo "Skipping installation" && return - gzcat lz4-$LZ4_VERSION.tar.gz | tar xf - || exit 1 + gzcat lz4-$LZ4_VERSION.tar.gz | tar xf - cd lz4-$LZ4_VERSION # # No configure script here, but it appears that if MOREFLAGS is @@ -1958,29 +1891,30 @@ install_lz4() { # and CXXFLAGS into FLAGS, which is used when building source # files and libraries. # - MOREFLAGS="-D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + MOREFLAGS="-D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" \ + make PREFIX="$installation_prefix" "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE PREFIX="$installation_prefix" install cd .. touch lz4-$LZ4_VERSION-done fi } uninstall_lz4() { - if [ ! 
-z "$installed_lz4_version" ] ; then + if [ -n "$installed_lz4_version" ] ; then echo "Uninstalling lz4:" - cd lz4-$installed_lz4_version - $DO_MAKE_UNINSTALL || exit 1 + cd "lz4-$installed_lz4_version" + $DO_MAKE_UNINSTALL # # lz4's Makefile doesn't support "make distclean"; just do # "make clean". Perhaps not using autotools means that # there's no need for "make distclean". # - # make distclean || exit 1 - make clean || exit 1 + # make distclean + make clean cd .. - rm lz4-$installed_lz4_version-done + rm "lz4-$installed_lz4_version-done" - if [ "$#" -eq 1 -a "$1" = "-r" ] ; then + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then # # Get rid of the previously downloaded and unpacked version. # @@ -1990,8 +1924,8 @@ uninstall_lz4() { # tree. Therefore, we have to remove the build tree # as root. # - sudo rm -rf lz4-$installed_lz4_version - rm -rf lz4-$installed_lz4_version.tar.gz + sudo rm -rf "lz4-$installed_lz4_version" + rm -rf "lz4-$installed_lz4_version.tar.gz" fi installed_lz4_version="" @@ -2001,28 +1935,30 @@ uninstall_lz4() { install_sbc() { if [ "$SBC_VERSION" -a ! 
-f sbc-$SBC_VERSION-done ] ; then echo "Downloading, building, and installing sbc:" - [ -f sbc-$SBC_VERSION.tar.gz ] || curl -L -O https://www.kernel.org/pub/linux/bluetooth/sbc-$SBC_VERSION.tar.gz || exit 1 + [ -f sbc-$SBC_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.kernel.org/pub/linux/bluetooth/sbc-$SBC_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat sbc-$SBC_VERSION.tar.gz | tar xf - || exit 1 + gzcat sbc-$SBC_VERSION.tar.gz | tar xf - cd sbc-$SBC_VERSION - if [ "$DARWIN_PROCESSOR_ARCH" = "arm" ] ; then - CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS -U__ARM_NEON__" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --disable-tools --disable-tester --disable-shared || exit 1 + if [ "$DARWIN_PROCESSOR_ARCH" = "arm64" ] ; then + CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS -U__ARM_NEON__" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" --disable-tools --disable-tester --disable-shared else - CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --disable-tools --disable-tester --disable-shared || exit 1 + CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" --disable-tools --disable-tester --disable-shared fi - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch sbc-$SBC_VERSION-done fi } uninstall_sbc() { - if [ ! 
-z "$installed_sbc_version" ] ; then + if [ -n "$installed_sbc_version" ] ; then echo "Uninstalling sbc:" cd sbc-$installed_sbc_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm sbc-$installed_sbc_version-done @@ -2041,24 +1977,25 @@ uninstall_sbc() { install_maxminddb() { if [ "$MAXMINDDB_VERSION" -a ! -f maxminddb-$MAXMINDDB_VERSION-done ] ; then echo "Downloading, building, and installing MaxMindDB API:" - [ -f libmaxminddb-$MAXMINDDB_VERSION.tar.gz ] || curl -L -O https://github.com/maxmind/libmaxminddb/releases/download/$MAXMINDDB_VERSION/libmaxminddb-$MAXMINDDB_VERSION.tar.gz || exit 1 + [ -f libmaxminddb-$MAXMINDDB_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/maxmind/libmaxminddb/releases/download/$MAXMINDDB_VERSION/libmaxminddb-$MAXMINDDB_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat libmaxminddb-$MAXMINDDB_VERSION.tar.gz | tar xf - || exit 1 + gzcat libmaxminddb-$MAXMINDDB_VERSION.tar.gz | tar xf - cd libmaxminddb-$MAXMINDDB_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch maxminddb-$MAXMINDDB_VERSION-done fi } uninstall_maxminddb() { - if [ ! -z "$installed_maxminddb_version" ] ; then + if [ -n "$installed_maxminddb_version" ] ; then echo "Uninstalling MaxMindDB API:" cd libmaxminddb-$installed_maxminddb_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. 
rm maxminddb-$installed_maxminddb_version-done @@ -2077,24 +2014,26 @@ uninstall_maxminddb() { install_c_ares() { if [ "$CARES_VERSION" -a ! -f c-ares-$CARES_VERSION-done ] ; then echo "Downloading, building, and installing C-Ares API:" - [ -f c-ares-$CARES_VERSION.tar.gz ] || curl -L -O https://c-ares.org/download/c-ares-$CARES_VERSION.tar.gz || exit 1 + # https://github.com/c-ares/c-ares/releases/download/v1.31.0/c-ares-1.31.0.tar.gz + [ -f c-ares-$CARES_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/c-ares/c-ares/releases/download/v$CARES_VERSION/c-ares-$CARES_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat c-ares-$CARES_VERSION.tar.gz | tar xf - || exit 1 + gzcat c-ares-$CARES_VERSION.tar.gz | tar xf - cd c-ares-$CARES_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch c-ares-$CARES_VERSION-done fi } uninstall_c_ares() { - if [ ! -z "$installed_cares_version" ] ; then + if [ -n "$installed_cares_version" ] ; then echo "Uninstalling C-Ares API:" cd c-ares-$installed_cares_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm c-ares-$installed_cares_version-done @@ -2113,42 +2052,42 @@ uninstall_c_ares() { install_libssh() { if [ "$LIBSSH_VERSION" -a ! 
-f libssh-$LIBSSH_VERSION-done ] ; then echo "Downloading, building, and installing libssh:" - LIBSSH_MAJOR_VERSION="`expr $LIBSSH_VERSION : '\([0-9][0-9]*\).*'`" - LIBSSH_MINOR_VERSION="`expr $LIBSSH_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`" + LIBSSH_MAJOR_VERSION="$( expr "$LIBSSH_VERSION" : '\([0-9][0-9]*\).*' )" + LIBSSH_MINOR_VERSION="$( expr "$LIBSSH_VERSION" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )" LIBSSH_MAJOR_MINOR_VERSION=$LIBSSH_MAJOR_VERSION.$LIBSSH_MINOR_VERSION - [ -f libssh-$LIBSSH_VERSION.tar.xz ] || curl -L -O https://www.libssh.org/files/$LIBSSH_MAJOR_MINOR_VERSION/libssh-$LIBSSH_VERSION.tar.xz + [ -f libssh-$LIBSSH_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.libssh.org/files/$LIBSSH_MAJOR_MINOR_VERSION/libssh-$LIBSSH_VERSION.tar.xz $no_build && echo "Skipping installation" && return - xzcat libssh-$LIBSSH_VERSION.tar.xz | tar xf - || exit 1 - cd libssh-$LIBSSH_VERSION + xzcat libssh-$LIBSSH_VERSION.tar.xz | tar xf - + cd "libssh-$LIBSSH_VERSION" mkdir build cd build - MACOSX_DEPLOYMENT_TARGET=$min_osx_target SDKROOT="$SDKPATH" $DO_CMAKE -DWITH_GCRYPT=1 ../ || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + "${DO_CMAKE[@]}" -DWITH_GCRYPT=1 .. + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd ../.. touch libssh-$LIBSSH_VERSION-done fi } uninstall_libssh() { - if [ ! -z "$installed_libssh_version" ] ; then + if [ -n "$installed_libssh_version" ] ; then echo "Uninstalling libssh:" cd libssh-$installed_libssh_version # # libssh uses cmake and doesn't support "make uninstall"; # just remove what we know it installs. 
# - # $DO_MAKE_UNINSTALL || exit 1 - $DO_RM -rf /usr/local/lib/libssh* \ - /usr/local/include/libssh \ - /usr/local/lib/pkgconfig/libssh* \ - /usr/local/lib/cmake/libssh || exit 1 + # $DO_MAKE_UNINSTALL + $DO_RM -rf "$installation_prefix"/lib/libssh* \ + "$installation_prefix"/include/libssh \ + "$installation_prefix"/lib/pkgconfig/libssh* \ + "$installation_prefix"/lib/cmake/libssh # # libssh uses cmake and doesn't support "make distclean"; # just remove the entire build directory. # - # make distclean || exit 1 - rm -rf build || exit 1 + # make distclean + rm -rf build cd .. rm libssh-$installed_libssh_version-done @@ -2167,24 +2106,25 @@ uninstall_libssh() { install_nghttp2() { if [ "$NGHTTP2_VERSION" -a ! -f nghttp2-$NGHTTP2_VERSION-done ] ; then echo "Downloading, building, and installing nghttp2:" - [ -f nghttp2-$NGHTTP2_VERSION.tar.xz ] || curl -L -O https://github.com/nghttp2/nghttp2/releases/download/v$NGHTTP2_VERSION/nghttp2-$NGHTTP2_VERSION.tar.xz || exit 1 + [ -f nghttp2-$NGHTTP2_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/nghttp2/nghttp2/releases/download/v$NGHTTP2_VERSION/nghttp2-$NGHTTP2_VERSION.tar.xz $no_build && echo "Skipping installation" && return - xzcat nghttp2-$NGHTTP2_VERSION.tar.xz | tar xf - || exit 1 + xzcat nghttp2-$NGHTTP2_VERSION.tar.xz | tar xf - cd nghttp2-$NGHTTP2_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --enable-lib-only || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" --enable-lib-only + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch nghttp2-$NGHTTP2_VERSION-done fi } uninstall_nghttp2() { - if [ ! 
-z "$installed_nghttp2_version" ] ; then + if [ -n "$installed_nghttp2_version" ] ; then echo "Uninstalling nghttp2:" cd nghttp2-$installed_nghttp2_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm nghttp2-$installed_nghttp2_version-done @@ -2203,24 +2143,25 @@ uninstall_nghttp2() { install_nghttp3() { if [ "$NGHTTP3_VERSION" -a ! -f nghttp3-$NGHTTP3_VERSION-done ] ; then echo "Downloading, building, and installing nghttp3:" - [ -f nghttp3-$NGHTTP3_VERSION.tar.xz ] || curl -L -O https://github.com/ngtcp2/nghttp3/releases/download/v$NGHTTP3_VERSION/nghttp3-$NGHTTP3_VERSION.tar.xz || exit 1 + [ -f nghttp3-$NGHTTP3_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/ngtcp2/nghttp3/releases/download/v$NGHTTP3_VERSION/nghttp3-$NGHTTP3_VERSION.tar.xz $no_build && echo "Skipping installation" && return - xzcat nghttp3-$NGHTTP3_VERSION.tar.xz | tar xf - || exit 1 + xzcat nghttp3-$NGHTTP3_VERSION.tar.xz | tar xf - cd nghttp3-$NGHTTP3_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --enable-lib-only || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" --enable-lib-only + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch nghttp3-$NGHTTP3_VERSION-done fi } uninstall_nghttp3() { - if [ ! -z "$installed_nghttp3_version" ] ; then + if [ -n "$installed_nghttp3_version" ] ; then echo "Uninstalling nghttp3:" cd nghttp3-$installed_nghttp3_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm nghttp3-$installed_nghttp3_version-done @@ -2240,26 +2181,27 @@ install_libtiff() { if [ "$LIBTIFF_VERSION" -a ! 
-f tiff-$LIBTIFF_VERSION-done ] ; then echo "Downloading, building, and installing libtiff:" [ -f tiff-$LIBTIFF_VERSION.tar.gz ] || - curl --fail -L -O https://download.osgeo.org/libtiff/tiff-$LIBTIFF_VERSION.tar.gz || - curl --fail -L -O https://download.osgeo.org/libtiff/old/tiff-$LIBTIFF_VERSION.tar.gz || + curl "${CURL_REMOTE_NAME_OPTS[@]}" https://download.osgeo.org/libtiff/tiff-$LIBTIFF_VERSION.tar.gz || + curl "${CURL_REMOTE_NAME_OPTS[@]}" https://download.osgeo.org/libtiff/old/tiff-$LIBTIFF_VERSION.tar.gz || exit 1 $no_build && echo "Skipping installation" && return - gzcat tiff-$LIBTIFF_VERSION.tar.gz | tar xf - || exit 1 + gzcat tiff-$LIBTIFF_VERSION.tar.gz | tar xf - cd tiff-$LIBTIFF_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch tiff-$LIBTIFF_VERSION-done fi } uninstall_libtiff() { - if [ ! -z "$installed_libtiff_version" ] ; then + if [ -n "$installed_libtiff_version" ] ; then echo "Uninstalling libtiff:" cd tiff-$installed_libtiff_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm tiff-$installed_libtiff_version-done @@ -2278,30 +2220,31 @@ uninstall_libtiff() { install_spandsp() { if [ "$SPANDSP_VERSION" -a ! 
-f spandsp-$SPANDSP_VERSION-done ] ; then echo "Downloading, building, and installing SpanDSP:" - [ -f spandsp-$SPANDSP_VERSION.tar.gz ] || curl -L -O https://www.soft-switch.org/downloads/spandsp/spandsp-$SPANDSP_VERSION.tar.gz || exit 1 + [ -f spandsp-$SPANDSP_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.soft-switch.org/downloads/spandsp/spandsp-$SPANDSP_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat spandsp-$SPANDSP_VERSION.tar.gz | tar xf - || exit 1 + gzcat spandsp-$SPANDSP_VERSION.tar.gz | tar xf - cd spandsp-$SPANDSP_VERSION # # Don't use -Wunused-but-set-variable, as it's not supported # by all the gcc versions in the versions of Xcode that we # support. # - patch -p0 <${topdir}/macosx-support-lib-patches/spandsp-configure-patch || exit 1 - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + patch -p0 < "${topdir}/tools/macos-setup-patches/spandsp-configure-patch" + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch spandsp-$SPANDSP_VERSION-done fi } uninstall_spandsp() { - if [ ! -z "$installed_spandsp_version" ] ; then + if [ -n "$installed_spandsp_version" ] ; then echo "Uninstalling SpanDSP:" cd spandsp-$installed_spandsp_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm spandsp-$installed_spandsp_version-done @@ -2320,24 +2263,25 @@ uninstall_spandsp() { install_speexdsp() { if [ "$SPEEXDSP_VERSION" -a ! 
-f speexdsp-$SPEEXDSP_VERSION-done ] ; then echo "Downloading, building, and installing SpeexDSP:" - [ -f speexdsp-$SPEEXDSP_VERSION.tar.gz ] || curl -L -O https://ftp.osuosl.org/pub/xiph/releases/speex/speexdsp-$SPEEXDSP_VERSION.tar.gz || exit 1 + [ -f speexdsp-$SPEEXDSP_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://ftp.osuosl.org/pub/xiph/releases/speex/speexdsp-$SPEEXDSP_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat speexdsp-$SPEEXDSP_VERSION.tar.gz | tar xf - || exit 1 + gzcat speexdsp-$SPEEXDSP_VERSION.tar.gz | tar xf - cd speexdsp-$SPEEXDSP_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch speexdsp-$SPEEXDSP_VERSION-done fi } uninstall_speexdsp() { - if [ ! -z "$installed_speexdsp_version" ] ; then + if [ -n "$installed_speexdsp_version" ] ; then echo "Uninstalling SpeexDSP:" cd speexdsp-$installed_speexdsp_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm speexdsp-$installed_speexdsp_version-done @@ -2356,39 +2300,39 @@ uninstall_speexdsp() { install_bcg729() { if [ "$BCG729_VERSION" -a ! 
-f bcg729-$BCG729_VERSION-done ] ; then echo "Downloading, building, and installing bcg729:" - [ -f bcg729-$BCG729_VERSION.tar.gz ] || curl -L -O https://gitlab.linphone.org/BC/public/bcg729/-/archive/$BCG729_VERSION/bcg729-$BCG729_VERSION.tar.gz || exit 1 + [ -f bcg729-$BCG729_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://gitlab.linphone.org/BC/public/bcg729/-/archive/$BCG729_VERSION/bcg729-$BCG729_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat bcg729-$BCG729_VERSION.tar.gz | tar xf - || exit 1 + gzcat bcg729-$BCG729_VERSION.tar.gz | tar xf - cd bcg729-$BCG729_VERSION mkdir build_dir cd build_dir - MACOSX_DEPLOYMENT_TARGET=$min_osx_target SDKROOT="$SDKPATH" $DO_CMAKE ../ || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + "${DO_CMAKE[@]}" .. + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd ../.. touch bcg729-$BCG729_VERSION-done fi } uninstall_bcg729() { - if [ ! -z "$installed_bcg729_version" ] ; then + if [ -n "$installed_bcg729_version" ] ; then echo "Uninstalling bcg729:" cd bcg729-$installed_bcg729_version # # bcg729 uses cmake on macOS and doesn't support "make uninstall"; # just remove what we know it installs. # - # $DO_MAKE_UNINSTALL || exit 1 - $DO_RM -rf /usr/local/share/Bcg729 \ - /usr/local/lib/libbcg729* \ - /usr/local/include/bcg729 \ - /usr/local/lib/pkgconfig/libbcg729* || exit 1 + # $DO_MAKE_UNINSTALL + $DO_RM -rf "$installation_prefix"/share/Bcg729 \ + "$installation_prefix"/lib/libbcg729* \ + "$installation_prefix"/include/bcg729 \ + "$installation_prefix"/lib/pkgconfig/libbcg729* # # bcg729 uses cmake on macOS and doesn't support "make distclean"; # just remove the enire build directory. # - # make distclean || exit 1 - rm -rf build_dir || exit 1 + # make distclean + rm -rf build_dir cd .. rm bcg729-$installed_bcg729_version-done @@ -2407,13 +2351,14 @@ uninstall_bcg729() { install_ilbc() { if [ -n "$ILBC_VERSION" ] && [ ! 
-f ilbc-$ILBC_VERSION-done ] ; then echo "Downloading, building, and installing iLBC:" - [ -f libilbc-$ILBC_VERSION.tar.bz ] || curl --location --remote-name https://github.com/TimothyGu/libilbc/releases/download/v$ILBC_VERSION/libilbc-$ILBC_VERSION.tar.bz2 || exit 1 + [ -f libilbc-$ILBC_VERSION.tar.bz ] || curl --location --remote-name https://github.com/TimothyGu/libilbc/releases/download/v$ILBC_VERSION/libilbc-$ILBC_VERSION.tar.bz2 $no_build && echo "Skipping installation" && return - bzcat libilbc-$ILBC_VERSION.tar.bz2 | tar xf - || exit 1 - cd libilbc-$ILBC_VERSION || exit 1 - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + bzcat libilbc-$ILBC_VERSION.tar.bz2 | tar xf - + cd libilbc-$ILBC_VERSION + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch ilbc-$ILBC_VERSION-done fi @@ -2422,9 +2367,9 @@ install_ilbc() { uninstall_ilbc() { if [ -n "$installed_ilbc_version" ] ; then echo "Uninstalling iLBC:" - cd "libilbc-$installed_ilbc_version" || exit 1 - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + cd "libilbc-$installed_ilbc_version" + $DO_MAKE_UNINSTALL + make distclean cd .. rm "ilbc-$installed_ilbc_version-done" @@ -2440,27 +2385,66 @@ uninstall_ilbc() { fi } +install_opencore_amr() { + if [ "$OPENCORE_AMR_VERSION" ] && [ ! 
-f opencore-amr-$OPENCORE_AMR_VERSION-done ] ; then + echo "Downloading, building, and installing opencore-amr:" + [ -f opencore-amr-$OPENCORE_AMR_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://downloads.sourceforge.net/project/opencore-amr/opencore-amr/opencore-amr-$OPENCORE_AMR_VERSION.tar.gz + echo "$OPENCORE_AMR_SHA256 opencore-amr-$OPENCORE_AMR_VERSION.tar.gz" | shasum --algorithm 256 --check + $no_build && echo "Skipping installation" && return + tar -xf opencore-amr-$OPENCORE_AMR_VERSION.tar.gz + cd opencore-amr-$OPENCORE_AMR_VERSION + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL + cd .. + touch opencore-amr-$OPENCORE_AMR_VERSION-done + fi +} + +uninstall_opencore_amr() { + if [ -n "$installed_opencore_amr_version" ] ; then + echo "Uninstalling opencore-amr:" + cd "opencore-amr-$installed_opencore_amr_version" + $DO_MAKE_UNINSTALL + make distclean + cd .. + rm "opencore-amr-$installed_opencore_amr_version-done" + + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then + # + # Get rid of the previously downloaded and unpacked version. + # + rm -rf "opencore-amr-$installed_opencore_amr_version" + rm -rf "opencore-amr-$installed_opencore_amr_version.tar.gz" + fi + + installed_opencore_amr_version="" + fi +} + install_opus() { if [ "$OPUS_VERSION" -a ! 
-f opus-$OPUS_VERSION-done ] ; then echo "Downloading, building, and installing opus:" - [ -f opus-$OPUS_VERSION.tar.gz ] || curl -L -O https://downloads.xiph.org/releases/opus/opus-$OPUS_VERSION.tar.gz || exit 1 + [ -f opus-$OPUS_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://downloads.xiph.org/releases/opus/opus-$OPUS_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat opus-$OPUS_VERSION.tar.gz | tar xf - || exit 1 + gzcat opus-$OPUS_VERSION.tar.gz | tar xf - cd opus-$OPUS_VERSION - CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd .. touch opus-$OPUS_VERSION-done fi } uninstall_opus() { - if [ ! -z "$installed_opus_version" ] ; then + if [ -n "$installed_opus_version" ] ; then echo "Uninstalling opus:" cd opus-$installed_opus_version - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd .. rm opus-$installed_opus_version-done @@ -2476,26 +2460,164 @@ uninstall_opus() { fi } -install_python3() { - # The macos11 installer can be deployed to older versions, down to - # 10.9 (Mavericks), but is still considered experimental so continue - # to use the 64-bit installer (10.9) on earlier releases for now. - local macver=x10.9 - if [[ $DARWIN_MAJOR_VERSION -gt 19 ]]; then - # The macos11 installer is required for Arm-based Macs, which require - # macOS 11 Big Sur. 
Note that the package name is "11.0" (no x) for - # 3.9.1 but simply "11" for 3.9.2 (and later) - if [[ $PYTHON3_VERSION = 3.9.1 ]]; then - macver=11.0 - else - macver=11 +install_jsoncpp() { + if [ "$JSONCPP_VERSION" ] && [ ! -f "jsoncpp-$JSONCPP_VERSION-done" ] ; then + echo "Downloading, building, and installing JsonCpp:" + [ -f "jsoncpp-$JSONCPP_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" --remote-header-name "https://github.com/open-source-parsers/jsoncpp/archive/refs/tags/$JSONCPP_VERSION.tar.gz" + $no_build && echo "Skipping installation" && return + tar -xf "jsoncpp-$JSONCPP_VERSION.tar.gz" + cd "jsoncpp-$JSONCPP_VERSION" + mkdir build_dir + cd build_dir + "${DO_CMAKE[@]}" -DBUILD_OBJECT_LIBS=OFF -DBUILD_SHARED_LIBS=ON -DBUILD_STATIC_LIBS=OFF -DJSONCPP_WITH_POST_BUILD_UNITTEST=OFF .. + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL + cd ../.. + touch "jsoncpp-$JSONCPP_VERSION-done" + fi +} + +uninstall_jsoncpp() { + if [ "$installed_jsoncpp_version" ] && [ -s "jsoncpp-$installed_jsoncpp_version/build_dir/install_manifest.txt" ] ; then + echo "Uninstalling JsonCpp:" + while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat "jsoncpp-$installed_jsoncpp_version/build_dir/install_manifest.txt"; echo) + rm "jsoncpp-$JSONCPP_VERSION-done" + + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then + # + # Get rid of the previously downloaded and unpacked version. + # + rm -rf "jsoncpp-$installed_jsoncpp_version" + rm -rf "jsoncpp-$installed_jsoncpp_version.tar.gz" + fi + + installed_jsoncpp_version="" + fi +} + +install_onetbb() { + if [ "$ONETBB_VERSION" ] && [ ! 
-f "onetbb-$ONETBB_VERSION-done" ] ; then + echo "Downloading, building, and installing oneTBB:" + [ -f "oneTBB-$ONETBB_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" --remote-header-name "https://github.com/oneapi-src/oneTBB/archive/refs/tags/v$ONETBB_VERSION.tar.gz" + $no_build && echo "Skipping installation" && return + tar -xf "oneTBB-$ONETBB_VERSION.tar.gz" + cd "oneTBB-$ONETBB_VERSION" + mkdir build_dir + cd build_dir + "${DO_CMAKE[@]}" -DBUILD_SHARED_LIBS=ON -DTBB_TEST=OFF .. + make "${MAKE_BUILD_OPTS[@]}" tbb + $DO_MAKE_INSTALL + cd ../.. + touch "onetbb-$ONETBB_VERSION-done" + fi +} + +uninstall_onetbb() { + if [ "$installed_onetbb_version" ] && [ -s "oneTBB-$installed_onetbb_version/build_dir/install_manifest.txt" ] ; then + echo "Uninstalling oneTBB:" + while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat "oneTBB-$installed_onetbb_version/build_dir/install_manifest.txt"; echo) + rm "onetbb-$installed_onetbb_version-done" + + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then + # + # Get rid of the previously downloaded and unpacked version. + # + rm -rf "oneTBB-$installed_onetbb_version" + rm -rf "oneTBB-$installed_onetbb_version.tar.gz" + fi + + installed_onetbb_version="" + fi +} + +install_re2() { + if [ "$RE2_VERSION" ] && [ ! -f "re2-$RE2_VERSION-done" ] ; then + echo "Downloading, building, and installing RE2:" + [ -f "re2-$RE2_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" --remote-header-name "https://github.com/google/re2/archive/refs/tags/$RE2_VERSION.tar.gz" + $no_build && echo "Skipping installation" && return + tar -xf "re2-$RE2_VERSION.tar.gz" + cd "re2-$RE2_VERSION" + mkdir build_dir + cd build_dir + "${DO_CMAKE[@]}" -DBUILD_SHARED_LIBS=ON -DRE2_BUILD_TESTING=OFF .. + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL + cd ../.. 
+ touch "re2-$RE2_VERSION-done" + fi +} + +uninstall_re2() { + if [ -n "$installed_re2_version" ] && [ -s "re2-$installed_re2_version/build_dir/install_manifest.txt" ] ; then + echo "Uninstalling RE2:" + while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat "re2-$installed_re2_version/build_dir/install_manifest.txt"; echo) + rm "re2-$installed_re2_version-done" + + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then + # + # Get rid of the previously downloaded and unpacked version. + # + rm -rf "re2-$installed_re2_version" + rm -rf "re2-$installed_re2_version.tar.gz" + fi + + installed_re2_version="" + fi +} + +install_falco_libs() { + if [ "$FALCO_LIBS_VERSION" ] && [ ! -f "falco-libs-$FALCO_LIBS_VERSION-done" ] ; then + echo "Downloading, building, and installing libsinsp and libscap:" + [ -f "falco-libs-$FALCO_LIBS_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" --remote-header-name "https://github.com/falcosecurity/libs/archive/refs/tags/$FALCO_LIBS_VERSION.tar.gz" + $no_build && echo "Skipping installation" && return + mv "libs-$FALCO_LIBS_VERSION.tar.gz" "falco-libs-$FALCO_LIBS_VERSION.tar.gz" + tar -xf "falco-libs-$FALCO_LIBS_VERSION.tar.gz" + mv "libs-$FALCO_LIBS_VERSION" "falco-libs-$FALCO_LIBS_VERSION" + cd "falco-libs-$FALCO_LIBS_VERSION" + patch -p1 < "${topdir}/tools/macos-setup-patches/falco-uthash_h-install.patch" + patch -p1 < "${topdir}/tools/macos-setup-patches/falco-include-dirs.patch" + mkdir build_dir + cd build_dir + "${DO_CMAKE[@]}" -DBUILD_SHARED_LIBS=ON -DMINIMAL_BUILD=ON -DCREATE_TEST_TARGETS=OFF \ + -DUSE_BUNDLED_DEPS=ON -DUSE_BUNDLED_CARES=OFF -DUSE_BUNDLED_ZLIB=OFF \ + -DUSE_BUNDLED_JSONCPP=OFF -DUSE_BUNDLED_TBB=OFF -DUSE_BUNDLED_RE2=OFF \ + .. + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL + cd ../.. 
+ touch "falco-libs-$FALCO_LIBS_VERSION-done" + fi +} + +uninstall_falco_libs() { + if [ -n "$installed_falco_libs_version" ] && [ -s "falco-libs-$installed_falco_libs_version/build_dir/install_manifest.txt" ] ; then + echo "Uninstalling Falco libs:" + $DO_RM "$installation_prefix"/include/falcosecurity/uthash.h + while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat "falco-libs-$installed_falco_libs_version/build_dir/install_manifest.txt"; echo) + rm "falco-libs-$installed_falco_libs_version-done" + + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then + # + # Get rid of the previously downloaded and unpacked version. + # + rm -rf "falco-libs-$installed_falco_libs_version" + rm -rf "falco-libs-$installed_falco_libs_version.tar.gz" fi + + installed_falco_libs_version="" fi +} + +install_python3() { + # The macos11 universal2 installer can be deployed to older versions, + # down to 10.9 (Mavericks). The 10.9 installer was deprecated in 3.9.8 + # and stopped being released after 3.9.13 + local macver=11 if [ "$PYTHON3_VERSION" -a ! -f python3-$PYTHON3_VERSION-done ] ; then echo "Downloading and installing python3:" - [ -f python-$PYTHON3_VERSION-macos$macver.pkg ] || curl -L -O https://www.python.org/ftp/python/$PYTHON3_VERSION/python-$PYTHON3_VERSION-macos$macver.pkg || exit 1 + [ -f python-$PYTHON3_VERSION-macos$macver.pkg ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.python.org/ftp/python/$PYTHON3_VERSION/python-$PYTHON3_VERSION-macos$macver.pkg $no_build && echo "Skipping installation" && return - sudo installer -target / -pkg python-$PYTHON3_VERSION-macos$macver.pkg || exit 1 + sudo installer -target / -pkg python-$PYTHON3_VERSION-macos$macver.pkg touch python3-$PYTHON3_VERSION-done # @@ -2506,7 +2628,7 @@ install_python3() { # # Strip off any dot-dot component in $PYTHON3_VERSION. 
# - python_version=`echo $PYTHON3_VERSION | sed 's/\([1-9][0-9]*\.[1-9][0-9]*\).*/\1/'` + python_version=$( echo "$PYTHON3_VERSION" | sed 's/\([1-9][0-9]*\.[1-9][0-9]*\).*/\1/' ) # # Now treat Meson as being in the directory in question. # @@ -2524,12 +2646,12 @@ install_python3() { uninstall_python3() { # Major version (e.g. "3.7") local PYTHON_VERSION=${installed_python3_version%.*} - if [ ! -z "$installed_python3_version" ] ; then + if [ -n "$installed_python3_version" ] ; then echo "Uninstalling python3:" frameworkdir="/Library/Frameworks/Python.framework/Versions/$PYTHON_VERSION" sudo rm -rf "$frameworkdir" sudo rm -rf "/Applications/Python $PYTHON_VERSION" - sudo find /usr/local/bin -maxdepth 1 -lname "*$frameworkdir/bin/*" -delete + sudo find "$installation_prefix"/bin -maxdepth 1 -lname "*$frameworkdir/bin/*" -delete # Remove three symlinks and empty directories. Removing directories # might fail if for some reason multiple versions are installed. sudo rm /Library/Frameworks/Python.framework/Headers @@ -2560,39 +2682,39 @@ uninstall_python3() { install_brotli() { if [ "$BROTLI_VERSION" -a ! -f brotli-$BROTLI_VERSION-done ] ; then echo "Downloading, building, and installing brotli:" - [ -f brotli-$BROTLI_VERSION.tar.gz ] || curl -L -o brotli-$BROTLI_VERSION.tar.gz https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz || exit 1 + [ -f brotli-$BROTLI_VERSION.tar.gz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" brotli-$BROTLI_VERSION.tar.gz https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat brotli-$BROTLI_VERSION.tar.gz | tar xf - || exit 1 + gzcat brotli-$BROTLI_VERSION.tar.gz | tar xf - cd brotli-$BROTLI_VERSION mkdir build_dir cd build_dir - MACOSX_DEPLOYMENT_TARGET=$min_osx_target SDKROOT="$SDKPATH" $DO_CMAKE ../ || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + "${DO_CMAKE[@]}" .. + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd ../.. 
touch brotli-$BROTLI_VERSION-done fi } uninstall_brotli() { - if [ ! -z "$installed_brotli_version" ] ; then + if [ -n "$installed_brotli_version" ] ; then echo "Uninstalling brotli:" cd brotli-$installed_brotli_version # # brotli uses cmake on macOS and doesn't support "make uninstall"; # just remove what we know it installs. # - # $DO_MAKE_UNINSTALL || exit 1 - $DO_RM -rf /usr/local/bin/brotli \ - /usr/local/lib/libbrotli* \ - /usr/local/include/brotli \ - /usr/local/lib/pkgconfig/libbrotli* || exit 1 + # $DO_MAKE_UNINSTALL + $DO_RM -rf "$installation_prefix"/bin/brotli \ + "$installation_prefix"/lib/libbrotli* \ + "$installation_prefix"/include/brotli \ + "$installation_prefix"/lib/pkgconfig/libbrotli* # # brotli uses cmake on macOS and doesn't support "make distclean"; # just remove the enire build directory. # - # make distclean || exit 1 - rm -rf build_dir || exit 1 + # make distclean + rm -rf build_dir cd .. rm brotli-$installed_brotli_version-done @@ -2611,25 +2733,26 @@ uninstall_brotli() { install_minizip() { if [ "$ZLIB_VERSION" ] && [ ! 
-f minizip-$ZLIB_VERSION-done ] ; then echo "Downloading, building, and installing zlib for minizip:" - [ -f zlib-$ZLIB_VERSION.tar.gz ] || curl -L -o zlib-$ZLIB_VERSION.tar.gz https://zlib.net/zlib-$ZLIB_VERSION.tar.gz || exit 1 + [ -f zlib-$ZLIB_VERSION.tar.gz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" zlib-$ZLIB_VERSION.tar.gz https://zlib.net/fossils/zlib-$ZLIB_VERSION.tar.gz $no_build && echo "Skipping installation" && return - gzcat zlib-$ZLIB_VERSION.tar.gz | tar xf - || exit 1 + gzcat zlib-$ZLIB_VERSION.tar.gz | tar xf - # # minizip ships both with a minimal Makefile that doesn't # support "make install", "make uninstall", or "make distclean", # and with a Makefile.am file that, if we do an autoreconf, # gives us a configure script, and a Makefile.in that, if we run - # the configure script, gives us a Makefile that supports ll of + # the configure script, gives us a Makefile that supports all of # those targets, and that installs a pkg-config .pc file for # minizip. # # So that's what we do. # - cd zlib-$ZLIB_VERSION/contrib/minizip || exit 1 + cd zlib-$ZLIB_VERSION/contrib/minizip LIBTOOLIZE=glibtoolize autoreconf --force --install - CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1 - make $MAKE_BUILD_OPTS || exit 1 - $DO_MAKE_INSTALL || exit 1 + CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \ + ./configure "${CONFIGURE_OPTS[@]}" + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL cd ../../.. 
touch minizip-$ZLIB_VERSION-done fi @@ -2639,8 +2762,8 @@ uninstall_minizip() { if [ -n "$installed_minizip_version" ] ; then echo "Uninstalling minizip:" cd zlib-$installed_minizip_version/contrib/minizip - $DO_MAKE_UNINSTALL || exit 1 - make distclean || exit 1 + $DO_MAKE_UNINSTALL + make distclean cd ../../.. rm minizip-$installed_minizip_version-done @@ -2657,16 +2780,54 @@ uninstall_minizip() { fi } +install_minizip_ng() { + if [ "$MINIZIPNG_VERSION" ] && [ ! -f minizip-ng-$MINIZIPNG_VERSION-done ] ; then + echo "Downloading, building, and installing minizip-ng:" + [ -f $MINIZIPNG_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/zlib-ng/minizip-ng/archive/refs/tags/$MINIZIPNG_VERSION.tar.gz + $no_build && echo "Skipping installation" && return + gzcat $MINIZIPNG_VERSION.tar.gz | tar xf - + cd minizip-ng-$MINIZIPNG_VERSION + mkdir build + cd build + "${DO_CMAKE[@]}" .. + make "${MAKE_BUILD_OPTS[@]}" + $DO_MAKE_INSTALL + cd ../.. + touch minizip-ng-$MINIZIPNG_VERSION-done + fi +} + +uninstall_minizip_ng() { + if [ -n "$installed_minizip_ng_version" ] ; then + echo "Uninstalling minizip:" + cd minizip-ng-$installed_minizip_ng_version/contrib/minizip + $DO_MAKE_UNINSTALL + make distclean + cd ../../.. + + rm minizip-ng-$installed_minizip_ng_version-done + + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then + # + # Get rid of the previously downloaded and unpacked version. + # + rm -rf minizip-ng-$installed_minizip_ng_version + rm -rf minizip-ng-$installed_minizip_ng_version.tar.gz + fi + + installed_minizip_ng_version="" + fi +} install_sparkle() { if [ "$SPARKLE_VERSION" ] && [ ! 
-f sparkle-$SPARKLE_VERSION-done ] ; then echo "Downloading and installing Sparkle:" # - # Download the tarball and unpack it in /usr/local/Sparkle-x.y.z + # Download the tarball and unpack it in $installation_prefix/Sparkle-x.y.z # - [ -f Sparkle-$SPARKLE_VERSION.tar.xz ] || curl -L -o Sparkle-$SPARKLE_VERSION.tar.xz https://github.com/sparkle-project/Sparkle/releases/download/$SPARKLE_VERSION/Sparkle-$SPARKLE_VERSION.tar.xz || exit 1 + [ -f Sparkle-$SPARKLE_VERSION.tar.xz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" Sparkle-$SPARKLE_VERSION.tar.xz https://github.com/sparkle-project/Sparkle/releases/download/$SPARKLE_VERSION/Sparkle-$SPARKLE_VERSION.tar.xz $no_build && echo "Skipping installation" && return - test -d "/usr/local/Sparkle-$SPARKLE_VERSION" || sudo mkdir "/usr/local/Sparkle-$SPARKLE_VERSION" - sudo tar -C "/usr/local/Sparkle-$SPARKLE_VERSION" -xpof Sparkle-$SPARKLE_VERSION.tar.xz + test -d "$installation_prefix/Sparkle-$SPARKLE_VERSION" || sudo mkdir "$installation_prefix/Sparkle-$SPARKLE_VERSION" + sudo tar -C "$installation_prefix/Sparkle-$SPARKLE_VERSION" -xpof Sparkle-$SPARKLE_VERSION.tar.xz touch sparkle-$SPARKLE_VERSION-done fi } @@ -2674,7 +2835,10 @@ install_sparkle() { uninstall_sparkle() { if [ -n "$installed_sparkle_version" ]; then echo "Uninstalling Sparkle:" - sudo rm -rf "/usr/local/Sparkle-$installed_sparkle_version" + sudo rm -rf "$installation_prefix/Sparkle-$installed_sparkle_version" + + rm sparkle-$installed_sparkle_version-done + if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then rm -f "Sparkle-$installed_sparkle_version.tar.xz" fi @@ -2688,7 +2852,7 @@ install_all() { # Check whether the versions we have installed are the versions # requested; if not, uninstall the installed versions. # - if [ ! 
-z "$installed_brotli_version" -a \ + if [ -n "$installed_brotli_version" -a \ "$installed_brotli_version" != "$BROTLI_VERSION" ] ; then echo "Installed brotli version is $installed_brotli_version" if [ -z "$BROTLI_VERSION" ] ; then @@ -2699,7 +2863,7 @@ install_all() { uninstall_brotli -r fi - if [ ! -z "$installed_python3_version" -a \ + if [ -n "$installed_python3_version" -a \ "$installed_python3_version" != "$PYTHON3_VERSION" ] ; then echo "Installed python3 version is $installed_python3_version" if [ -z "$PYTHON3_VERSION" ] ; then @@ -2710,7 +2874,7 @@ install_all() { uninstall_python3 -r fi - if [ ! -z "$installed_bcg729_version" -a \ + if [ -n "$installed_bcg729_version" -a \ "$installed_bcg729_version" != "$BCG729_VERSION" ] ; then echo "Installed bcg729 version is $installed_bcg729_version" if [ -z "$BCG729_VERSION" ] ; then @@ -2732,6 +2896,17 @@ install_all() { uninstall_ilbc -r fi + if [ -n "$installed_opencore_amr_version" ] \ + && [ "$installed_opencore_amr_version" != "$OPENCORE_AMR_VERSION" ] ; then + echo "Installed opencore-amr version is $installed_opencore_amr_version" + if [ -z "$OPENCORE_AMR_VERSION" ] ; then + echo "opencore-amr is not requested" + else + echo "Requested opencore-amr version is $OPENCORE_AMR_VERSION" + fi + uninstall_opencore_amr -r + fi + if [ -n "$installed_opus_version" ] \ && [ "$installed_opus_version" != "$OPUS_VERSION" ] ; then echo "Installed opus version is $installed_opus_version" @@ -2743,7 +2918,7 @@ install_all() { uninstall_opus -r fi - if [ ! -z "$installed_spandsp_version" -a \ + if [ -n "$installed_spandsp_version" -a \ "$installed_spandsp_version" != "$SPANDSP_VERSION" ] ; then echo "Installed SpanDSP version is $installed_spandsp_version" if [ -z "$SPANDSP_VERSION" ] ; then @@ -2754,7 +2929,7 @@ install_all() { uninstall_spandsp -r fi - if [ ! 
-z "$installed_speexdsp_version" -a \ + if [ -n "$installed_speexdsp_version" -a \ "$installed_speexdsp_version" != "$SPEEXDSP_VERSION" ] ; then echo "Installed SpeexDSP version is $installed_speexdsp_version" if [ -z "$SPEEXDSP_VERSION" ] ; then @@ -2765,7 +2940,7 @@ install_all() { uninstall_speexdsp -r fi - if [ ! -z "$installed_libtiff_version" -a \ + if [ -n "$installed_libtiff_version" -a \ "$installed_libtiff_version" != "$LIBTIFF_VERSION" ] ; then echo "Installed libtiff version is $installed_libtiff_version" if [ -z "$LIBTIFF_VERSION" ] ; then @@ -2776,7 +2951,7 @@ install_all() { uninstall_libtiff -r fi - if [ ! -z "$installed_nghttp2_version" -a \ + if [ -n "$installed_nghttp2_version" -a \ "$installed_nghttp2_version" != "$NGHTTP2_VERSION" ] ; then echo "Installed nghttp2 version is $installed_nghttp2_version" if [ -z "$NGHTTP2_VERSION" ] ; then @@ -2787,7 +2962,7 @@ install_all() { uninstall_nghttp2 -r fi - if [ ! -z "$installed_nghttp3_version" -a \ + if [ -n "$installed_nghttp3_version" -a \ "$installed_nghttp3_version" != "$NGHTTP3_VERSION" ] ; then echo "Installed nghttp3 version is $installed_nghttp3_version" if [ -z "$NGHTTP3_VERSION" ] ; then @@ -2798,7 +2973,7 @@ install_all() { uninstall_nghttp3 -r fi - if [ ! -z "$installed_libssh_version" -a \ + if [ -n "$installed_libssh_version" -a \ "$installed_libssh_version" != "$LIBSSH_VERSION" ] ; then echo "Installed libssh version is $installed_libssh_version" if [ -z "$LIBSSH_VERSION" ] ; then @@ -2809,7 +2984,7 @@ install_all() { uninstall_libssh -r fi - if [ ! -z "$installed_cares_version" -a \ + if [ -n "$installed_cares_version" -a \ "$installed_cares_version" != "$CARES_VERSION" ] ; then echo "Installed C-Ares version is $installed_cares_version" if [ -z "$CARES_VERSION" ] ; then @@ -2820,7 +2995,7 @@ install_all() { uninstall_c_ares -r fi - if [ ! 
-z "$installed_maxminddb_version" -a \ + if [ -n "$installed_maxminddb_version" -a \ "$installed_maxminddb_version" != "$MAXMINDDB_VERSION" ] ; then echo "Installed MaxMindDB API version is $installed_maxminddb_version" if [ -z "$MAXMINDDB_VERSION" ] ; then @@ -2831,7 +3006,7 @@ install_all() { uninstall_maxminddb -r fi - if [ ! -z "$installed_sbc_version" -a \ + if [ -n "$installed_sbc_version" -a \ "$installed_sbc_version" != "$SBC_VERSION" ] ; then echo "Installed SBC version is $installed_sbc_version" if [ -z "$SBC_VERSION" ] ; then @@ -2842,7 +3017,7 @@ install_all() { uninstall_sbc -r fi - if [ ! -z "$installed_lz4_version" -a \ + if [ -n "$installed_lz4_version" -a \ "$installed_lz4_version" != "$LZ4_VERSION" ] ; then echo "Installed LZ4 version is $installed_lz4_version" if [ -z "$LZ4_VERSION" ] ; then @@ -2853,7 +3028,7 @@ install_all() { uninstall_lz4 -r fi - if [ ! -z "$installed_libxml2_version" -a \ + if [ -n "$installed_libxml2_version" -a \ "$installed_libxml2_version" != "$LIBXML2_VERSION" ] ; then echo "Installed libxml2 version is $installed_libxml2_version" if [ -z "$LIBXML2_VERSION" ] ; then @@ -2864,7 +3039,7 @@ install_all() { uninstall_libxml2 -r fi - if [ ! -z "$installed_snappy_version" -a \ + if [ -n "$installed_snappy_version" -a \ "$installed_snappy_version" != "$SNAPPY_VERSION" ] ; then echo "Installed SNAPPY version is $installed_snappy_version" if [ -z "$SNAPPY_VERSION" ] ; then @@ -2875,7 +3050,27 @@ install_all() { uninstall_snappy -r fi - if [ ! 
-z "$installed_lua_version" -a \ + if [ -n "$installed_zstd_version" ] && [ "$installed_zstd_version" != "$ZSTD_VERSION" ] ; then + echo "Installed zstd version is $installed_zstd_version" + if [ -z "$ZSTD_VERSION" ] ; then + echo "zstd is not requested" + else + echo "Requested zstd version is $ZSTD_VERSION" + fi + uninstall_zstd -r + fi + + if [ -n "$installed_zlibng_version" ] && [ "$installed_zlibng_version" != "$ZLIBNG_VERSION" ] ; then + echo "Installed zlibng version is $installed_zlibng_version" + if [ -z "$ZLIBNG_VERSION" ] ; then + echo "zlibng is not requested" + else + echo "Requested zlibng version is $ZLIBNG_VERSION" + fi + uninstall_zlibng -r + fi + + if [ -n "$installed_lua_version" -a \ "$installed_lua_version" != "$LUA_VERSION" ] ; then echo "Installed Lua version is $installed_lua_version" if [ -z "$LUA_VERSION" ] ; then @@ -2886,8 +3081,7 @@ install_all() { uninstall_lua -r fi - if [ ! -z "$installed_gnutls_version" -a \ - "$installed_gnutls_version" != "$GNUTLS_VERSION" ] ; then + if [ -n "$installed_gnutls_version" ] && [ "$installed_gnutls_version" != "$GNUTLS_VERSION" ] ; then echo "Installed GnuTLS version is $installed_gnutls_version" if [ -z "$GNUTLS_VERSION" ] ; then echo "GnuTLS is not requested" @@ -2897,7 +3091,7 @@ install_all() { uninstall_gnutls -r fi - if [ ! -z "$installed_nettle_version" -a \ + if [ -n "$installed_nettle_version" -a \ "$installed_nettle_version" != "$NETTLE_VERSION" ] ; then echo "Installed Nettle version is $installed_nettle_version" if [ -z "$NETTLE_VERSION" ] ; then @@ -2908,7 +3102,7 @@ install_all() { uninstall_nettle -r fi - if [ ! -z "$installed_gmp_version" -a \ + if [ -n "$installed_gmp_version" -a \ "$installed_gmp_version" != "$GMP_VERSION" ] ; then echo "Installed GMP version is $installed_gmp_version" if [ -z "$GMP_VERSION" ] ; then @@ -2919,7 +3113,7 @@ install_all() { uninstall_gmp -r fi - if [ ! 
-z "$installed_p11_kit_version" -a \ + if [ -n "$installed_p11_kit_version" -a \ "$installed_p11_kit_version" != "$P11KIT_VERSION" ] ; then echo "Installed p11-kit version is $installed_p11_kit_version" if [ -z "$P11KIT_VERSION" ] ; then @@ -2930,7 +3124,7 @@ install_all() { uninstall_p11_kit -r fi - if [ ! -z "$installed_libtasn1_version" -a \ + if [ -n "$installed_libtasn1_version" -a \ "$installed_libtasn1_version" != "$LIBTASN1_VERSION" ] ; then echo "Installed libtasn1 version is $installed_libtasn1_version" if [ -z "$LIBTASN1_VERSION" ] ; then @@ -2941,7 +3135,7 @@ install_all() { uninstall_libtasn1 -r fi - if [ ! -z "$installed_libgcrypt_version" -a \ + if [ -n "$installed_libgcrypt_version" -a \ "$installed_libgcrypt_version" != "$LIBGCRYPT_VERSION" ] ; then echo "Installed libgcrypt version is $installed_libgcrypt_version" if [ -z "$LIBGCRYPT_VERSION" ] ; then @@ -2952,7 +3146,7 @@ install_all() { uninstall_libgcrypt -r fi - if [ ! -z "$installed_libgpg_error_version" -a \ + if [ -n "$installed_libgpg_error_version" -a \ "$installed_libgpg_error_version" != "$LIBGPG_ERROR_VERSION" ] ; then echo "Installed libgpg-error version is $installed_libgpg_error_version" if [ -z "$LIBGPG_ERROR_VERSION" ] ; then @@ -2963,7 +3157,7 @@ install_all() { uninstall_libgpg_error -r fi - if [ ! -z "$installed_libsmi_version" -a \ + if [ -n "$installed_libsmi_version" -a \ "$installed_libsmi_version" != "$LIBSMI_VERSION" ] ; then echo "Installed libsmi version is $installed_libsmi_version" if [ -z "$LIBSMI_VERSION" ] ; then @@ -2974,7 +3168,7 @@ install_all() { uninstall_libsmi -r fi - if [ ! -z "$installed_qt_version" -a \ + if [ -n "$installed_qt_version" -a \ "$installed_qt_version" != "$QT_VERSION" ] ; then echo "Installed Qt version is $installed_qt_version" if [ -z "$QT_VERSION" ] ; then @@ -2985,7 +3179,7 @@ install_all() { uninstall_qt -r fi - if [ ! 
-z "$installed_glib_version" -a \ + if [ -n "$installed_glib_version" -a \ "$installed_glib_version" != "$GLIB_VERSION" ] ; then echo "Installed GLib version is $installed_glib_version" if [ -z "$GLIB_VERSION" ] ; then @@ -2996,7 +3190,7 @@ install_all() { uninstall_glib -r fi - if [ ! -z "$installed_pkg_config_version" -a \ + if [ -n "$installed_pkg_config_version" -a \ "$installed_pkg_config_version" != "$PKG_CONFIG_VERSION" ] ; then echo "Installed pkg-config version is $installed_pkg_config_version" if [ -z "$PKG_CONFIG_VERSION" ] ; then @@ -3007,7 +3201,7 @@ install_all() { uninstall_pkg_config -r fi - if [ ! -z "$installed_gettext_version" -a \ + if [ -n "$installed_gettext_version" -a \ "$installed_gettext_version" != "$GETTEXT_VERSION" ] ; then echo "Installed GNU gettext version is $installed_gettext_version" if [ -z "$GETTEXT_VERSION" ] ; then @@ -3018,7 +3212,7 @@ install_all() { uninstall_gettext -r fi - if [ ! -z "$installed_ninja_version" -a \ + if [ -n "$installed_ninja_version" -a \ "$installed_ninja_version" != "$NINJA_VERSION" ] ; then echo "Installed Ninja version is $installed_ninja_version" if [ -z "$NINJA_VERSION" ] ; then @@ -3029,7 +3223,7 @@ install_all() { uninstall_ninja -r fi - if [ ! -z "$installed_asciidoctorpdf_version" -a \ + if [ -n "$installed_asciidoctorpdf_version" -a \ "$installed_asciidoctorpdf_version" != "$ASCIIDOCTORPDF_VERSION" ] ; then echo "Installed Asciidoctor-pdf version is $installed_asciidoctorpdf_version" if [ -z "$ASCIIDOCTORPDF_VERSION" ] ; then @@ -3044,7 +3238,7 @@ install_all() { uninstall_asciidoctorpdf -r fi - if [ ! -z "$installed_asciidoctor_version" -a \ + if [ -n "$installed_asciidoctor_version" -a \ "$installed_asciidoctor_version" != "$ASCIIDOCTOR_VERSION" ] ; then echo "Installed Asciidoctor version is $installed_asciidoctor_version" if [ -z "$ASCIIDOCTOR_VERSION" ] ; then @@ -3059,7 +3253,7 @@ install_all() { uninstall_asciidoctor -r fi - if [ ! 
-z "$installed_cmake_version" -a \ + if [ -n "$installed_cmake_version" -a \ "$installed_cmake_version" != "$CMAKE_VERSION" ] ; then echo "Installed CMake version is $installed_cmake_version" if [ -z "$CMAKE_VERSION" ] ; then @@ -3070,7 +3264,7 @@ install_all() { uninstall_cmake -r fi - if [ ! -z "$installed_libtool_version" -a \ + if [ -n "$installed_libtool_version" -a \ "$installed_libtool_version" != "$LIBTOOL_VERSION" ] ; then echo "Installed GNU libtool version is $installed_libtool_version" if [ -z "$LIBTOOL_VERSION" ] ; then @@ -3081,7 +3275,7 @@ install_all() { uninstall_libtool -r fi - if [ ! -z "$installed_automake_version" -a \ + if [ -n "$installed_automake_version" -a \ "$installed_automake_version" != "$AUTOMAKE_VERSION" ] ; then echo "Installed GNU automake version is $installed_automake_version" if [ -z "$AUTOMAKE_VERSION" ] ; then @@ -3092,7 +3286,7 @@ install_all() { uninstall_automake -r fi - if [ ! -z "$installed_autoconf_version" -a \ + if [ -n "$installed_autoconf_version" -a \ "$installed_autoconf_version" != "$AUTOCONF_VERSION" ] ; then echo "Installed GNU autoconf version is $installed_autoconf_version" if [ -z "$AUTOCONF_VERSION" ] ; then @@ -3103,14 +3297,9 @@ install_all() { uninstall_autoconf -r fi - if [ ! -z "$installed_pcre_version" -a \ - "$installed_pcre_version" != "$PCRE_VERSION" ] ; then - echo "Installed pcre version is $installed_pcre_version" - if [ -z "$PCRE_VERSION" ] ; then - echo "pcre is not requested" - else - echo "Requested pcre version is $PCRE_VERSION" - fi + if [ -n "$installed_pcre_version" ] ; then + echo "Installed pcre1 version is $installed_pcre_version" + echo "(We no longer build with pcre1)" uninstall_pcre -r fi @@ -3125,18 +3314,12 @@ install_all() { uninstall_pcre2 -r fi - if [ ! 
-z "$installed_lzip_version" -a \ - "$installed_lzip_version" != "$LZIP_VERSION" ] ; then - echo "Installed lzip version is $installed_lzip_version" - if [ -z "$LZIP_VERSION" ] ; then - echo "lzip is not requested" - else - echo "Requested lzip version is $LZIP_VERSION" - fi + if [ -n "$installed_lzip_version" ] ; then + echo "Removing legacy install of lzip" uninstall_lzip -r fi - if [ ! -z "$installed_xz_version" -a \ + if [ -n "$installed_xz_version" -a \ "$installed_xz_version" != "$XZ_VERSION" ] ; then echo "Installed xz version is $installed_xz_version" if [ -z "$XZ_VERSION" ] ; then @@ -3147,7 +3330,7 @@ install_all() { uninstall_xz -r fi - if [ ! -z "$installed_curl_version" -a \ + if [ -n "$installed_curl_version" -a \ "$installed_curl_version" != "$CURL_VERSION" ] ; then echo "Installed curl version is $installed_curl_version" if [ -z "$CURL_VERSION" ] ; then @@ -3158,7 +3341,7 @@ install_all() { uninstall_curl -r fi - if [ ! -z "$installed_minizip_version" -a \ + if [ -n "$installed_minizip_version" -a \ "$installed_minizip_version" != "$ZLIB_VERSION" ] ; then echo "Installed minizip (zlib) version is $installed_minizip_version" if [ -z "$ZLIB_VERSION" ] ; then @@ -3169,7 +3352,17 @@ install_all() { uninstall_minizip -r fi - if [ ! 
-z "$installed_sparkle_version" -a \ + if [ -n "$installed_minizip_ng_version" ] && [ "$installed_minizip_ng_version" != "$MINIZIPNG_VERSION" ] ; then + echo "Installed minizip-ng version is $installed_minizip_ng_version" + if [ -z "$MINIZIPNG_VERSION" ] ; then + echo "minizip-ng is not requested" + else + echo "Requested minizip-ng version is $MINIZIPNG_VERSION" + fi + uninstall_minizip_ng -r + fi + + if [ -n "$installed_sparkle_version" -a \ "$installed_sparkle_version" != "$SPARKLE_VERSION" ] ; then echo "Installed Sparkle version is $installed_sparkle_version" if [ -z "$SPARKLE_VERSION" ] ; then @@ -3180,20 +3373,56 @@ install_all() { uninstall_sparkle -r fi + if [ "$installed_falco_libs_version" ] && [ "$installed_falco_libs_version" != "$FALCO_LIBS_VERSION" ] ; then + echo "Installed Falco libs (libsinsp and libscap) version is $installed_falco_libs_version" + if [ -z "$FALCO_LIBS_VERSION" ] ; then + echo "Falco libs is not requested" + else + echo "Requested Falco libs version is $FALCO_LIBS_VERSION" + fi + uninstall_falco_libs -r + fi + + if [ "$installed_jsoncpp_version" ] && [ "$installed_jsoncpp_version" != "$JSONCPP_VERSION" ] ; then + echo "Installed JsonCpp version is $installed_jsoncpp_version" + if [ -z "$JSONCPP_VERSION" ] ; then + echo "JsonCpp is not requested" + else + echo "Requested JsonCpp version is $JSONCPP_VERSION" + fi + uninstall_jsoncpp -r + fi + + if [ "$installed_onetbb_version" ] && [ "$installed_onetbb_version" != "$ONETBB_VERSION" ] ; then + echo "Installed oneTBB version is $installed_onetbb_version" + if [ -z "$ONETBB_VERSION" ] ; then + echo "oneTBB is not requested" + else + echo "Requested oneTBB version is $ONETBB_VERSION" + fi + uninstall_onetbb -r + fi + + if [ "$installed_re2_version" ] && [ "$installed_re2_version" != "$RE2_VERSION" ] ; then + echo "Installed RE2 version is $installed_re2_version" + if [ -z "$RE2_VERSION" ] ; then + echo "RE2 is not requested" + else + echo "Requested RE2 version is $RE2_VERSION" + fi + 
uninstall_re2 -r + fi + # # Start with curl: we may need it to download and install xz. # install_curl # - # Now intall xz: it is the sole download format of glib later than 2.31.2. + # Now install xz: it is the sole download format of glib later than 2.31.2. # install_xz - install_lzip - - install_pcre - install_autoconf install_automake @@ -3284,6 +3513,8 @@ install_all() { install_zstd + install_zlibng + install_libxml2 install_lz4 @@ -3310,13 +3541,25 @@ install_all() { install_ilbc + install_opencore_amr + install_opus install_brotli install_minizip + install_minizip_ng + install_sparkle + + install_re2 + + install_onetbb + + install_jsoncpp + + install_falco_libs } uninstall_all() { @@ -3333,14 +3576,26 @@ uninstall_all() { # We also do a "make distclean", so that we don't have leftovers from # old configurations. # + uninstall_falco_libs + + uninstall_jsoncpp + + uninstall_onetbb + + uninstall_re2 + uninstall_sparkle uninstall_minizip + uninstall_minizip_ng + uninstall_brotli uninstall_opus + uninstall_opencore_amr + uninstall_ilbc uninstall_bcg729 @@ -3365,6 +3620,8 @@ uninstall_all() { uninstall_zstd + uninstall_zlibng + uninstall_libxml2 uninstall_lz4 @@ -3424,6 +3681,7 @@ uninstall_all() { uninstall_pcre + # Legacy, remove uninstall_lzip uninstall_xz @@ -3432,66 +3690,12 @@ uninstall_all() { fi } -# -# Do we have permission to write in /usr/local? -# -# If so, assume we have permission to write in its subdirectories. -# (If that's not the case, this test needs to check the subdirectories -# as well.) -# -# If not, do "make install", "make uninstall", "ninja install", -# "ninja uninstall", the removes for dependencies that don't support -# "make uninstall" or "ninja uninstall", the renames of [g]libtool*, -# and the writing of a libffi .pc file with sudo. 
-# -if [ -w /usr/local ] -then - DO_MAKE_INSTALL="make install" - DO_MAKE_UNINSTALL="make uninstall" - DO_NINJA_INSTALL="ninja -C _build install" - DO_NINJA_UNINSTALL="ninja -C _build uninstall" - DO_TEE_TO_PC_FILE="tee" - DO_RM="rm" - DO_MV="mv" -else - DO_MAKE_INSTALL="sudo make install" - DO_MAKE_UNINSTALL="sudo make uninstall" - DO_NINJA_INSTALL="sudo ninja -C _build install" - DO_NINJA_UNINSTALL="sudo ninja -C _build uninstall" - DO_TEE_TO_PC_FILE="sudo tee" - DO_RM="sudo rm" - DO_MV="sudo mv" -fi - -# -# When building with CMake, don't build libraries with an install path -# that begins with @rpath because that will cause binaries linked with it -# to use that path as the library to look for, and that will cause the -# run-time linker, at least on macOS 14 and later, not to find the library -# in /usr/local/lib unless you explicitly set DYLD_LIBRARY_PATH to include -# /usr/local/lib. That means that you get "didn't find libpcre" errors if -# you try to run binaries from a build unless you set DYLD_LIBRARYPATH to -# include /usr/local/lib. -# -# However, setting CMAKE_MACOSX_RPATH to OFF causes the installed -# library just to have the file name of the library as its install -# name. It needs to be the full installed path of the library in -# order to make running binaries from the build directory work, so -# we set CMAKE_INSTALL_NAME_DIR to /usr/local/lib. -# -# packaging/macosx/osx-app.sh will convert *all* libraries in -# the app bundle to have an @rpath install name, so this won't -# break anything there; it just fixes the ability to run from the -# build directory. -# -DO_CMAKE="cmake -DCMAKE_MACOSX_RPATH=OFF -DCMAKE_INSTALL_NAME_DIR=/usr/local/lib" - # This script is meant to be run in the source root. The following -# code will attempt to get you there, but is not perfect (particulary +# code will attempt to get you there, but is not perfect (particularly # if someone copies the script). -topdir=`pwd`/`dirname $0`/.. 
-cd $topdir +topdir="$( pwd )/$( dirname "$0" )/.." +cd "$topdir" # Preference of the support libraries directory: # ${MACOSX_SUPPORT_LIBS} @@ -3517,7 +3721,7 @@ for i in /Developer/SDKs \ do if [ -d "$i" ] then - min_osx_target=`sw_vers -productVersion | sed 's/\([0-9]*\)\.\([0-9]*\)\.[0-9]*/\1.\2/'` + min_osx_target=$( sw_vers -productVersion | sed 's/\([0-9]*\)\.\([0-9]*\)\.[0-9]*/\1.\2/' ) break fi done @@ -3534,25 +3738,103 @@ done no_build=false -while getopts ht:un name +installation_prefix=/usr/local + +while getopts hnp:t:u name do case $name in - u) - do_uninstall=yes + h|\?) + echo "Usage: macos-setup.sh [ -n ] [ -p ] [ -t ] [ -u ]" 1>&1 + exit 0 ;; n) no_build=true ;; + p) + installation_prefix="$OPTARG" + ;; t) min_osx_target="$OPTARG" ;; - h|?) - echo "Usage: macos-setup.sh [ -t ] [ -u ] [ -n ]" 1>&1 - exit 0 + u) + do_uninstall=yes ;; esac done +# +# Create our custom installation prefix if needed. +# +if [ "$installation_prefix" != "/usr/local" ] ; then + export PATH="$installation_prefix/bin:$PATH" + if [ ! -d "$installation_prefix" ] ; then + echo "Creating $installation_prefix" + $DO_MKDIR "$installation_prefix" + fi +fi + +# +# Do we have permission to write in $installation_prefix? +# +# If so, assume we have permission to write in its subdirectories. +# (If that's not the case, this test needs to check the subdirectories +# as well.) +# +# If not, do "make install", "make uninstall", "ninja install", +# "ninja uninstall", the removes for dependencies that don't support +# "make uninstall" or "ninja uninstall", the renames of [g]libtool*, +# and the writing of a libffi .pc file with sudo. 
+# +if [ -w "$installation_prefix" ] +then + DO_MAKE="make" + DO_MAKE_INSTALL="make install" + DO_MAKE_UNINSTALL="make uninstall" + DO_NINJA_INSTALL="ninja -C _build install" + DO_NINJA_UNINSTALL="ninja -C _build uninstall" + DO_TEE_TO_PC_FILE="tee" + DO_RM="rm" + DO_MV="mv" +else + DO_MAKE="sudo make" + DO_MAKE_INSTALL="sudo make install" + DO_MAKE_UNINSTALL="sudo make uninstall" + DO_NINJA_INSTALL="sudo ninja -C _build install" + DO_NINJA_UNINSTALL="sudo ninja -C _build uninstall" + DO_TEE_TO_PC_FILE="sudo tee" + DO_RM="sudo rm" + DO_MV="sudo mv" +fi + +# +# When building with CMake, don't build libraries with an install path +# that begins with @rpath because that will cause binaries linked with it +# to use that path as the library to look for, and that will cause the +# run-time linker, at least on macOS 14 and later, not to find the library +# in $installation_prefix/lib unless you explicitly set DYLD_LIBRARY_PATH to include +# $installation_prefix/lib. That means that you get "didn't find libpcre" errors if +# you try to run binaries from a build unless you set DYLD_LIBRARYPATH to +# include $installation_prefix/lib. +# +# However, setting CMAKE_MACOSX_RPATH to OFF causes the installed +# library just to have the file name of the library as its install +# name. It needs to be the full installed path of the library in +# order to make running binaries from the build directory work, so +# we set CMAKE_INSTALL_NAME_DIR to $installation_prefix/lib. +# +# packaging/macosx/osx-app.sh will convert *all* libraries in +# the app bundle to have an @rpath install name, so this won't +# break anything there; it just fixes the ability to run from the +# build directory. +# +DO_CMAKE=( cmake + -DCMAKE_OSX_DEPLOYMENT_TARGET="$min_osx_target" + -DSDKROOT="$SDKPATH" + -DCMAKE_MACOSX_RPATH=OFF + -DCMAKE_INSTALL_PREFIX="$installation_prefix" + -DCMAKE_INSTALL_NAME_DIR="$installation_prefix/lib" + ) + # # Get the version numbers of installed packages, if any. 
# @@ -3560,52 +3842,55 @@ if [ -d "${MACOSX_SUPPORT_LIBS}" ] then cd "${MACOSX_SUPPORT_LIBS}" - installed_xz_version=`ls xz-*-done 2>/dev/null | sed 's/xz-\(.*\)-done/\1/'` - installed_lzip_version=`ls lzip-*-done 2>/dev/null | sed 's/lzip-\(.*\)-done/\1/'` - installed_pcre_version=`ls pcre-*-done 2>/dev/null | sed 's/pcre-\(.*\)-done/\1/'` - installed_pcre2_version=$(ls pcre2-*-done 2>/dev/null | sed 's/pcre2-\(.*\)-done/\1/') - installed_autoconf_version=`ls autoconf-*-done 2>/dev/null | sed 's/autoconf-\(.*\)-done/\1/'` - installed_automake_version=`ls automake-*-done 2>/dev/null | sed 's/automake-\(.*\)-done/\1/'` - installed_libtool_version=`ls libtool-*-done 2>/dev/null | sed 's/libtool-\(.*\)-done/\1/'` - installed_cmake_version=`ls cmake-*-done 2>/dev/null | sed 's/cmake-\(.*\)-done/\1/'` - installed_ninja_version=`ls ninja-*-done 2>/dev/null | sed 's/ninja-\(.*\)-done/\1/'` - installed_asciidoctor_version=`ls asciidoctor-*-done 2>/dev/null | sed 's/asciidoctor-\(.*\)-done/\1/'` - installed_asciidoctorpdf_version=`ls asciidoctorpdf-*-done 2>/dev/null | sed 's/asciidoctorpdf-\(.*\)-done/\1/'` - installed_gettext_version=`ls gettext-*-done 2>/dev/null | sed 's/gettext-\(.*\)-done/\1/'` - installed_pkg_config_version=`ls pkg-config-*-done 2>/dev/null | sed 's/pkg-config-\(.*\)-done/\1/'` - installed_glib_version=`ls glib-*-done 2>/dev/null | sed 's/glib-\(.*\)-done/\1/'` - installed_qt_version=`ls qt-*-done 2>/dev/null | sed 's/qt-\(.*\)-done/\1/'` - installed_libsmi_version=`ls libsmi-*-done 2>/dev/null | sed 's/libsmi-\(.*\)-done/\1/'` - installed_libgpg_error_version=`ls libgpg-error-*-done 2>/dev/null | sed 's/libgpg-error-\(.*\)-done/\1/'` - installed_libgcrypt_version=`ls libgcrypt-*-done 2>/dev/null | sed 's/libgcrypt-\(.*\)-done/\1/'` - installed_gmp_version=`ls gmp-*-done 2>/dev/null | sed 's/gmp-\(.*\)-done/\1/'` - installed_libtasn1_version=`ls libtasn1-*-done 2>/dev/null | sed 's/libtasn1-\(.*\)-done/\1/'` - installed_p11_kit_version=`ls 
p11-kit-*-done 2>/dev/null | sed 's/p11-kit-\(.*\)-done/\1/'` - installed_nettle_version=`ls nettle-*-done 2>/dev/null | sed 's/nettle-\(.*\)-done/\1/'` - installed_gnutls_version=`ls gnutls-*-done 2>/dev/null | sed 's/gnutls-\(.*\)-done/\1/'` - installed_lua_version=`ls lua-*-done 2>/dev/null | sed 's/lua-\(.*\)-done/\1/'` - installed_snappy_version=`ls snappy-*-done 2>/dev/null | sed 's/snappy-\(.*\)-done/\1/'` - installed_zstd_version=`ls zstd-*-done 2>/dev/null | sed 's/zstd-\(.*\)-done/\1/'` - installed_libxml2_version=`ls libxml2-*-done 2>/dev/null | sed 's/libxml2-\(.*\)-done/\1/'` - installed_lz4_version=`ls lz4-*-done 2>/dev/null | sed 's/lz4-\(.*\)-done/\1/'` - installed_sbc_version=`ls sbc-*-done 2>/dev/null | sed 's/sbc-\(.*\)-done/\1/'` - installed_maxminddb_version=`ls maxminddb-*-done 2>/dev/null | sed 's/maxminddb-\(.*\)-done/\1/'` - installed_cares_version=`ls c-ares-*-done 2>/dev/null | sed 's/c-ares-\(.*\)-done/\1/'` - installed_libssh_version=`ls libssh-*-done 2>/dev/null | sed 's/libssh-\(.*\)-done/\1/'` - installed_nghttp2_version=`ls nghttp2-*-done 2>/dev/null | sed 's/nghttp2-\(.*\)-done/\1/'` - installed_nghttp3_version=`ls nghttp3-*-done 2>/dev/null | sed 's/nghttp3-\(.*\)-done/\1/'` - installed_libtiff_version=`ls tiff-*-done 2>/dev/null | sed 's/tiff-\(.*\)-done/\1/'` - installed_spandsp_version=`ls spandsp-*-done 2>/dev/null | sed 's/spandsp-\(.*\)-done/\1/'` - installed_speexdsp_version=`ls speexdsp-*-done 2>/dev/null | sed 's/speexdsp-\(.*\)-done/\1/'` - installed_bcg729_version=`ls bcg729-*-done 2>/dev/null | sed 's/bcg729-\(.*\)-done/\1/'` - installed_ilbc_version=`ls ilbc-*-done 2>/dev/null | sed 's/ilbc-\(.*\)-done/\1/'` - installed_opus_version=`ls opus-*-done 2>/dev/null | sed 's/opus-\(.*\)-done/\1/'` - installed_python3_version=`ls python3-*-done 2>/dev/null | sed 's/python3-\(.*\)-done/\1/'` - installed_brotli_version=`ls brotli-*-done 2>/dev/null | sed 's/brotli-\(.*\)-done/\1/'` - installed_minizip_version=`ls 
minizip-*-done 2>/dev/null | sed 's/minizip-\(.*\)-done/\1/'` - installed_sparkle_version=`ls sparkle-*-done 2>/dev/null | sed 's/sparkle-\(.*\)-done/\1/'` - - cd $topdir + installed_xz_version=$( ls xz-*-done 2>/dev/null | sed 's/xz-\(.*\)-done/\1/' ) + installed_lzip_version=$( ls lzip-*-done 2>/dev/null | sed 's/lzip-\(.*\)-done/\1/' ) + installed_pcre_version=$( ls pcre-*-done 2>/dev/null | sed 's/pcre-\(.*\)-done/\1/' ) + installed_pcre2_version=$( ls pcre2-*-done 2>/dev/null | sed 's/pcre2-\(.*\)-done/\1/' ) + installed_autoconf_version=$( ls autoconf-*-done 2>/dev/null | sed 's/autoconf-\(.*\)-done/\1/' ) + installed_automake_version=$( ls automake-*-done 2>/dev/null | sed 's/automake-\(.*\)-done/\1/' ) + installed_libtool_version=$( ls libtool-*-done 2>/dev/null | sed 's/libtool-\(.*\)-done/\1/' ) + installed_cmake_version=$( ls cmake-*-done 2>/dev/null | sed 's/cmake-\(.*\)-done/\1/' ) + installed_ninja_version=$( ls ninja-*-done 2>/dev/null | sed 's/ninja-\(.*\)-done/\1/' ) + installed_asciidoctor_version=$( ls asciidoctor-*-done 2>/dev/null | sed 's/asciidoctor-\(.*\)-done/\1/' ) + installed_asciidoctorpdf_version=$( ls asciidoctorpdf-*-done 2>/dev/null | sed 's/asciidoctorpdf-\(.*\)-done/\1/' ) + installed_gettext_version=$( ls gettext-*-done 2>/dev/null | sed 's/gettext-\(.*\)-done/\1/' ) + installed_pkg_config_version=$( ls pkg-config-*-done 2>/dev/null | sed 's/pkg-config-\(.*\)-done/\1/' ) + installed_glib_version=$( ls glib-*-done 2>/dev/null | sed 's/glib-\(.*\)-done/\1/' ) + installed_qt_version=$( ls qt-*-done 2>/dev/null | sed 's/qt-\(.*\)-done/\1/' ) + installed_libsmi_version=$( ls libsmi-*-done 2>/dev/null | sed 's/libsmi-\(.*\)-done/\1/' ) + installed_libgpg_error_version=$( ls libgpg-error-*-done 2>/dev/null | sed 's/libgpg-error-\(.*\)-done/\1/' ) + installed_libgcrypt_version=$( ls libgcrypt-*-done 2>/dev/null | sed 's/libgcrypt-\(.*\)-done/\1/' ) + installed_gmp_version=$( ls gmp-*-done 2>/dev/null | sed 's/gmp-\(.*\)-done/\1/' ) + 
installed_libtasn1_version=$( ls libtasn1-*-done 2>/dev/null | sed 's/libtasn1-\(.*\)-done/\1/' ) + installed_p11_kit_version=$( ls p11-kit-*-done 2>/dev/null | sed 's/p11-kit-\(.*\)-done/\1/' ) + installed_nettle_version=$( ls nettle-*-done 2>/dev/null | sed 's/nettle-\(.*\)-done/\1/' ) + installed_gnutls_version=$( ls gnutls-*-done 2>/dev/null | sed 's/gnutls-\(.*\)-done/\1/' ) + installed_lua_version=$( ls lua-*-done 2>/dev/null | sed 's/lua-\(.*\)-done/\1/' ) + installed_snappy_version=$( ls snappy-*-done 2>/dev/null | sed 's/snappy-\(.*\)-done/\1/' ) + installed_zstd_version=$( ls zstd-*-done 2>/dev/null | sed 's/zstd-\(.*\)-done/\1/' ) + installed_zlibng_version=$( ls zlibng-*-done 2>/dev/null | sed 's/zlibng-\(.*\)-done/\1/' ) + installed_libxml2_version=$( ls libxml2-*-done 2>/dev/null | sed 's/libxml2-\(.*\)-done/\1/' ) + installed_lz4_version=$( ls lz4-*-done 2>/dev/null | sed 's/lz4-\(.*\)-done/\1/' ) + installed_sbc_version=$( ls sbc-*-done 2>/dev/null | sed 's/sbc-\(.*\)-done/\1/' ) + installed_maxminddb_version=$( ls maxminddb-*-done 2>/dev/null | sed 's/maxminddb-\(.*\)-done/\1/' ) + installed_cares_version=$( ls c-ares-*-done 2>/dev/null | sed 's/c-ares-\(.*\)-done/\1/' ) + installed_libssh_version=$( ls libssh-*-done 2>/dev/null | sed 's/libssh-\(.*\)-done/\1/' ) + installed_nghttp2_version=$( ls nghttp2-*-done 2>/dev/null | sed 's/nghttp2-\(.*\)-done/\1/' ) + installed_nghttp3_version=$( ls nghttp3-*-done 2>/dev/null | sed 's/nghttp3-\(.*\)-done/\1/' ) + installed_libtiff_version=$( ls tiff-*-done 2>/dev/null | sed 's/tiff-\(.*\)-done/\1/' ) + installed_spandsp_version=$( ls spandsp-*-done 2>/dev/null | sed 's/spandsp-\(.*\)-done/\1/' ) + installed_speexdsp_version=$( ls speexdsp-*-done 2>/dev/null | sed 's/speexdsp-\(.*\)-done/\1/' ) + installed_bcg729_version=$( ls bcg729-*-done 2>/dev/null | sed 's/bcg729-\(.*\)-done/\1/' ) + installed_ilbc_version=$( ls ilbc-*-done 2>/dev/null | sed 's/ilbc-\(.*\)-done/\1/' ) + 
installed_opencore_amr_version=$( ls opencore-amr-*-done 2>/dev/null | sed 's/opencore-amr-\(.*\)-done/\1/' ) + installed_opus_version=$( ls opus-*-done 2>/dev/null | sed 's/opus-\(.*\)-done/\1/' ) + installed_python3_version=$( ls python3-*-done 2>/dev/null | sed 's/python3-\(.*\)-done/\1/' ) + installed_brotli_version=$( ls brotli-*-done 2>/dev/null | sed 's/brotli-\(.*\)-done/\1/' ) + installed_minizip_version=$( ls minizip-*-done 2>/dev/null | sed 's/minizip-\(.*\)-done/\1/' ) + installed_minizip_ng_version=$( ls minizip-ng-*-done 2>/dev/null | sed 's/minizip-ng-\(.*\)-done/\1/' ) + installed_sparkle_version=$( ls sparkle-*-done 2>/dev/null | sed 's/sparkle-\(.*\)-done/\1/' ) + + cd "$topdir" fi if [ "$do_uninstall" = "yes" ] @@ -3621,15 +3906,22 @@ fi # However, we *are* setting them in the environment, for our own # nefarious purposes, so start them out as "-g -O2". # -CFLAGS="-g -O2" -CXXFLAGS="-g -O2" +export CFLAGS="-g -O2 -I$installation_prefix/include" +export CXXFLAGS="-g -O2 -I$installation_prefix/include" +export LDFLAGS="-L$installation_prefix/lib" +export PKG_CONFIG_PATH="$installation_prefix/lib/pkgconfig" +CONFIGURE_OPTS=( --prefix="$installation_prefix" ) # if no make options are present, set default options +# Should we just set MAKEFLAGS instead? if [ -z "$MAKE_BUILD_OPTS" ] ; then # by default use 1.5x number of cores for parallel build - MAKE_BUILD_OPTS="-j $(( $(sysctl -n hw.logicalcpu) * 3 / 2))" + MAKE_BUILD_OPTS=( -j $(( $(sysctl -n hw.logicalcpu) * 3 / 2)) ) fi +CURL_REMOTE_NAME_OPTS=(--fail-with-body --location --remote-name) +CURL_LOCAL_NAME_OPTS=(--fail-with-body --location --output) + # # If we have a target release, look for the oldest SDK that's for an # OS equal to or later than that one, and build libraries against it @@ -3639,7 +3931,7 @@ fi # versions of the same release, or earlier releases if the minimum is # earlier. # -if [ ! 
-z "$min_osx_target" ] +if [ -n "$min_osx_target" ] then # # Get the major and minor version of the target release. @@ -3651,8 +3943,8 @@ then # # major.minor. # - min_osx_target_major=`echo "$min_osx_target" | sed -n 's/\([1-9][0-9]*\)\..*/\1/p'` - min_osx_target_minor=`echo "$min_osx_target" | sed -n 's/[1-9][0-9]*\.\(.*\)/\1/p'` + min_osx_target_major=$( echo "$min_osx_target" | sed -n 's/\([1-9][0-9]*\)\..*/\1/p' ) + min_osx_target_minor=$( echo "$min_osx_target" | sed -n 's/[1-9][0-9]*\.\(.*\)/\1/p' ) ;; [1-9][0-9]) @@ -3694,15 +3986,15 @@ then # Get a list of all the SDKs in that directory, if any. # We assume it'll be a while before there's a macOS 100. :-) # - sdklist=`(cd "$sdksdir"; ls -d MacOSX[1-9][0-9].[0-9]*.sdk 2>/dev/null)` + sdklist=$( (cd "$sdksdir"; ls -d MacOSX[1-9][0-9].[0-9]*.sdk 2>/dev/null) ) for sdk in $sdklist do # # Get the major and minor version for this SDK. # - sdk_major=`echo "$sdk" | sed -n 's/MacOSX\([1-9][0-9]*\)\..*\.sdk/\1/p'` - sdk_minor=`echo "$sdk" | sed -n 's/MacOSX[1-9][0-9]*\.\(.*\)\.sdk/\1/p'` + sdk_major=$( echo "$sdk" | sed -n 's/MacOSX\([1-9][0-9]*\)\..*\.sdk/\1/p' ) + sdk_minor=$( echo "$sdk" | sed -n 's/MacOSX[1-9][0-9]*\.\(.*\)\.sdk/\1/p' ) # # Is it for the deployment target or some later release? @@ -3732,30 +4024,31 @@ then echo "Using the $sdk_major.$sdk_minor SDK" # - # Make sure there are links to /usr/local/include and /usr/local/lib + # Make sure there are links to $installation_prefix/include and $installation_prefix/lib # in the SDK's usr/local. # - if [ ! -e $SDKPATH/usr/local/include ] - then - if [ ! -d $SDKPATH/usr/local ] - then - sudo mkdir $SDKPATH/usr/local - fi - sudo ln -s /usr/local/include $SDKPATH/usr/local/include - fi - if [ ! -e $SDKPATH/usr/local/lib ] - then - if [ ! -d $SDKPATH/usr/local ] - then - sudo mkdir $SDKPATH/usr/local - fi - sudo ln -s /usr/local/lib $SDKPATH/usr/local/lib - fi + # XXX - Is this needed any more? +# if [ ! 
-e $SDKPATH$installation_prefix/include ] +# then +# if [ ! -d $SDKPATH$installation_prefix ] +# then +# sudo mkdir $SDKPATH$installation_prefix +# fi +# sudo ln -s $installation_prefix/include $SDKPATH$installation_prefix/include +# fi +# if [ ! -e $SDKPATH$installation_prefix/lib ] +# then +# if [ ! -d $SDKPATH$installation_prefix ] +# then +# sudo mkdir $SDKPATH$installation_prefix +# fi +# sudo ln -s $installation_prefix/lib $SDKPATH$installation_prefix/lib +# fi # # Set the minimum OS version for which to build to the specified # minimum target OS version, so we don't, for example, end up using - # linker features supported by the OS verson on which we're building + # linker features supported by the OS version on which we're building # but not by the target version. # VERSION_MIN_FLAGS="-mmacosx-version-min=$min_osx_target" @@ -3767,9 +4060,6 @@ then fi -export CFLAGS -export CXXFLAGS - # # You need Xcode or the command-line tools installed to get the compilers (xcrun checks both). # @@ -3801,15 +4091,13 @@ if [ "$QT_VERSION" ]; then fi fi -export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig - # # Do all the downloads and untarring in a subdirectory, so all that # stuff can be removed once we've installed the support libraries. if [ ! -d "${MACOSX_SUPPORT_LIBS}" ] then - mkdir "${MACOSX_SUPPORT_LIBS}" || exit 1 + mkdir "${MACOSX_SUPPORT_LIBS}" fi cd "${MACOSX_SUPPORT_LIBS}" @@ -3820,11 +4108,9 @@ echo "" # # Indicate what paths to use for pkg-config and cmake. 
#!/usr/bin/env python3
#
# Wireshark - Network traffic analyzer
# By Gerald Combs <gerald@wireshark.org>
# Copyright 1998 Gerald Combs
#
# SPDX-License-Identifier: GPL-2.0-or-later
#
'''
make-bluetooth - Generate value_strings containing bluetooth uuids and company identifiers.
It makes use of the databases from
The Bluetooth SIG Repository: https://bitbucket.org/bluetooth-SIG/public/src/main/assigned_numbers/
and processes the YAML into human-readable strings to go into packet-bluetooth.c.
'''

import sys
import urllib.request, urllib.error, urllib.parse
import yaml

base_url = "https://bitbucket.org/bluetooth-SIG/public/raw/HEAD/assigned_numbers/"

MIN_UUIDS = 1400        # 1424 as of 31-12-2023
MIN_COMPANY_IDS = 3400  # 3405 as of 31-12-2023

# Shared HTTP headers for every request to the SIG repository.
REQ_HEADERS = {'User-Agent': 'Wireshark make-bluetooth'}


def fetch_yaml(path, what):
    '''Download and parse one YAML database from the SIG repository.

    path: path relative to base_url, e.g. "uuids/service_class.yaml".
    what: short noun used in the error message ("UUIDs", "company IDs").
    Exits the script with status 1 on any download/decode failure.
    '''
    url = base_url + path
    try:
        req = urllib.request.Request(url, headers=REQ_HEADERS)
        response = urllib.request.urlopen(req)
        lines = response.read().decode('UTF-8', 'replace')
    except Exception as e:
        print("Failed to get {what} at {url}, because of: {e}".format(what=what, url=url, e=e), file=sys.stderr)
        sys.exit(1)
    return yaml.safe_load(lines)

##
## UUIDs
##

'''
List of all YAML files to retrieve, the lists of UUIDs to put into the value_string
and other information.
Unfortunately the encoding of the names among the YAML files is inconsistent,
to say the least. This will need post-processing.
Also the previous value_string contained additional uuids, which are not currently
present in the databases. Prepare the lists with these uuids so they are not lost.
When they do appear in the databases they must be removed here.
'''

uuids_sources = [
{ # 0x0001
    "yaml": "protocol_identifiers.yaml",
    "description": "Protocol Identifiers",
    "unCamelCase": True,
    "unlist": [],
    "list": [
        { "uuid": 0x001D, "name": "UDI C-Plane" },
    ]
},
{ # 0x1000
    "yaml": "service_class.yaml",
    "description": "Service Class",
    "unCamelCase": True,
    "unlist": [],
    "list": [
        # Then we have this weird one stuck in between "Service Class"
        # from browse_group_identifiers.yaml
        { "uuid": 0x1002, "name": "Public Browse Group" },
        # And some from other sources
        { "uuid": 0x1129, "name": "Video Conferencing GW" },
        { "uuid": 0x112A, "name": "UDI MT" },
        { "uuid": 0x112B, "name": "UDI TA" },
        { "uuid": 0x112C, "name": "Audio/Video" },
    ]
},
{ # 0x1600
    "yaml": "mesh_profile_uuids.yaml",
    "description": "Mesh Profile",
    "unCamelCase": False,
    "unlist": [],
    "list": []
},
{ # 0x1800
    "yaml": "service_uuids.yaml",
    "description": "Service",
    "unCamelCase": False,
    "unlist": [],
    "list": []
},
{ # 0x2700
    "yaml": "units.yaml",
    "description": "Units",
    "unCamelCase": False,
    "unlist": [],
    "list": []
},
{ # 0x2800
    "yaml": "declarations.yaml",
    "description": "Declarations",
    "unCamelCase": False,
    "unlist": [],
    "list": []
},
{ # 0x2900
    "yaml": "descriptors.yaml",
    "description": "Descriptors",
    "unCamelCase": False,
    "unlist": [],
    "list": []
},
{ # 0x2a00
    "yaml": "characteristic_uuids.yaml",
    "description": "Characteristics",
    "unCamelCase": False,
    "unlist": [],
    "list": [
        # Then we have these weird ones stuck in between "Characteristics"
        # from object_types.yaml
        { "uuid": 0x2ACA, "name": "Unspecified" },
        { "uuid": 0x2ACB, "name": "Directory Listing" },
        # And some from other sources
        { "uuid": 0x2A0B, "name": "Exact Time 100" },
        { "uuid": 0x2A10, "name": "Secondary Time Zone" },
        { "uuid": 0x2A15, "name": "Time Broadcast" },
        { "uuid": 0x2A1A, "name": "Battery Power State" },
        { "uuid": 0x2A1B, "name": "Battery Level State" },
        { "uuid": 0x2A1F, "name": "Temperature Celsius" },
        { "uuid": 0x2A20, "name": "Temperature Fahrenheit" },
        { "uuid": 0x2A2F, "name": "Position 2D" },
        { "uuid": 0x2A30, "name": "Position 3D" },
        { "uuid": 0x2A3A, "name": "Removable" },
        { "uuid": 0x2A3B, "name": "Service Required" },
        { "uuid": 0x2A3C, "name": "Scientific Temperature Celsius" },
        { "uuid": 0x2A3D, "name": "String" },
        { "uuid": 0x2A3E, "name": "Network Availability" },
        { "uuid": 0x2A56, "name": "Digital" },
        { "uuid": 0x2A57, "name": "Digital Output" },
        { "uuid": 0x2A58, "name": "Analog" },
        { "uuid": 0x2A59, "name": "Analog Output" },
        { "uuid": 0x2A62, "name": "Pulse Oximetry Control Point" },
        # These have somehow disappeared. We keep them for if they were used.
        { "uuid": 0x2BA9, "name": "Media Player Icon Object Type" },
        { "uuid": 0x2BAA, "name": "Track Segments Object Type" },
        { "uuid": 0x2BAB, "name": "Track Object Type" },
        { "uuid": 0x2BAC, "name": "Group Object Type" },
    ]
},
{ # 0xfxxx
    "yaml": "member_uuids.yaml",
    "description": "Members",
    "unCamelCase": False,
    "unlist": [],
    "list": [
        # This they really screwed up. The UUID was moved to sdo_uuids,
        # thereby breaking the range and ordering completely.
        { "uuid": 0xFCCC, "name": "Wi-Fi Easy Connect Specification" },
    ]
},
{ # 0xffef (and 0xfccc)
    "yaml": "sdo_uuids.yaml",
    "description": "SDO",
    "unCamelCase": False,
    "unlist": [ 0xFCCC,
    ],
    "list": []
}]

'''
Retrieve the YAML files defining the UUIDs and add them to the lists
'''
for uuids in uuids_sources:
    uuids_dir = fetch_yaml('uuids/' + uuids["yaml"], what="UUIDs")
    for uuid in uuids_dir["uuids"]:
        if uuid["uuid"] not in uuids["unlist"]:
            uuids["list"].append(uuid)

'''
Go through the lists and perform general and specific transforms.
Several exceptional cases are addressed directly by their UUID, because of the inconsistent nature
by which their name is constructed.
When they appear more sensibly in the databases they must be removed here.
When new inconsistent entries appear in the databases their transforms can be added here,
but also add their UUID below.
'''
for uuids in uuids_sources:
    for uuid in uuids["list"]:
        # Handle a few exceptional cases
        if uuid["uuid"] == 0x001E:
            uuid["name"] = "MCAP Control Channel"
        elif uuid["uuid"] == 0x001F:
            uuid["name"] = "MCAP Data Channel"
        elif uuid["uuid"] == 0x1102:
            uuid["name"] = "LAN Access Using PPP"
        elif uuid["uuid"] == 0x1104:
            uuid["name"] = "IrMC Sync"
        elif uuid["uuid"] == 0x1105:
            uuid["name"] = "OBEX Object Push"
        elif uuid["uuid"] == 0x1106:
            uuid["name"] = "OBEX File Transfer"
        elif uuid["uuid"] == 0x1107:
            uuid["name"] = "IrMC Sync Command"
        elif uuid["uuid"] == 0x1200:
            uuid["name"] = "PnP Information"
        elif uuid["uuid"] == 0x2B8C:
            uuid["name"] = "CO\u2082 Concentration"
        else:
            # And these in general
            uuid["name"] = uuid["name"].replace("_", " ")
        # Names end up inside C string literals; escape embedded quotes.
        uuid["name"] = uuid["name"].replace('"', '\\"')

'''
Go through the lists and, for those lists flagged as such, perform the unCamelCase transform
on all the names in that list.
Several exceptional cases were addressed directly by their UUID and must be excluded from this
transform.
When additional characters indicating a break in words appear in database entries they can be
added to break_chars.
'''
# Characters (including digits) that already mark a word boundary, so no
# space is inserted before a following capital letter.
break_chars = [" ", "-", "+", "/", "(", ".", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
# The few exceptional cases named explicitly above; they must keep their
# hand-crafted names.
exception_uuids = [0x001E, 0x001F, 0x1102, 0x1104, 0x1105, 0x1106, 0x1107, 0x1200, 0x2B8C]

for uuids in uuids_sources:
    if not uuids["unCamelCase"]:
        continue
    for uuid in uuids["list"]:
        # if not a few exceptional cases (see above)
        if uuid["uuid"] in exception_uuids:
            continue
        # Parse through the names and look for capital letters; when
        # not preceded by another capital letter or one of break_chars, insert a space
        was_break = True # fake space at beginning of string
        was_upper = False
        name = ""
        for character in uuid["name"]:
            is_upper = character.isupper()
            if is_upper and not was_break and not was_upper:
                name += " "
            name += character
            was_break = character in break_chars
            was_upper = is_upper
        uuid["name"] = name

'''
To be able to generate a value_string_ext array the entries need to be sorted.
'''
for uuids in uuids_sources:
    uuids["list"] = sorted(uuids["list"], key=lambda uuid: uuid["uuid"])

'''
Do a check on duplicate entries.
While at it, do a count of the number of UUIDs retrieved.
'''
prev_uuid = 0
uuid_count = 0
for uuids in uuids_sources:
    for uuid in uuids["list"]:
        if uuid["uuid"] > prev_uuid:
            prev_uuid = uuid["uuid"]
        else:
            print("Duplicate UUID detected: 0x{uuid:04X}".format(uuid=uuid["uuid"]), file=sys.stderr)
            sys.exit(1)
    uuid_count += len(uuids["list"])

'''
Sanity check to see if enough entries were retrieved
'''
if uuid_count < MIN_UUIDS:
    print("There are fewer UUIDs than expected: got {count} but was expecting {minimum}".format(count=uuid_count, minimum=MIN_UUIDS), file=sys.stderr)
    sys.exit(1)

'''
Finally output the annotated source code for the value_string
'''
print("const value_string bluetooth_uuid_vals[] = {")

for uuids in uuids_sources:
    print("    /* {description} - {base_url}uuids/{yaml} */".format(description=uuids["description"], base_url=base_url, yaml=uuids["yaml"]))
    for uuid in uuids["list"]:
        print("    {{ 0x{uuid:04X}, \"{name}\" }},".format(uuid=uuid["uuid"], name=uuid["name"]))

print("    { 0, NULL }")
print("};")
print("value_string_ext bluetooth_uuid_vals_ext = VALUE_STRING_EXT_INIT(bluetooth_uuid_vals);")
print("")

##
## Company Identifiers
##

'''
List of the YAML files to retrieve and the lists of values to put into the value_string.
Also the previous value_string contained additional company IDs, which are not currently
present in the databases. Prepare the lists with these company IDs so they are not lost.
When they do appear in the databases they must be removed here.
'''

company_ids_sources = [
{
    "yaml": "company_identifiers.yaml",
    "list": [
        # Some from other sources
        { "value": 0x0418, "name": "Alpine Electronics Inc." },
        { "value": 0x0943, "name": "Inovonics Corp." },
        { "value": 0xFFFF, "name": "For use in internal and interoperability tests" },
    ]
}]

'''
Retrieve the YAML files defining the company IDs and add them to the lists
'''
for company_ids in company_ids_sources:
    # NOTE: used to sys.exit(-1) here; unified to exit status 1 like every
    # other error path in this script.
    company_ids_dir = fetch_yaml('company_identifiers/' + company_ids["yaml"], what="company IDs")
    company_ids["list"].extend(company_ids_dir["company_identifiers"])

'''
Go through the lists and perform general transforms.
'''
for company_ids in company_ids_sources:
    for company_id in company_ids["list"]:
        # Names end up inside C string literals; escape embedded quotes.
        company_id["name"] = company_id["name"].replace('"', '\\"')

'''
To be able to generate a value_string_ext array the entries need to be sorted.
'''
for company_ids in company_ids_sources:
    company_ids["list"] = sorted(company_ids["list"], key=lambda company_id: company_id['value'])

'''
Do a check on duplicate entries.
While at it, do a count of the number of company IDs retrieved.
'''
prev_company_id = -1
company_id_count = 0
for company_ids in company_ids_sources:
    for company_id in company_ids["list"]:
        if company_id["value"] > prev_company_id:
            prev_company_id = company_id["value"]
        else:
            print("Duplicate company ID detected: 0x{company_id:04X}".format(company_id=company_id["value"]), file=sys.stderr)
            sys.exit(1)
    company_id_count += len(company_ids["list"])

'''
Sanity check to see if enough entries were retrieved
'''
if company_id_count < MIN_COMPANY_IDS:
    print("There are fewer company IDs than expected: got {count} but was expecting {minimum}".format(count=company_id_count, minimum=MIN_COMPANY_IDS), file=sys.stderr)
    sys.exit(1)

'''
Finally output the source code for the value_string
'''
print("/* Taken from {base_url}company_identifiers/{yaml} */".format(base_url=base_url, yaml=company_ids_sources[0]["yaml"]))
print("static const value_string bluetooth_company_id_vals[] = {")

for company_ids in company_ids_sources:
    for company_id in company_ids["list"]:
        print("    {{ 0x{company_id:04X}, \"{name}\" }},".format(company_id=company_id["value"], name=company_id["name"]))

print("    { 0, NULL }")
print("};")
print("value_string_ext bluetooth_company_id_vals_ext = VALUE_STRING_EXT_INIT(bluetooth_company_id_vals);")
array_length(table); idx++) { + if (table[idx] != NULL) { + fprintf(fp, "%zu\t%s\n", idx, table[idx]); } } } @@ -100,30 +100,23 @@ class CFile: # Include header files self.f.write('#include "config.h"\n\n') self.f.write('#include \n') + self.f.write('#include \n') self.f.write('#include "enterprises.h"\n') self.f.write('\n\n') def __del__(self): - self.f.write('typedef struct\n') - self.f.write('{\n') - self.f.write(' uint32_t max_idx;\n') - self.f.write(' const char* values[' + str(self.highest_num+1) + '];\n') - self.f.write('} global_enterprises_table_t;\n\n') - # Write static table - self.f.write('static global_enterprises_table_t table =\n') + self.f.write('static const char * const table[] =\n') self.f.write('{\n') # Largest index - self.f.write(' ' + str(self.highest_num) + ',\n') - self.f.write(' {\n') # Entries (read from dict) for n in range(0, self.highest_num+1): if n not in self.mappings: # There are some gaps, write a NULL entry so can lookup by index - line = ' NULL' + line = ' NULL' else: - line = ' "' + self.mappings[n] + '"' - # Add coma. + line = ' "' + self.mappings[n] + '"' + # Add comma. if n < self.highest_num: line += ',' # Add number as aligned comment. 
@@ -132,8 +125,6 @@ class CFile: self.f.write(line+'\n') # End of array - self.f.write(' }\n') - # End of struct self.f.write('};\n') print('Re-generated', self.filename) diff --git a/tools/make-enums.py b/tools/make-enums.py index b6a2835e..74274313 100755 --- a/tools/make-enums.py +++ b/tools/make-enums.py @@ -51,7 +51,7 @@ def parse_files(infiles, outfile): source += """ #define ENUM(arg) { #arg, arg } -static ws_enum_t all_enums[] = { +static ws_enum_t const all_enums[] = { """ definitions = parser.defs['values'] diff --git a/tools/make-iana-ip.py b/tools/make-iana-ip.py new file mode 100755 index 00000000..f6e0a86c --- /dev/null +++ b/tools/make-iana-ip.py @@ -0,0 +1,209 @@ +#!/usr/bin/env python3 +# +# Wireshark - Network traffic analyzer +# By Gerald Combs +# Copyright 1998 Gerald Combs +# +# SPDX-License-Identifier: GPL-2.0-or-later +'''Update the IANA IP registry file. + +Make-iana-ip creates a file containing information about IPv4/IPv6 allocation blocks. +''' + +import csv +import io +import ipaddress +import os +import re +import sys +import urllib.request, urllib.error, urllib.parse + +def exit_msg(msg=None, status=1): + if msg is not None: + sys.stderr.write(msg + '\n\n') + sys.stderr.write(__doc__ + '\n') + sys.exit(status) + +def open_url(url): + '''Open a URL. + Returns a tuple containing the body and response dict. The body is a + str in Python 3 and bytes in Python 2 in order to be compatible with + csv.reader. 
+ ''' + + if len(sys.argv) > 1: + url_path = os.path.join(sys.argv[1], url[1]) + url_fd = open(url_path) + body = url_fd.read() + url_fd.close() + else: + url_path = '/'.join(url) + + req_headers = { 'User-Agent': 'Wireshark iana-ip' } + try: + req = urllib.request.Request(url_path, headers=req_headers) + response = urllib.request.urlopen(req) + body = response.read().decode('UTF-8', 'replace') + except Exception: + exit_msg('Error opening ' + url_path) + + return body + +class IPv4SpecialBlock(ipaddress.IPv4Network): + @staticmethod + def ip_get_subnet_mask(bits): + masks = ( + 0x00000000, + 0x80000000, 0xc0000000, 0xe0000000, 0xf0000000, + 0xf8000000, 0xfc000000, 0xfe000000, 0xff000000, + 0xff800000, 0xffc00000, 0xffe00000, 0xfff00000, + 0xfff80000, 0xfffc0000, 0xfffe0000, 0xffff0000, + 0xffff8000, 0xffffc000, 0xffffe000, 0xfffff000, + 0xfffff800, 0xfffffc00, 0xfffffe00, 0xffffff00, + 0xffffff80, 0xffffffc0, 0xffffffe0, 0xfffffff0, + 0xfffffff8, 0xfffffffc, 0xfffffffe, 0xffffffff) + if bits > 32: + ValueError("Expected bit mask less or equal to 32") + return masks[bits] + + def __str__(self): + addr = self.network_address + mask = self.prefixlen + line = '{{ .ipv4 = {{ {:#x}, {:#010x} }} }}'.format(addr, self.ip_get_subnet_mask(mask)) + return line + +class IPv6SpecialBlock(ipaddress.IPv6Network): + @staticmethod + def addr_c_array(byte_array): + if len(byte_array) != 16: + raise ValueError("Expected byte array of length 16") + c_array = ", ".join(f"0x{byte:02x}" for byte in byte_array) + return f"{{ {c_array} }}" + + def __str__(self): + addr = self.network_address.packed + mask = self.prefixlen + line = '{{ .ipv6 = {{ {}, {} }} }}'.format(self.addr_c_array(addr), mask) + return line + +class IPRegistry(list): + @staticmethod + def true_or_false(val): + if val == 'True': + return '1' + elif val == 'False': + return '0' + else: + return '-1' + + def append(self, row): + ip, name, _, _, termin_date, source, destination, forward, glob, reserved = row + if 
termin_date[0].isdigit(): + # skip allocations that have expired + return + name = re.sub(r'\[.*\]', '', name) + name = '"' + name.replace('"', '\\"') + '"' + source = self.true_or_false(source) + destination = self.true_or_false(destination) + forward = self.true_or_false(forward) + glob = self.true_or_false(glob) + reserved = self.true_or_false(reserved) + super().append([ip, name, source, destination, forward, glob, reserved]) + +class IPv4Registry(IPRegistry): + @staticmethod + def ipv4_addr_and_mask(s): + ip = IPv4SpecialBlock(s) + return ip + + def append(self, row): + # some lines contain multiple (comma separated) blocks + ip_list = row[0].split(',') + for s in ip_list: + # remove annotations like "1.1.1.1 [2]" + ip_str = s.split()[0] + row = [self.ipv4_addr_and_mask(ip_str)] + row[1:] + super().append(row) + + def dump(self, fd): + self.sort() + fd.write('_U_ static const struct ws_iana_ip_special_block __ipv4_special_block[] = {\n') + for row in self: + line = ' {{ 4, {}, {}, {}, {}, {}, {}, {} }},\n'.format(*row) + fd.write(line) + fd.write('};\n') + +class IPv6Registry(IPRegistry): + @staticmethod + def ipv6_addr_and_mask(s): + ip_str = s.split()[0] + ip = IPv6SpecialBlock(ip_str) + return ip + + def append(self, row): + # remove annotations like "1.1.1.1 [2]" + ip_str = row[0].split()[0] + row = [self.ipv6_addr_and_mask(ip_str)] + row[1:] + super().append(row) + + def dump(self, fd): + self.sort() + fd.write('// GCC bug?\n') + fd.write('DIAG_OFF(missing-braces)\n') + fd.write('_U_ static const struct ws_iana_ip_special_block __ipv6_special_block[] = {\n') + for row in self: + line = \ +''' {{ 6, {}, + {}, {}, {}, {}, {}, {} }},\n'''.format(*row) + fd.write(line) + fd.write('};\n') + fd.write('DIAG_ON(missing-braces)\n') + +IANA_URLS = { + 'IPv4': { 'url': ["https://www.iana.org/assignments/iana-ipv4-special-registry/", "iana-ipv4-special-registry-1.csv"], 'min_entries': 2 }, + 'IPv6': { 'url': 
["https://www.iana.org/assignments/iana-ipv6-special-registry/", "iana-ipv6-special-registry-1.csv"], 'min_entries': 2 }, +} + +def dump_registry(db, fd, reg): + db_url = IANA_URLS[db]['url'] + print('Loading {} data from {}'.format(db, db_url)) + body = open_url(db_url) + iana_csv = csv.reader(body.splitlines()) + + # Pop the title row. + next(iana_csv) + for iana_row in iana_csv: + # Address Block,Name,RFC,Allocation Date,Termination Date,Source,Destination,Forwardable,Globally Reachable,Reserved-by-Protocol + # ::1/128,Loopback Address,[RFC4291],2006-02,N/A,False,False,False,False,True + reg.append(iana_row) + + if len(reg) < IANA_URLS[db]['min_entries']: + exit_msg("Too few {} entries. Got {}, wanted {}".format(db, len(reg), IANA_URLS[db]['min_entries'])) + + reg.dump(fd) + +def main(): + iana_path = os.path.join('epan', 'iana-ip-data.c') + + try: + fd = io.open(iana_path, 'w', encoding='UTF-8') + except Exception: + exit_msg("Couldn't open \"{}\" file for reading".format(iana_path)) + + fd.write('''/* + * This file was generated by running ./tools/make-iana-ip.py. 
+ * + * SPDX-License-Identifier: GPL-2.0-or-later + */ + +#include "iana-ip.h" + +''') + + dump_registry('IPv4', fd, IPv4Registry()) + fd.write('\n') + dump_registry('IPv6', fd, IPv6Registry()) + fd.close() + +if __name__ == '__main__': + main() diff --git a/tools/make-isobus.py b/tools/make-isobus.py old mode 100644 new mode 100755 index ce0259c7..792f2b33 --- a/tools/make-isobus.py +++ b/tools/make-isobus.py @@ -41,7 +41,6 @@ def open_url_zipped(url): return zipfile.ZipFile(io.BytesIO(body)) def main(): - this_dir = os.path.dirname(__file__) isobus_output_path = os.path.join('epan', 'dissectors', 'packet-isobus-parameters.h') isobus_zip_url = [ "https://www.isobus.net/isobus/attachments/", "isoExport_csv.zip"] @@ -132,7 +131,7 @@ def main(): pgn_id, pgn_name, = row[:2] if not pgn_name.startswith("Proprietary B"): pgn_names[int(pgn_id)] = pgn_name.replace("\"","'") - except: + except Exception: pass # prepare output file @@ -164,7 +163,7 @@ def main(): output_fd.write(" { 0, NULL }\n") output_fd.write("};\n") - output_fd.write("static value_string_ext isobus_industry_groups_ext = VALUE_STRING_EXT_INIT(_isobus_industry_groups);\n\n"); + output_fd.write("static value_string_ext isobus_industry_groups_ext = VALUE_STRING_EXT_INIT(_isobus_industry_groups);\n\n") # Write Vehicle System Names output_fd.write("/* key: 256 * Industry-Group-ID + Vehicle-Group-ID */\n") @@ -175,7 +174,7 @@ def main(): output_fd.write(" { 0, NULL }\n") output_fd.write("};\n") - output_fd.write("static value_string_ext isobus_vehicle_systems_ext = VALUE_STRING_EXT_INIT(_isobus_vehicle_systems);\n\n"); + output_fd.write("static value_string_ext isobus_vehicle_systems_ext = VALUE_STRING_EXT_INIT(_isobus_vehicle_systems);\n\n") # Write Global Name Functions output_fd.write("static const value_string _isobus_global_name_functions[] = {\n") @@ -185,7 +184,7 @@ def main(): output_fd.write(" { 0, NULL }\n") output_fd.write("};\n") - output_fd.write("static value_string_ext 
isobus_global_name_functions_ext = VALUE_STRING_EXT_INIT(_isobus_global_name_functions);\n\n"); + output_fd.write("static value_string_ext isobus_global_name_functions_ext = VALUE_STRING_EXT_INIT(_isobus_global_name_functions);\n\n") # IG Specific Global Name Functions output_fd.write("/* key: 65536 * Industry-Group-ID + 256 * Vehicle-System-ID + Function-ID */\n") @@ -196,7 +195,7 @@ def main(): output_fd.write(" { 0, NULL }\n") output_fd.write("};\n") - output_fd.write("static value_string_ext isobus_ig_specific_name_functions_ext = VALUE_STRING_EXT_INIT(_isobus_ig_specific_name_functions);\n\n"); + output_fd.write("static value_string_ext isobus_ig_specific_name_functions_ext = VALUE_STRING_EXT_INIT(_isobus_ig_specific_name_functions);\n\n") # Write Manufacturers output_fd.write("static const value_string _isobus_manufacturers[] = {\n") @@ -206,7 +205,7 @@ def main(): output_fd.write(" { 0, NULL }\n") output_fd.write("};\n") - output_fd.write("static value_string_ext isobus_manufacturers_ext = VALUE_STRING_EXT_INIT(_isobus_manufacturers);\n\n"); + output_fd.write("static value_string_ext isobus_manufacturers_ext = VALUE_STRING_EXT_INIT(_isobus_manufacturers);\n\n") # PGN Names output_fd.write("static const value_string _isobus_pgn_names[] = {\n") @@ -216,7 +215,7 @@ def main(): output_fd.write(" { 0, NULL }\n") output_fd.write("};\n") - output_fd.write("static value_string_ext isobus_pgn_names_ext = VALUE_STRING_EXT_INIT(_isobus_pgn_names);\n\n"); + output_fd.write("static value_string_ext isobus_pgn_names_ext = VALUE_STRING_EXT_INIT(_isobus_pgn_names);\n\n") output_fd.write("#endif /* __PACKET_ISOBUS_PARAMETERS_H__ */") if __name__ == '__main__': diff --git a/tools/make-manuf.py b/tools/make-manuf.py index 22f3aa03..8f006405 100755 --- a/tools/make-manuf.py +++ b/tools/make-manuf.py @@ -36,7 +36,7 @@ def exit_msg(msg=None, status=1): def open_url(url): '''Open a URL. Returns a tuple containing the body and response dict. 
The body is a - str in Python 3 and bytes in Python 2 in order to be compatibile with + str in Python 3 and bytes in Python 2 in order to be compatible with csv.reader. ''' @@ -230,7 +230,6 @@ def prefix_to_oui(prefix, prefix_map): return '{}/{:d}'.format(oui, int(pfx_len)), kind def main(): - this_dir = os.path.dirname(__file__) manuf_path = os.path.join('epan', 'manuf-data.c') ieee_d = { @@ -246,7 +245,7 @@ def main(): MA_S: {}, } - min_total = 35000; # 35830 as of 2018-09-05 + min_total = 35000 # 35830 as of 2018-09-05 total_added = 0 # Add IEEE entries from each of their databases @@ -276,9 +275,10 @@ def main(): # "Watts A\S" manuf = manuf.replace('\\', '/') if manuf == 'IEEE Registration Authority': + # These are held for subdivision into MA-M/MA-S continue - if manuf == 'Private': - continue + #if manuf == 'Private': + # continue if oui in oui_d[kind]: action = 'Skipping' print('{} - {} IEEE "{}" in favor of "{}"'.format(oui, action, manuf, oui_d[kind][oui])) diff --git a/tools/make-no-reassembly-profile.py b/tools/make-no-reassembly-profile.py index cd68155a..25ae0153 100755 --- a/tools/make-no-reassembly-profile.py +++ b/tools/make-no-reassembly-profile.py @@ -33,12 +33,12 @@ def main(): # Make sure plugin prefs are present. 
cp = subprocess.run([tshark_path, '-G', 'plugins'], stdout=subprocess.PIPE, check=True, encoding='utf-8') plugin_lines = cp.stdout.splitlines() - dissector_count = len(tuple(filter(lambda p: re.search('\sdissector\s', p), plugin_lines))) + dissector_count = len(tuple(filter(lambda p: re.search(r'\sdissector\s', p), plugin_lines))) if dissector_count < MIN_PLUGINS: print('Found {} plugins but require {}.'.format(dissector_count, MIN_PLUGINS)) sys.exit(1) - rd_pref_re = re.compile('^#\s*(.*(reassembl|desegment)\S*):\s*TRUE') + rd_pref_re = re.compile(r'^#\s*(.*(reassembl|desegment)\S*):\s*TRUE') out_prefs = [ '# Generated by ' + os.path.basename(__file__), '', '####### Protocols ########', '', diff --git a/tools/make-packet-dcm.py b/tools/make-packet-dcm.py index 51cbcf10..d122dbff 100755 --- a/tools/make-packet-dcm.py +++ b/tools/make-packet-dcm.py @@ -123,6 +123,10 @@ vrs = {i+1: get_texts_in_row(x)[0].split(maxsplit=1) for i,x in enumerate(get_tr uid_trs = get_trs(part06, "table_A-1") uid_rows = [get_texts_in_row(x) for x in uid_trs] +wkfr_trs = get_trs(part06, "table_A-2") +wkfr_rows = [get_texts_in_row(x) for x in wkfr_trs] +uid_rows += [x[:3] + ['Well-known frame of reference'] + x[3:] for x in wkfr_rows] + def uid_define_name(uid): if uid[1] == "(Retired)": return f'"{uid[0]}"' @@ -168,7 +172,7 @@ extern "C" { """ + "\n".join(f"#define DCM_VR_{vr[0]} {i:2d} /* {vr[1]:25s} */" for i,vr in vrs.items()) + """ /* Following must be in the same order as the definitions above */ -static const gchar* dcm_tag_vr_lookup[] = { +static const char* dcm_tag_vr_lookup[] = { " ", """ + ",\n ".join(",".join(f'"{x[1][0]}"' for x in j[1]) for j in itertools.groupby(vrs.items(), lambda i: (i[0]-1)//8)) + """ }; @@ -188,12 +192,12 @@ static const gchar* dcm_tag_vr_lookup[] = { */ typedef struct dcm_tag { - const guint32 tag; - const gchar *description; - const gchar *vr; - const gchar *vm; - const gboolean is_retired; - const gboolean add_to_summary; /* Add to parent's item 
description */ + const uint32_t tag; + const char *description; + const char *vr; + const char *vm; + const bool is_retired; + const bool add_to_summary; /* Add to parent's item description */ } dcm_tag_t; static dcm_tag_t const dcm_tag_data[] = { @@ -225,9 +229,9 @@ static dcm_tag_t const dcm_tag_data[] = { */ typedef struct dcm_uid { - const gchar *value; - const gchar *name; - const gchar *type; + const char *value; + const char *name; + const char *type; } dcm_uid_t; """ + "\n".join(f'#define {uid_define_name(uid)} "{uid[0]}"' diff --git a/tools/make-pci-ids.py b/tools/make-pci-ids.py index 093637d7..0aadef14 100755 --- a/tools/make-pci-ids.py +++ b/tools/make-pci-ids.py @@ -35,6 +35,9 @@ CODE_PREFIX = """\ #include #include +#include + +#include "wsutil/array.h" #include "pci-ids.h" @@ -59,38 +62,10 @@ typedef struct """ CODE_POSTFIX = """ -static pci_vid_index_t const *get_vid_index(uint16_t vid) +static int vid_search(const void *key, const void *tbl_entry) { - uint32_t start_index = 0; - uint32_t end_index = 0; - uint32_t idx = 0; - - end_index = sizeof(pci_vid_index)/sizeof(pci_vid_index[0]); - - while(start_index != end_index) - { - if(end_index - start_index == 1) - { - if(pci_vid_index[start_index].vid == vid) - return &pci_vid_index[start_index]; - - break; - } - - idx = (start_index + end_index)/2; - - if(pci_vid_index[idx].vid < vid) - start_index = idx; - else - if(pci_vid_index[idx].vid > vid) - end_index = idx; - else - return &pci_vid_index[idx]; - - } - - return NULL; - + return (int)*(const uint16_t *)key - + (int)((const pci_vid_index_t *)tbl_entry)->vid; } const char *pci_id_str(uint16_t vid, uint16_t did, uint16_t svid, uint16_t ssid) @@ -100,7 +75,7 @@ const char *pci_id_str(uint16_t vid, uint16_t did, uint16_t svid, uint16_t ssid) pci_vid_index_t const *index_ptr; pci_id_t const *ids_ptr; - index_ptr = get_vid_index(vid); + index_ptr = bsearch(&vid, pci_vid_index, array_length(pci_vid_index), sizeof pci_vid_index[0], vid_search); 
if(index_ptr == NULL) return not_found; diff --git a/tools/make-plugin-reg.py b/tools/make-plugin-reg.py index 2b9bc345..135850c2 100755 --- a/tools/make-plugin-reg.py +++ b/tools/make-plugin-reg.py @@ -116,6 +116,7 @@ reg_code += """ /* plugins are DLLs on Windows */ #define WS_BUILD_DLL #include "ws_symbol_export.h" +#include """ @@ -139,16 +140,29 @@ for symbol in regs['codec_register']: for symbol in regs['register_tap_listener']: reg_code += "void register_tap_listener_%s(void);\n" % (symbol) +DESCRIPTION_FLAG = { + 'plugin': 'WS_PLUGIN_DESC_DISSECTOR', + 'plugin_wtap': 'WS_PLUGIN_DESC_FILE_TYPE', + 'plugin_codec': 'WS_PLUGIN_DESC_CODEC', + 'plugin_tap': 'WS_PLUGIN_DESC_TAP_LISTENER' +} + reg_code += """ -WS_DLL_PUBLIC_DEF const gchar plugin_version[] = PLUGIN_VERSION; +WS_DLL_PUBLIC_DEF const char plugin_version[] = PLUGIN_VERSION; WS_DLL_PUBLIC_DEF const int plugin_want_major = VERSION_MAJOR; WS_DLL_PUBLIC_DEF const int plugin_want_minor = VERSION_MINOR; WS_DLL_PUBLIC void plugin_register(void); +WS_DLL_PUBLIC uint32_t plugin_describe(void); + +uint32_t plugin_describe(void) +{ + return %s; +} void plugin_register(void) { -""" +""" % DESCRIPTION_FLAG[registertype] if registertype == "plugin": for symbol in regs['proto_reg']: diff --git a/tools/make-regs.py b/tools/make-regs.py index 376b3c65..f17a3f4a 100755 --- a/tools/make-regs.py +++ b/tools/make-regs.py @@ -64,11 +64,11 @@ const unsigned long dissector_reg_handoff_count = {1}; output += gen_prototypes(protos) output += "\n" - output += gen_array(protos, "dissector_reg_t dissector_reg_proto") + output += gen_array(protos, "dissector_reg_t const dissector_reg_proto") output += "\n" output += gen_prototypes(handoffs) output += "\n" - output += gen_array(handoffs, "dissector_reg_t dissector_reg_handoff") + output += gen_array(handoffs, "dissector_reg_t const dissector_reg_handoff") with open(outfile, "w") as f: f.write(output) @@ -96,7 +96,7 @@ const unsigned wtap_module_count = {0}; output += 
gen_prototypes(wtap_modules) output += "\n" - output += gen_array(wtap_modules, "wtap_module_reg_t wtap_module_reg") + output += gen_array(wtap_modules, "wtap_module_reg_t const wtap_module_reg") with open(outfile, "w") as f: f.write(output) @@ -124,7 +124,7 @@ const unsigned long tap_reg_listener_count = {0}; output += gen_prototypes(taps) output += "\n" - output += gen_array(taps, "tap_reg_t tap_reg_listener") + output += gen_array(taps, "tap_reg_t const tap_reg_listener") with open(outfile, "w") as f: f.write(output) @@ -143,7 +143,7 @@ if __name__ == "__main__": outfile = sys.argv[2] if sys.argv[3].startswith("@"): with open(sys.argv[3][1:]) as f: - infiles = [l.strip() for l in f.readlines()] + infiles = [line.strip() for line in f.readlines()] else: infiles = sys.argv[3:] diff --git a/tools/make-services.py b/tools/make-services.py index 0f832bec..db2afd3d 100755 --- a/tools/make-services.py +++ b/tools/make-services.py @@ -9,6 +9,14 @@ # # SPDX-License-Identifier: GPL-2.0-or-later +import sys +import getopt +import csv +import re +import collections +import urllib.request, urllib.error, urllib.parse +import codecs + iana_svc_url = 'https://www.iana.org/assignments/service-names-port-numbers/service-names-port-numbers.csv' __doc__ = '''\ @@ -18,13 +26,6 @@ url defaults to %s ''' % (iana_svc_url) -import sys -import getopt -import csv -import re -import collections -import urllib.request, urllib.error, urllib.parse -import codecs services_file = 'epan/services-data.c' @@ -105,7 +106,7 @@ def parse_rows(svc_fd): if description == service or description == service.replace("-", " "): description = None - if not port in services_map: + if port not in services_map: services_map[port] = collections.OrderedDict() # Remove some duplicates (first entry wins) @@ -117,7 +118,7 @@ def parse_rows(svc_fd): if proto_exists: continue - if not service in services_map[port]: + if service not in services_map[port]: services_map[port][service] = [description] 
services_map[port][service].append(proto) @@ -229,10 +230,12 @@ def main(argv): * service names, e.g. TCP port 80 -> http. * * It is subject to copyright and being used with IANA's permission: - * https://www.wireshark.org/lists/wireshark-dev/200708/msg00160.html + * https://lists.wireshark.org/archives/wireshark-dev/200708/msg00160.html * * The original file can be found at: * %s + * + * Generated by tools/make-services.py */ ''' % (iana_svc_url)) @@ -259,27 +262,27 @@ def main(argv): return e[0] return max_port - out.write("static ws_services_entry_t global_tcp_udp_services_table[] = {\n") + out.write("static const ws_services_entry_t global_tcp_udp_services_table[] = {\n") for e in tcp_udp: max_port = write_entry(out, e, max_port) out.write("};\n\n") - out.write("static ws_services_entry_t global_tcp_services_table[] = {\n") + out.write("static const ws_services_entry_t global_tcp_services_table[] = {\n") for e in tcp: max_port = write_entry(out, e, max_port) out.write("};\n\n") - out.write("static ws_services_entry_t global_udp_services_table[] = {\n") + out.write("static const ws_services_entry_t global_udp_services_table[] = {\n") for e in udp: max_port = write_entry(out, e, max_port) out.write("};\n\n") - out.write("static ws_services_entry_t global_sctp_services_table[] = {\n") + out.write("static const ws_services_entry_t global_sctp_services_table[] = {\n") for e in sctp: max_port = write_entry(out, e, max_port) out.write("};\n\n") - out.write("static ws_services_entry_t global_dccp_services_table[] = {\n") + out.write("static const ws_services_entry_t global_dccp_services_table[] = {\n") for e in dccp: max_port = write_entry(out, e, max_port) out.write("};\n\n") diff --git a/tools/make-usb.py b/tools/make-usb.py index 65408034..8b7bd369 100755 --- a/tools/make-usb.py +++ b/tools/make-usb.py @@ -40,7 +40,7 @@ for i in range(256): for utf8line in lines: # Convert single backslashes to double (escaped) backslashes, escape quotes, etc. 
utf8line = utf8line.rstrip() - utf8line = re.sub("\?+", "?", utf8line) + utf8line = re.sub(r"\?+", "?", utf8line) line = ''.join(escapes[byte] for byte in utf8line.encode('utf8')) if line == "# Vendors, devices and interfaces. Please keep sorted.": diff --git a/tools/make-version.py b/tools/make-version.py index 05458b65..a3416812 100755 --- a/tools/make-version.py +++ b/tools/make-version.py @@ -44,8 +44,8 @@ GIT_ABBREV_LENGTH = 12 # If the text "$Format" is still present, it means that # git archive did not replace the $Format string, which # means that this not a git archive. -GIT_EXPORT_SUBST_H = '2acd1a854babc4caae980ef9ed79ad36b6bc0362' -GIT_EXPORT_SUBST_D = 'tag: wireshark-4.2.6, tag: v4.2.6, refs/merge-requests/16375/head, refs/keep-around/2acd1a854babc4caae980ef9ed79ad36b6bc0362' +GIT_EXPORT_SUBST_H = '009a163470b581c7d3ee66d89c819cef1f9e50fe' +GIT_EXPORT_SUBST_D = 'tag: wireshark-4.4.0, tag: v4.4.0, refs/merge-requests/17013/head, refs/keep-around/009a163470b581c7d3ee66d89c819cef1f9e50fe' IS_GIT_ARCHIVE = not GIT_EXPORT_SUBST_H.startswith('$Format') @@ -94,7 +94,7 @@ def update_debian_changelog(src_dir, repo_data): changelog_contents = fh.read() CHANGELOG_PATTERN = r"^.*" - text_replacement = f"wireshark ({repo_data['version_major']}.{repo_data['version_minor']}.{repo_data['version_patch']}{repo_data['package_string']}) unstable; urgency=low" + text_replacement = f"wireshark ({repo_data['version_major']}.{repo_data['version_minor']}.{repo_data['version_patch']}{repo_data['package_string']}) UNRELEASED; urgency=low" # Note: Only need to replace the first line, so we don't use re.MULTILINE or re.DOTALL new_changelog_contents = re.sub(CHANGELOG_PATTERN, text_replacement, changelog_contents) with open(deb_changelog_filepath, mode='w', encoding='utf-8') as fh: @@ -110,9 +110,9 @@ def create_version_file(version_f, repo_data): def update_attributes_asciidoc(src_dir, repo_data): - # Read docbook/attributes.adoc, then write it back out with an updated + # Read 
doc/attributes.adoc, then write it back out with an updated # wireshark-version replacement line. - asiidoc_filepath = os.path.join(src_dir, "docbook", "attributes.adoc") + asiidoc_filepath = os.path.join(src_dir, "doc", "attributes.adoc") with open(asiidoc_filepath, encoding='utf-8') as fh: asciidoc_contents = fh.read() @@ -129,8 +129,8 @@ def update_attributes_asciidoc(src_dir, repo_data): def update_docinfo_asciidoc(src_dir, repo_data): doc_paths = [] - doc_paths += [os.path.join(src_dir, 'docbook', 'wsdg_src', 'developer-guide-docinfo.xml')] - doc_paths += [os.path.join(src_dir, 'docbook', 'wsug_src', 'user-guide-docinfo.xml')] + doc_paths += [os.path.join(src_dir, 'doc', 'wsdg_src', 'developer-guide-docinfo.xml')] + doc_paths += [os.path.join(src_dir, 'doc', 'wsug_src', 'user-guide-docinfo.xml')] for doc_path in doc_paths: with open(doc_path, encoding='utf-8') as fh: @@ -184,33 +184,44 @@ def update_versioned_files(src_dir, set_version, repo_data): def generate_version_h(repo_data): # Generate new contents of version.h from repository data + num_commits_line = '#define VCS_NUM_COMMITS "0"\n' + + commit_id_line = '/* #undef VCS_COMMIT_ID */\n' + if not repo_data.get('enable_vcsversion'): - return "/* #undef VCSVERSION */\n" + return '/* #undef VCS_VERSION */\n' + num_commits_line + commit_id_line + + if repo_data.get('num_commits'): + num_commits_line = f'#define VCS_NUM_COMMITS "{int(repo_data["num_commits"])}"\n' + + if repo_data.get('commit_id'): + commit_id_line = f'#define VCS_COMMIT_ID "{repo_data["commit_id"]}"' if repo_data.get('git_description'): # Do not bother adding the git branch, the git describe output # normally contains the base tag and commit ID which is more # than sufficient to determine the actual source tree. 
- return f'#define VCSVERSION "{repo_data["git_description"]}"\n' + return f'#define VCS_VERSION "{repo_data["git_description"]}"\n' + num_commits_line + commit_id_line if repo_data.get('last_change') and repo_data.get('num_commits'): version_string = f"v{repo_data['version_major']}.{repo_data['version_minor']}.{repo_data['version_patch']}" - vcs_line = f'#define VCSVERSION "{version_string}-Git-{repo_data["num_commits"]}"\n' - return vcs_line + vcs_line = f'#define VCS_VERSION "{version_string}-Git-{repo_data["num_commits"]}"\n' + return vcs_line + num_commits_line + commit_id_line if repo_data.get('commit_id'): - vcs_line = f'#define VCSVERSION "Git commit {repo_data["commit_id"]}"\n' - return vcs_line + vcs_line = f'#define VCS_VERSION "Git commit {repo_data["commit_id"]}"\n' + return vcs_line + num_commits_line + commit_id_line + + vcs_line = '#define VCS_VERSION "Git Rev Unknown from unknown"\n' - vcs_line = '#define VCSVERSION "Git Rev Unknown from unknown"\n' - return vcs_line + return vcs_line + num_commits_line + commit_id_line def print_VCS_REVISION(version_file, repo_data, set_vcs): # Write the version control system's version to $version_file. # Don't change the file if it is not needed. # - # XXX - We might want to add VCSVERSION to CMakeLists.txt so that it can + # XXX - We might want to add VCS_VERSION to CMakeLists.txt so that it can # generate vcs_version.h independently. 
new_version_h = generate_version_h(repo_data) @@ -418,7 +429,7 @@ def read_repo_info(src_dir, tagged_version_extra, untagged_version_extra): def main(): parser = argparse.ArgumentParser(description='Wireshark file and package versions') action_group = parser.add_mutually_exclusive_group() - action_group.add_argument('--set-version', '-v', metavar='', type=parse_versionstring, help='Set the major, minor, and patch versions in the top-level CMakeLists.txt, docbook/attributes.adoc, packaging/debian/changelog, and the CMakeLists.txt for all libraries to the provided version number') + action_group.add_argument('--set-version', '-v', metavar='', type=parse_versionstring, help='Set the major, minor, and patch versions in the top-level CMakeLists.txt, doc/attributes.adoc, packaging/debian/changelog, and the CMakeLists.txt for all libraries to the provided version number') action_group.add_argument('--set-release', '-r', action='store_true', help='Set the extra release information in the top-level CMakeLists.txt based on either default or command-line specified options.') setrel_group = parser.add_argument_group() setrel_group.add_argument('--tagged-version-extra', '-t', default="", help="Extra version information format to use when a tag is found. No format \ diff --git a/tools/make-wsluarm.py b/tools/make-wsluarm.py new file mode 100755 index 00000000..52330756 --- /dev/null +++ b/tools/make-wsluarm.py @@ -0,0 +1,458 @@ +#!/usr/bin/env python3 +# +# make-wsluarm.py +# +# By Gerald Combs +# Based on make-wsluarm.pl by Luis E. Garcia Onatnon and Hadriel Kaplan +# +# Wireshark - Network traffic analyzer +# By Gerald Combs +# Copyright 1998 Gerald Combs +# +# SPDX-License-Identifier: GPL-2.0-or-later +'''\ +WSLUA's Reference Manual Generator + +This reads Doxygen-style comments in C code and generates wslua API documentation +formatted as AsciiDoc. 
+ +Behavior as documented by Hadriel: +- Allows modules (i.e., WSLUA_MODULE) to have detailed descriptions +- Two (or more) line breaks in comments result in separate paragraphs +- Any indent with a single leading star '*' followed by space is a bulleted list item + reducing indent or having an extra linebreak stops the list +- Any indent with a leading digits-dot followed by space, i.e. "1. ", is a numbered list item + reducing indent or having an extra linebreak stops the list +''' + +import argparse +import logging +import os +import re +import sys + +from enum import Enum +from string import Template + +def parse_desc(description): + '''\ +Break up descriptions based on newlines and keywords. Some processing +is done for code blocks and lists, but the output is otherwise left +intact. Assumes the input has been stripped. +''' + + c_lines = description.strip().splitlines() + + if len(c_lines) < 1: + return '' + + adoc_lines = [] + cli = iter(c_lines) + for line in cli: + raw_len = len(line) + line = line.lstrip() + indent = raw_len - len(line) + + # If we find "[source,...]" then treat it as a block + if re.search(r'\[source.*\]', line): + # The next line *should* be a delimiter... + block_delim = next(cli).strip() + line += f'\n{block_delim}\n' + block_line = next(cli) + # XXX try except StopIteration + while block_line.strip() != block_delim: + # Keep eating lines until the closing delimiter. + # XXX Strip indent spaces? + line += block_line + '\n' + block_line = next(cli) + line += block_delim + '\n' + + adoc_lines.append(line) + elif re.match(r'^\s*$', line): + # line is either empty or just whitespace, and we're not in a @code block + # so it's the end of a previous paragraph, beginning of new one + adoc_lines.append('') + else: + # We have a regular line, not in a @code block. + # Add it as-is. 
+ + # if line starts with "@version" or "@since", make it a "Since:" + if re.match(r'^@(version|since)\s+', line): + line = re.sub(r'^@(version|since)\s+', 'Since: ', line) + adoc_lines.append(line) + + # If line starts with single "*" and space, leave it mostly intact. + elif re.match(r'^\*\s', line): + adoc_lines += ['', line] + # keep eating until we find a blank line or end + line = next(cli) + try: + while not re.match(r'^\s*$', line): + raw_len = len(line) + line = line.lstrip() + # if this is less indented than before, break out + if raw_len - len(line) < indent: + break + adoc_lines += [line] + line = next(cli) + except StopIteration: + pass + adoc_lines.append('') + + # if line starts with "1." and space, leave it mostly intact. + elif re.match(r'^1\.\s', line): + adoc_lines += ['', line] + # keep eating until we find a blank line or end + line = next(cli) + try: + while not re.match(r'^\s*$', line): + raw_len = len(line) + line = line.lstrip() + # if this is less indented than before, break out + if raw_len - len(line) < indent: + break + adoc_lines += [line] + line = next(cli) + except StopIteration: + pass + adoc_lines.append('') + + # Just a normal line, add it to array + else: + # Nested Lua arrays + line = re.sub(r'\[\[(.*)\]\]', r'$$\1$$', line) + adoc_lines += [line] + + # Strip out consecutive empty lines. + # This isn't strictly necessary but makes the AsciiDoc output prettier. 
+ adoc_lines = '\n'.join(adoc_lines).splitlines() + adoc_lines = [val for idx, val in enumerate(adoc_lines) if idx == 0 or not (val == '' and val == adoc_lines[idx - 1])] + + return '\n'.join(adoc_lines) + + +class LuaFunction: + def __init__(self, c_file, id, start, name, raw_description): + self.c_file = c_file + self.id = id + self.start = start + self.name = name + if not raw_description: + raw_description = '' + self.description = parse_desc(raw_description) + self.arguments = [] # (name, description, optional) + self.returns = [] # description + self.errors = [] # description + logging.info(f'Created function {id} ({name}) at {start}') + + def add_argument(self, id, raw_name, raw_description, raw_optional): + if id != self.id: + logging.critical(f'Invalid argument ID {id} in function {self.id}') + sys.exit(1) + if not raw_description: + raw_description = '' + optional = False + if raw_optional == 'OPT': + optional = True + self.arguments.append((raw_name.lower(), parse_desc(raw_description), optional)) + + def extract_buf(self, buf): + "Extract arguments, errors, and return values from a function's buffer." + + # Splits "WSLUA_OPTARG_ProtoField_int8_NAME /* food */" into + # "OPT" (1), "ProtoField_int8" (2), "NAME" (3), ..., ..., " food " (6) + # Handles functions like "loadfile(filename)" too. + for m in re.finditer(r'#define WSLUA_(OPT)?ARG_((?:[A-Za-z0-9]+_)?[a-z0-9_]+)_([A-Z0-9_]+)\s+\d+' + TRAILING_COMMENT_RE, buf, re.MULTILINE|re.DOTALL): + self.add_argument(m.group(2), m.group(3), m.group(6), m.group(1)) + logging.info(f'Created arg {m.group(3)} for {self.id} at {m.start()}') + + # Same as above, except that there is no macro but a (multi-line) comment. 
+ for m in re.finditer(r'/\*\s*WSLUA_(OPT)?ARG_((?:[A-Za-z0-9]+_)?[a-z0-9_]+)_([A-Z0-9_]+)\s*(.*?)\*/', buf, re.MULTILINE|re.DOTALL): + self.add_argument(m.group(2), m.group(3), m.group(4), m.group(1)) + logging.info(f'Created arg {m.group(3)} for {self.id} at {m.start()}') + + for m in re.finditer(r'/\*\s+WSLUA_MOREARGS\s+([A-Za-z_]+)\s+(.*?)\*/', buf, re.MULTILINE|re.DOTALL): + self.add_argument(m.group(1), '...', m.group(2), False) + logging.info(f'Created morearg for {self.id}') + + for m in re.finditer(r'WSLUA_(FINAL_)?RETURN\(\s*.*?\s*\)\s*;' + TRAILING_COMMENT_RE, buf, re.MULTILINE|re.DOTALL): + if m.group(4) and len(m.group(4)) > 0: + self.returns.append(m.group(4).strip()) + logging.info(f'Created return for {self.id} at {m.start()}') + + for m in re.finditer(r'/\*\s*_WSLUA_RETURNS_\s*(.*?)\*/', buf, re.MULTILINE|re.DOTALL): + if m.group(1) and len(m.group(1)) > 0: + self.returns.append(m.group(1).strip()) + logging.info(f'Created return for {self.id} at {m.start()}') + + for m in re.finditer(r'WSLUA_ERROR\s*\(\s*(([A-Z][A-Za-z]+)_)?([a-z_]+),' + QUOTED_RE, buf, re.MULTILINE|re.DOTALL): + self.errors.append(m.group(4).strip()) + logging.info(f'Created error {m.group(4)[:10]} for {self.id} at {m.start()}') + + def to_adoc(self): + # The Perl script wrapped optional args in '[]', joined them with ', ', and + # converted non-alphabetic characters to underscores. 
+ mangled_names = [f'_{a}_' if optional else a for a, _, optional in self.arguments] + section_name = re.sub('[^A-Za-z0-9]', '_', f'{self.name}_{"__".join(mangled_names)}_') + opt_names = [f'[{a}]' if optional else a for a, _, optional in self.arguments] + adoc_buf = f''' +// {self.c_file} +[#lua_fn_{section_name}] +===== {self.name}({', '.join(opt_names)}) + +{self.description} +''' + if len(self.arguments) > 0: + adoc_buf += ''' +[float] +===== Arguments +''' + for (name, description, optional) in self.arguments: + if optional: + name += ' (optional)' + adoc_buf += f'\n{name}::\n' + + if len(description) > 0: + adoc_buf += f'\n{description}\n' + + adoc_buf += f'\n// function_arg_footer: {name}' + + if len(self.arguments) > 0: + adoc_buf += '\n// end of function_args\n' + + if len(self.returns) > 0: + adoc_buf += ''' +[float] +===== Returns +''' + for description in self.returns: + adoc_buf += f'\n{description}\n' + + if len(self.returns) > 0: + adoc_buf += f'\n// function_returns_footer: {self.name}' + + if len(self.errors) > 0: + adoc_buf += ''' +[float] +===== Errors +''' + for description in self.errors: + adoc_buf += f'\n* {description}\n' + + if len(self.errors) > 0: + adoc_buf += f'\n// function_errors_footer: {self.name}' + + adoc_buf += f'\n// function_footer: {section_name}\n' + + return adoc_buf + + +# group 1: whole trailing comment (possibly empty), e.g. " /* foo */" +# group 2: any leading whitespace. XXX why is this not removed using (?:...) +# group 3: actual comment text, e.g. " foo ". +TRAILING_COMMENT_RE = r'((\s*|[\n\r]*)/\*(.*?)\*/)?' +IN_COMMENT_RE = r'[\s\r\n]*((.*?)\s*\*/)?' +QUOTED_RE = r'"([^"]*)"' + +# XXX We might want to create a "LuaClass" class similar to LuaFunction +# and move these there. 
+def extract_class_definitions(c_file, c_buf, module, classes, functions): + for m in re.finditer(r'WSLUA_CLASS_DEFINE(?:_BASE)?\(\s*([A-Z][a-zA-Z0-9]+).*?\);' + TRAILING_COMMENT_RE, c_buf, re.MULTILINE|re.DOTALL): + raw_desc = m.group(4) + if raw_desc is None: + raw_desc = '' + name = m.group(1) + mod_class = { + 'description': parse_desc(raw_desc), + 'constructors': [], + 'methods': [], + 'attributes': [], + } + classes[name] = mod_class + logging.info(f'Created class {name}') + return 0 + +def extract_function_definitions(c_file, c_buf, module, classes, functions): + for m in re.finditer(r'WSLUA_FUNCTION\s+wslua_([a-z_0-9]+)[^\{]*\{' + TRAILING_COMMENT_RE, c_buf, re.MULTILINE|re.DOTALL): + id = m.group(1) + functions[id] = LuaFunction(c_file, id, m.start(), id, m.group(4)) + +def extract_constructor_definitions(c_file, c_buf, module, classes, functions): + for m in re.finditer(r'WSLUA_CONSTRUCTOR\s+([A-Za-z0-9]+)_([a-z0-9_]+).*?\{' + TRAILING_COMMENT_RE, c_buf, re.MULTILINE|re.DOTALL): + class_name = m.group(1) + id = f'{class_name}_{m.group(2)}' + name = f'{class_name}.{m.group(2)}' + functions[id] = LuaFunction(c_file, id, m.start(), name, m.group(5)) + classes[class_name]['constructors'].append(id) + +def extract_constructor_markups(c_file, c_buf, module, classes, functions): + for m in re.finditer(r'_WSLUA_CONSTRUCTOR_\s+([A-Za-z0-9]+)_([a-z0-9_]+)\s*(.*?)\*/', c_buf, re.MULTILINE|re.DOTALL): + class_name = m.group(1) + id = f'{class_name}_{m.group(2)}' + name = f'{class_name}.{m.group(2)}' + functions[id] = LuaFunction(c_file, id, m.start(), name, m.group(3)) + classes[class_name]['constructors'].append(id) + +def extract_method_definitions(c_file, c_buf, module, classes, functions): + for m in re.finditer(r'WSLUA_METHOD\s+([A-Za-z0-9]+)_([a-z0-9_]+)[^\{]*\{' + TRAILING_COMMENT_RE, c_buf, re.MULTILINE|re.DOTALL): + class_name = m.group(1) + id = f'{class_name}_{m.group(2)}' + name = f'{class_name.lower()}:{m.group(2)}' + functions[id] = LuaFunction(c_file, 
id, m.start(), name, m.group(5)) + classes[class_name]['methods'].append(id) + +def extract_metamethod_definitions(c_file, c_buf, module, classes, functions): + for m in re.finditer(r'WSLUA_METAMETHOD\s+([A-Za-z0-9]+)(__[a-z0-9]+)[^\{]*\{' + TRAILING_COMMENT_RE, c_buf, re.MULTILINE|re.DOTALL): + class_name = m.group(1) + id = f'{class_name}{m.group(2)}' + name = f'{class_name.lower()}:{m.group(2)}' + functions[id] = LuaFunction(c_file, id, m.start(), name, m.group(5)) + classes[class_name]['methods'].append(id) + +def extract_attribute_markups(c_file, c_buf, module, classes, functions): + for m in re.finditer(r'/\*\s+WSLUA_ATTRIBUTE\s+([A-Za-z0-9]+)_([a-z0-9_]+)\s+([A-Z]*)\s*(.*?)\*/', c_buf, re.MULTILINE|re.DOTALL): + class_name = m.group(1) + name = f'{m.group(1).lower()}.{m.group(2)}' + mode = m.group(3) + mode_desc = 'Mode: ' + if 'RO' in mode: + mode_desc += 'Retrieve only.\n' + elif 'WO' in mode: + mode_desc += 'Assign only.\n' + elif 'RW' in mode or 'WR' in mode: + mode_desc += 'Retrieve or assign.\n' + else: + sys.stderr.write(f'Attribute does not have a RO/WO/RW mode {mode}\n') + sys.exit(1) + + attribute = { + 'name': name, + 'description': parse_desc(f'{mode_desc}\n{m.group(4)}'), + } + classes[class_name]['attributes'].append(attribute) + logging.info(f'Created attribute {name} for class {class_name}') + +def main(): + parser = argparse.ArgumentParser(description="WSLUA's Reference Manual Generator") + parser.add_argument("c_files", nargs='+', metavar='C file', help="C file") + parser.add_argument('--output-directory', help='Output directory') + parser.add_argument('--verbose', action='store_true', help='Show more output') + args = parser.parse_args() + + logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG if args.verbose else logging.WARNING) + + modules = {} + + for c_file in args.c_files: + with open(c_file, encoding='utf-8') as c_f: + c_buf = c_f.read() + + # Peek for modules vs continuations. 
+ m = re.search(r'WSLUA_(|CONTINUE_)MODULE\s*(\w+)', c_buf) + if m: + module_name = m.group(2) + c_pair = (os.path.basename(c_file), c_buf) + try: + if m.group(1) == 'CONTINUE_': + modules[module_name]['c'].append(c_pair) + else: + modules[module_name]['c'].insert(0, c_pair) + except KeyError: + modules[module_name] = {} + modules[module_name]['c'] = [c_pair] + modules[module_name]['file_base'] = os.path.splitext(c_pair[0])[0] + else: + logging.warning(f'No module found in {c_file}') + + extractors = [ + extract_class_definitions, + extract_function_definitions, + extract_constructor_definitions, + extract_constructor_markups, + extract_method_definitions, + extract_metamethod_definitions, + extract_attribute_markups, + ] + + for module_name in sorted(modules): + adoc_file = f'{modules[module_name]["file_base"]}.adoc' + logging.info(f'Writing module {module_name} to {adoc_file} from {len(modules[module_name]["c"])} input(s)') + functions = {} + classes = {} + + # Extract our module's description. + m = re.search(r'WSLUA_MODULE\s*[A-Z][a-zA-Z0-9]+' + IN_COMMENT_RE, modules[module_name]['c'][0][1], re.MULTILINE|re.DOTALL) + if not m: + return + modules[module_name]['description'] = parse_desc(f'{m.group(2)}') + + # Extract module-level information from each file. + for (c_file, c_buf) in modules[module_name]['c']: + for extractor in extractors: + extractor(c_file, c_buf, modules[module_name], classes, functions) + + # Extract function-level information from each file. 
+ for (c_file, c_buf) in modules[module_name]['c']: + c_file_ids = filter(lambda k: functions[k].c_file == c_file, functions.keys()) + func_ids = sorted(c_file_ids, key=lambda k: functions[k].start) + id = func_ids.pop(0) + for next_id in func_ids: + functions[id].extract_buf(c_buf[functions[id].start:functions[next_id].start]) + id = next_id + functions[id].extract_buf(c_buf[functions[id].start:]) + + with open(os.path.join(args.output_directory, adoc_file), 'w', encoding='utf-8') as adoc_f: + adoc_f.write(f'''\ +// {c_file} +[#lua_module_{module_name}] +=== {modules[module_name]["description"]} +''') + for class_name in sorted(classes.keys()): + lua_class = classes[class_name] + adoc_f.write(f''' +// {c_file} +[#lua_class_{class_name}] +==== {class_name} +''') + + if not lua_class["description"] == '': + adoc_f.write(f'\n{lua_class["description"]}\n') + + for constructor_id in sorted(lua_class['constructors'], key=lambda id: functions[id].start): + adoc_f.write(functions[constructor_id].to_adoc()) + del functions[constructor_id] + + for method_id in sorted(lua_class['methods'], key=lambda id: functions[id].start): + adoc_f.write(functions[method_id].to_adoc()) + del functions[method_id] + + for attribute in lua_class['attributes']: + attribute_id = re.sub('[^A-Za-z0-9]', '_', f'{attribute["name"]}') + adoc_f.write(f''' +[#lua_class_attrib_{attribute_id}] +===== {attribute["name"]} + +{attribute["description"]} + +// End {attribute["name"]} +''') + + + adoc_f.write(f'\n// class_footer: {class_name}\n') + + if len(functions.keys()) > 0: + adoc_f.write(f'''\ +[#global_functions_{module_name}] +==== Global Functions +''') + + for global_id in sorted(functions.keys(), key=lambda id: functions[id].start): + adoc_f.write(functions[global_id].to_adoc()) + + if len(functions.keys()) > 0: + adoc_f.write(f'// Global function\n') + + adoc_f.write('// end of module\n') + +if __name__ == '__main__': + main() diff --git a/tools/ncp2222.py b/tools/ncp2222.py index 
f14d0c5c..acc0af4e 100755 --- a/tools/ncp2222.py +++ b/tools/ncp2222.py @@ -3,7 +3,7 @@ """ Creates C code from a table of NCP type 0x2222 packet types. (And 0x3333, which are the replies, but the packets are more commonly -refered to as type 0x2222; the 0x3333 replies are understood to be +referred to as type 0x2222; the 0x3333 replies are understood to be part of the 0x2222 "family") The data-munging code was written by Gilbert Ramirez. @@ -324,7 +324,7 @@ class PTVCBitfield(PTVC): def Code(self): ett_name = self.ETTName() - x = "static int %s = -1;\n" % (ett_name,) + x = "static int %s;\n" % (ett_name,) x = x + "static const ptvc_record ptvc_%s[] = {\n" % (self.Name()) for ptvc_rec in self.list: @@ -884,7 +884,7 @@ class struct(PTVC, Type): def Code(self): ett_name = self.ETTName() - x = "static int %s = -1;\n" % (ett_name,) + x = "static int %s;\n" % (ett_name,) x = x + "static const ptvc_record ptvc_%s[] = {\n" % (self.name,) for ptvc_rec in self.list: x = x + " %s,\n" % (ptvc_rec.Code()) @@ -5850,7 +5850,7 @@ def define_errors(): errors[0xff0d] = "Object associated with ObjectID is not a manager" errors[0xff0e] = "Invalid initial semaphore value" errors[0xff0f] = "The semaphore handle is not valid" - errors[0xff10] = "SemaphoreHandle is not associated with a valid sempahore" + errors[0xff10] = "SemaphoreHandle is not associated with a valid semaphore" errors[0xff11] = "Invalid semaphore handle" errors[0xff12] = "Transaction tracking is not available" errors[0xff13] = "The transaction has not yet been written to disk" @@ -5905,7 +5905,6 @@ def produce_code(): #include "config.h" #include -#include #include #include #include @@ -5917,6 +5916,7 @@ def produce_code(): #include #include #include +#include #include "packet-ncp-int.h" #include "packet-ncp-nmas.h" #include "packet-ncp-sss.h" @@ -5957,565 +5957,565 @@ static int ptvc_struct_int_storage; #define NREV 0x00000004 #define NFLAGS 0x00000008 -static int hf_ncp_number_of_data_streams_long = -1; -static int 
hf_ncp_func = -1; -static int hf_ncp_length = -1; -static int hf_ncp_subfunc = -1; -static int hf_ncp_group = -1; -static int hf_ncp_fragment_handle = -1; -static int hf_ncp_completion_code = -1; -static int hf_ncp_connection_status = -1; -static int hf_ncp_req_frame_num = -1; -static int hf_ncp_req_frame_time = -1; -static int hf_ncp_fragment_size = -1; -static int hf_ncp_message_size = -1; -static int hf_ncp_nds_flag = -1; -static int hf_ncp_nds_verb = -1; -static int hf_ping_version = -1; -/* static int hf_nds_version = -1; */ -/* static int hf_nds_flags = -1; */ -static int hf_nds_reply_depth = -1; -static int hf_nds_reply_rev = -1; -static int hf_nds_reply_flags = -1; -static int hf_nds_p1type = -1; -static int hf_nds_uint32value = -1; -static int hf_nds_bit1 = -1; -static int hf_nds_bit2 = -1; -static int hf_nds_bit3 = -1; -static int hf_nds_bit4 = -1; -static int hf_nds_bit5 = -1; -static int hf_nds_bit6 = -1; -static int hf_nds_bit7 = -1; -static int hf_nds_bit8 = -1; -static int hf_nds_bit9 = -1; -static int hf_nds_bit10 = -1; -static int hf_nds_bit11 = -1; -static int hf_nds_bit12 = -1; -static int hf_nds_bit13 = -1; -static int hf_nds_bit14 = -1; -static int hf_nds_bit15 = -1; -static int hf_nds_bit16 = -1; -static int hf_outflags = -1; -static int hf_bit1outflags = -1; -static int hf_bit2outflags = -1; -static int hf_bit3outflags = -1; -static int hf_bit4outflags = -1; -static int hf_bit5outflags = -1; -static int hf_bit6outflags = -1; -static int hf_bit7outflags = -1; -static int hf_bit8outflags = -1; -static int hf_bit9outflags = -1; -static int hf_bit10outflags = -1; -static int hf_bit11outflags = -1; -static int hf_bit12outflags = -1; -static int hf_bit13outflags = -1; -static int hf_bit14outflags = -1; -static int hf_bit15outflags = -1; -static int hf_bit16outflags = -1; -static int hf_bit1nflags = -1; -static int hf_bit2nflags = -1; -static int hf_bit3nflags = -1; -static int hf_bit4nflags = -1; -static int hf_bit5nflags = -1; -static int 
hf_bit6nflags = -1; -static int hf_bit7nflags = -1; -static int hf_bit8nflags = -1; -static int hf_bit9nflags = -1; -static int hf_bit10nflags = -1; -static int hf_bit11nflags = -1; -static int hf_bit12nflags = -1; -static int hf_bit13nflags = -1; -static int hf_bit14nflags = -1; -static int hf_bit15nflags = -1; -static int hf_bit16nflags = -1; -static int hf_bit1rflags = -1; -static int hf_bit2rflags = -1; -static int hf_bit3rflags = -1; -static int hf_bit4rflags = -1; -static int hf_bit5rflags = -1; -static int hf_bit6rflags = -1; -static int hf_bit7rflags = -1; -static int hf_bit8rflags = -1; -static int hf_bit9rflags = -1; -static int hf_bit10rflags = -1; -static int hf_bit11rflags = -1; -static int hf_bit12rflags = -1; -static int hf_bit13rflags = -1; -static int hf_bit14rflags = -1; -static int hf_bit15rflags = -1; -static int hf_bit16rflags = -1; -static int hf_cflags = -1; -static int hf_bit1cflags = -1; -static int hf_bit2cflags = -1; -static int hf_bit3cflags = -1; -static int hf_bit4cflags = -1; -static int hf_bit5cflags = -1; -static int hf_bit6cflags = -1; -static int hf_bit7cflags = -1; -static int hf_bit8cflags = -1; -static int hf_bit9cflags = -1; -static int hf_bit10cflags = -1; -static int hf_bit11cflags = -1; -static int hf_bit12cflags = -1; -static int hf_bit13cflags = -1; -static int hf_bit14cflags = -1; -static int hf_bit15cflags = -1; -static int hf_bit16cflags = -1; -static int hf_bit1acflags = -1; -static int hf_bit2acflags = -1; -static int hf_bit3acflags = -1; -static int hf_bit4acflags = -1; -static int hf_bit5acflags = -1; -static int hf_bit6acflags = -1; -static int hf_bit7acflags = -1; -static int hf_bit8acflags = -1; -static int hf_bit9acflags = -1; -static int hf_bit10acflags = -1; -static int hf_bit11acflags = -1; -static int hf_bit12acflags = -1; -static int hf_bit13acflags = -1; -static int hf_bit14acflags = -1; -static int hf_bit15acflags = -1; -static int hf_bit16acflags = -1; -static int hf_vflags = -1; -static int 
hf_bit1vflags = -1; -static int hf_bit2vflags = -1; -static int hf_bit3vflags = -1; -static int hf_bit4vflags = -1; -static int hf_bit5vflags = -1; -static int hf_bit6vflags = -1; -static int hf_bit7vflags = -1; -static int hf_bit8vflags = -1; -static int hf_bit9vflags = -1; -static int hf_bit10vflags = -1; -static int hf_bit11vflags = -1; -static int hf_bit12vflags = -1; -static int hf_bit13vflags = -1; -static int hf_bit14vflags = -1; -static int hf_bit15vflags = -1; -static int hf_bit16vflags = -1; -static int hf_eflags = -1; -static int hf_bit1eflags = -1; -static int hf_bit2eflags = -1; -static int hf_bit3eflags = -1; -static int hf_bit4eflags = -1; -static int hf_bit5eflags = -1; -static int hf_bit6eflags = -1; -static int hf_bit7eflags = -1; -static int hf_bit8eflags = -1; -static int hf_bit9eflags = -1; -static int hf_bit10eflags = -1; -static int hf_bit11eflags = -1; -static int hf_bit12eflags = -1; -static int hf_bit13eflags = -1; -static int hf_bit14eflags = -1; -static int hf_bit15eflags = -1; -static int hf_bit16eflags = -1; -static int hf_infoflagsl = -1; -static int hf_retinfoflagsl = -1; -static int hf_bit1infoflagsl = -1; -static int hf_bit2infoflagsl = -1; -static int hf_bit3infoflagsl = -1; -static int hf_bit4infoflagsl = -1; -static int hf_bit5infoflagsl = -1; -static int hf_bit6infoflagsl = -1; -static int hf_bit7infoflagsl = -1; -static int hf_bit8infoflagsl = -1; -static int hf_bit9infoflagsl = -1; -static int hf_bit10infoflagsl = -1; -static int hf_bit11infoflagsl = -1; -static int hf_bit12infoflagsl = -1; -static int hf_bit13infoflagsl = -1; -static int hf_bit14infoflagsl = -1; -static int hf_bit15infoflagsl = -1; -static int hf_bit16infoflagsl = -1; -static int hf_infoflagsh = -1; -static int hf_bit1infoflagsh = -1; -static int hf_bit2infoflagsh = -1; -static int hf_bit3infoflagsh = -1; -static int hf_bit4infoflagsh = -1; -static int hf_bit5infoflagsh = -1; -static int hf_bit6infoflagsh = -1; -static int hf_bit7infoflagsh = -1; -static int 
hf_bit8infoflagsh = -1; -static int hf_bit9infoflagsh = -1; -static int hf_bit10infoflagsh = -1; -static int hf_bit11infoflagsh = -1; -static int hf_bit12infoflagsh = -1; -static int hf_bit13infoflagsh = -1; -static int hf_bit14infoflagsh = -1; -static int hf_bit15infoflagsh = -1; -static int hf_bit16infoflagsh = -1; -static int hf_retinfoflagsh = -1; -static int hf_bit1retinfoflagsh = -1; -static int hf_bit2retinfoflagsh = -1; -static int hf_bit3retinfoflagsh = -1; -static int hf_bit4retinfoflagsh = -1; -static int hf_bit5retinfoflagsh = -1; -static int hf_bit6retinfoflagsh = -1; -static int hf_bit7retinfoflagsh = -1; -static int hf_bit8retinfoflagsh = -1; -static int hf_bit9retinfoflagsh = -1; -static int hf_bit10retinfoflagsh = -1; -static int hf_bit11retinfoflagsh = -1; -static int hf_bit12retinfoflagsh = -1; -static int hf_bit13retinfoflagsh = -1; -static int hf_bit14retinfoflagsh = -1; -static int hf_bit15retinfoflagsh = -1; -static int hf_bit16retinfoflagsh = -1; -static int hf_bit1lflags = -1; -static int hf_bit2lflags = -1; -static int hf_bit3lflags = -1; -static int hf_bit4lflags = -1; -static int hf_bit5lflags = -1; -static int hf_bit6lflags = -1; -static int hf_bit7lflags = -1; -static int hf_bit8lflags = -1; -static int hf_bit9lflags = -1; -static int hf_bit10lflags = -1; -static int hf_bit11lflags = -1; -static int hf_bit12lflags = -1; -static int hf_bit13lflags = -1; -static int hf_bit14lflags = -1; -static int hf_bit15lflags = -1; -static int hf_bit16lflags = -1; -static int hf_l1flagsl = -1; -static int hf_l1flagsh = -1; -static int hf_bit1l1flagsl = -1; -static int hf_bit2l1flagsl = -1; -static int hf_bit3l1flagsl = -1; -static int hf_bit4l1flagsl = -1; -static int hf_bit5l1flagsl = -1; -static int hf_bit6l1flagsl = -1; -static int hf_bit7l1flagsl = -1; -static int hf_bit8l1flagsl = -1; -static int hf_bit9l1flagsl = -1; -static int hf_bit10l1flagsl = -1; -static int hf_bit11l1flagsl = -1; -static int hf_bit12l1flagsl = -1; -static int 
hf_bit13l1flagsl = -1; -static int hf_bit14l1flagsl = -1; -static int hf_bit15l1flagsl = -1; -static int hf_bit16l1flagsl = -1; -static int hf_bit1l1flagsh = -1; -static int hf_bit2l1flagsh = -1; -static int hf_bit3l1flagsh = -1; -static int hf_bit4l1flagsh = -1; -static int hf_bit5l1flagsh = -1; -static int hf_bit6l1flagsh = -1; -static int hf_bit7l1flagsh = -1; -static int hf_bit8l1flagsh = -1; -static int hf_bit9l1flagsh = -1; -static int hf_bit10l1flagsh = -1; -static int hf_bit11l1flagsh = -1; -static int hf_bit12l1flagsh = -1; -static int hf_bit13l1flagsh = -1; -static int hf_bit14l1flagsh = -1; -static int hf_bit15l1flagsh = -1; -static int hf_bit16l1flagsh = -1; -static int hf_nds_tree_name = -1; -static int hf_nds_reply_error = -1; -static int hf_nds_net = -1; -static int hf_nds_node = -1; -static int hf_nds_socket = -1; -static int hf_add_ref_ip = -1; -static int hf_add_ref_udp = -1; -static int hf_add_ref_tcp = -1; -static int hf_referral_record = -1; -static int hf_referral_addcount = -1; -static int hf_nds_port = -1; -static int hf_mv_string = -1; -static int hf_nds_syntax = -1; -static int hf_value_string = -1; -static int hf_nds_buffer_size = -1; -static int hf_nds_ver = -1; -static int hf_nds_nflags = -1; -static int hf_nds_scope = -1; -static int hf_nds_name = -1; -static int hf_nds_comm_trans = -1; -static int hf_nds_tree_trans = -1; -static int hf_nds_iteration = -1; -static int hf_nds_eid = -1; -static int hf_nds_info_type = -1; -static int hf_nds_all_attr = -1; -static int hf_nds_req_flags = -1; -static int hf_nds_attr = -1; -static int hf_nds_crc = -1; -static int hf_nds_referrals = -1; -static int hf_nds_result_flags = -1; -static int hf_nds_tag_string = -1; -static int hf_value_bytes = -1; -static int hf_replica_type = -1; -static int hf_replica_state = -1; -static int hf_replica_number = -1; -static int hf_min_nds_ver = -1; -static int hf_nds_ver_include = -1; -static int hf_nds_ver_exclude = -1; -/* static int hf_nds_es = -1; */ -static 
int hf_es_type = -1; -/* static int hf_delim_string = -1; */ -static int hf_rdn_string = -1; -static int hf_nds_revent = -1; -static int hf_nds_rnum = -1; -static int hf_nds_name_type = -1; -static int hf_nds_rflags = -1; -static int hf_nds_eflags = -1; -static int hf_nds_depth = -1; -static int hf_nds_class_def_type = -1; -static int hf_nds_classes = -1; -static int hf_nds_return_all_classes = -1; -static int hf_nds_stream_flags = -1; -static int hf_nds_stream_name = -1; -static int hf_nds_file_handle = -1; -static int hf_nds_file_size = -1; -static int hf_nds_dn_output_type = -1; -static int hf_nds_nested_output_type = -1; -static int hf_nds_output_delimiter = -1; -static int hf_nds_output_entry_specifier = -1; -static int hf_es_value = -1; -static int hf_es_rdn_count = -1; -static int hf_nds_replica_num = -1; -static int hf_nds_event_num = -1; -static int hf_es_seconds = -1; -static int hf_nds_compare_results = -1; -static int hf_nds_parent = -1; -static int hf_nds_name_filter = -1; -static int hf_nds_class_filter = -1; -static int hf_nds_time_filter = -1; -static int hf_nds_partition_root_id = -1; -static int hf_nds_replicas = -1; -static int hf_nds_purge = -1; -static int hf_nds_local_partition = -1; -static int hf_partition_busy = -1; -static int hf_nds_number_of_changes = -1; -static int hf_sub_count = -1; -static int hf_nds_revision = -1; -static int hf_nds_base_class = -1; -static int hf_nds_relative_dn = -1; -/* static int hf_nds_root_dn = -1; */ -/* static int hf_nds_parent_dn = -1; */ -static int hf_deref_base = -1; -/* static int hf_nds_entry_info = -1; */ -static int hf_nds_base = -1; -static int hf_nds_privileges = -1; -static int hf_nds_vflags = -1; -static int hf_nds_value_len = -1; -static int hf_nds_cflags = -1; -static int hf_nds_acflags = -1; -static int hf_nds_asn1 = -1; -static int hf_nds_upper = -1; -static int hf_nds_lower = -1; -static int hf_nds_trustee_dn = -1; -static int hf_nds_attribute_dn = -1; -static int hf_nds_acl_add = -1; 
-static int hf_nds_acl_del = -1; -static int hf_nds_att_add = -1; -static int hf_nds_att_del = -1; -static int hf_nds_keep = -1; -static int hf_nds_new_rdn = -1; -static int hf_nds_time_delay = -1; -static int hf_nds_root_name = -1; -static int hf_nds_new_part_id = -1; -static int hf_nds_child_part_id = -1; -static int hf_nds_master_part_id = -1; -static int hf_nds_target_name = -1; -static int hf_nds_super = -1; -static int hf_pingflags2 = -1; -static int hf_bit1pingflags2 = -1; -static int hf_bit2pingflags2 = -1; -static int hf_bit3pingflags2 = -1; -static int hf_bit4pingflags2 = -1; -static int hf_bit5pingflags2 = -1; -static int hf_bit6pingflags2 = -1; -static int hf_bit7pingflags2 = -1; -static int hf_bit8pingflags2 = -1; -static int hf_bit9pingflags2 = -1; -static int hf_bit10pingflags2 = -1; -static int hf_bit11pingflags2 = -1; -static int hf_bit12pingflags2 = -1; -static int hf_bit13pingflags2 = -1; -static int hf_bit14pingflags2 = -1; -static int hf_bit15pingflags2 = -1; -static int hf_bit16pingflags2 = -1; -static int hf_pingflags1 = -1; -static int hf_bit1pingflags1 = -1; -static int hf_bit2pingflags1 = -1; -static int hf_bit3pingflags1 = -1; -static int hf_bit4pingflags1 = -1; -static int hf_bit5pingflags1 = -1; -static int hf_bit6pingflags1 = -1; -static int hf_bit7pingflags1 = -1; -static int hf_bit8pingflags1 = -1; -static int hf_bit9pingflags1 = -1; -static int hf_bit10pingflags1 = -1; -static int hf_bit11pingflags1 = -1; -static int hf_bit12pingflags1 = -1; -static int hf_bit13pingflags1 = -1; -static int hf_bit14pingflags1 = -1; -static int hf_bit15pingflags1 = -1; -static int hf_bit16pingflags1 = -1; -static int hf_pingpflags1 = -1; -static int hf_bit1pingpflags1 = -1; -static int hf_bit2pingpflags1 = -1; -static int hf_bit3pingpflags1 = -1; -static int hf_bit4pingpflags1 = -1; -static int hf_bit5pingpflags1 = -1; -static int hf_bit6pingpflags1 = -1; -static int hf_bit7pingpflags1 = -1; -static int hf_bit8pingpflags1 = -1; -static int 
hf_bit9pingpflags1 = -1; -static int hf_bit10pingpflags1 = -1; -static int hf_bit11pingpflags1 = -1; -static int hf_bit12pingpflags1 = -1; -static int hf_bit13pingpflags1 = -1; -static int hf_bit14pingpflags1 = -1; -static int hf_bit15pingpflags1 = -1; -static int hf_bit16pingpflags1 = -1; -static int hf_pingvflags1 = -1; -static int hf_bit1pingvflags1 = -1; -static int hf_bit2pingvflags1 = -1; -static int hf_bit3pingvflags1 = -1; -static int hf_bit4pingvflags1 = -1; -static int hf_bit5pingvflags1 = -1; -static int hf_bit6pingvflags1 = -1; -static int hf_bit7pingvflags1 = -1; -static int hf_bit8pingvflags1 = -1; -static int hf_bit9pingvflags1 = -1; -static int hf_bit10pingvflags1 = -1; -static int hf_bit11pingvflags1 = -1; -static int hf_bit12pingvflags1 = -1; -static int hf_bit13pingvflags1 = -1; -static int hf_bit14pingvflags1 = -1; -static int hf_bit15pingvflags1 = -1; -static int hf_bit16pingvflags1 = -1; -static int hf_nds_letter_ver = -1; -static int hf_nds_os_majver = -1; -static int hf_nds_os_minver = -1; -static int hf_nds_lic_flags = -1; -static int hf_nds_ds_time = -1; -static int hf_nds_ping_version = -1; -static int hf_nds_search_scope = -1; -static int hf_nds_num_objects = -1; -static int hf_siflags = -1; -static int hf_bit1siflags = -1; -static int hf_bit2siflags = -1; -static int hf_bit3siflags = -1; -static int hf_bit4siflags = -1; -static int hf_bit5siflags = -1; -static int hf_bit6siflags = -1; -static int hf_bit7siflags = -1; -static int hf_bit8siflags = -1; -static int hf_bit9siflags = -1; -static int hf_bit10siflags = -1; -static int hf_bit11siflags = -1; -static int hf_bit12siflags = -1; -static int hf_bit13siflags = -1; -static int hf_bit14siflags = -1; -static int hf_bit15siflags = -1; -static int hf_bit16siflags = -1; -static int hf_nds_segments = -1; -static int hf_nds_segment = -1; -static int hf_nds_segment_overlap = -1; -static int hf_nds_segment_overlap_conflict = -1; -static int hf_nds_segment_multiple_tails = -1; -static int 
hf_nds_segment_too_long_segment = -1; -static int hf_nds_segment_error = -1; -static int hf_nds_segment_count = -1; -static int hf_nds_reassembled_length = -1; -static int hf_nds_verb2b_req_flags = -1; -static int hf_ncp_ip_address = -1; -static int hf_ncp_copyright = -1; -static int hf_ndsprot1flag = -1; -static int hf_ndsprot2flag = -1; -static int hf_ndsprot3flag = -1; -static int hf_ndsprot4flag = -1; -static int hf_ndsprot5flag = -1; -static int hf_ndsprot6flag = -1; -static int hf_ndsprot7flag = -1; -static int hf_ndsprot8flag = -1; -static int hf_ndsprot9flag = -1; -static int hf_ndsprot10flag = -1; -static int hf_ndsprot11flag = -1; -static int hf_ndsprot12flag = -1; -static int hf_ndsprot13flag = -1; -static int hf_ndsprot14flag = -1; -static int hf_ndsprot15flag = -1; -static int hf_ndsprot16flag = -1; -static int hf_nds_svr_dst_name = -1; -static int hf_nds_tune_mark = -1; -/* static int hf_nds_create_time = -1; */ -static int hf_srvr_param_number = -1; -static int hf_srvr_param_boolean = -1; -static int hf_srvr_param_string = -1; -static int hf_nds_svr_time = -1; -static int hf_nds_crt_time = -1; -static int hf_nds_number_of_items = -1; -static int hf_nds_compare_attributes = -1; -static int hf_nds_read_attribute = -1; -static int hf_nds_write_add_delete_attribute = -1; -static int hf_nds_add_delete_self = -1; -static int hf_nds_privilege_not_defined = -1; -static int hf_nds_supervisor = -1; -static int hf_nds_inheritance_control = -1; -static int hf_nds_browse_entry = -1; -static int hf_nds_add_entry = -1; -static int hf_nds_delete_entry = -1; -static int hf_nds_rename_entry = -1; -static int hf_nds_supervisor_entry = -1; -static int hf_nds_entry_privilege_not_defined = -1; -static int hf_nds_iterator = -1; -static int hf_ncp_nds_iterverb = -1; -static int hf_iter_completion_code = -1; -/* static int hf_nds_iterobj = -1; */ -static int hf_iter_verb_completion_code = -1; -static int hf_iter_ans = -1; -static int hf_positionable = -1; -static int 
hf_num_skipped = -1; -static int hf_num_to_skip = -1; -static int hf_timelimit = -1; -static int hf_iter_index = -1; -static int hf_num_to_get = -1; -/* static int hf_ret_info_type = -1; */ -static int hf_data_size = -1; -static int hf_this_count = -1; -static int hf_max_entries = -1; -static int hf_move_position = -1; -static int hf_iter_copy = -1; -static int hf_iter_position = -1; -static int hf_iter_search = -1; -static int hf_iter_other = -1; -static int hf_nds_oid = -1; -static int hf_ncp_bytes_actually_trans_64 = -1; -static int hf_sap_name = -1; -static int hf_os_name = -1; -static int hf_vendor_name = -1; -static int hf_hardware_name = -1; -static int hf_no_request_record_found = -1; -static int hf_search_modifier = -1; -static int hf_search_pattern = -1; -static int hf_nds_acl_protected_attribute = -1; -static int hf_nds_acl_subject = -1; -static int hf_nds_acl_privileges = -1; - -static expert_field ei_ncp_file_rights_change = EI_INIT; -static expert_field ei_ncp_completion_code = EI_INIT; -static expert_field ei_nds_reply_error = EI_INIT; -static expert_field ei_ncp_destroy_connection = EI_INIT; -static expert_field ei_nds_iteration = EI_INIT; -static expert_field ei_ncp_eid = EI_INIT; -static expert_field ei_ncp_file_handle = EI_INIT; -static expert_field ei_ncp_connection_destroyed = EI_INIT; -static expert_field ei_ncp_no_request_record_found = EI_INIT; -static expert_field ei_ncp_file_rights = EI_INIT; -static expert_field ei_iter_verb_completion_code = EI_INIT; -static expert_field ei_ncp_connection_request = EI_INIT; -static expert_field ei_ncp_connection_status = EI_INIT; -static expert_field ei_ncp_op_lock_handle = EI_INIT; -static expert_field ei_ncp_effective_rights = EI_INIT; -static expert_field ei_ncp_server = EI_INIT; -static expert_field ei_ncp_invalid_offset = EI_INIT; -static expert_field ei_ncp_address_type = EI_INIT; -static expert_field ei_ncp_value_too_large = EI_INIT; +static int hf_ncp_number_of_data_streams_long; +static int 
hf_ncp_func; +static int hf_ncp_length; +static int hf_ncp_subfunc; +static int hf_ncp_group; +static int hf_ncp_fragment_handle; +static int hf_ncp_completion_code; +static int hf_ncp_connection_status; +static int hf_ncp_req_frame_num; +static int hf_ncp_req_frame_time; +static int hf_ncp_fragment_size; +static int hf_ncp_message_size; +static int hf_ncp_nds_flag; +static int hf_ncp_nds_verb; +static int hf_ping_version; +/* static int hf_nds_version; */ +/* static int hf_nds_flags; */ +static int hf_nds_reply_depth; +static int hf_nds_reply_rev; +static int hf_nds_reply_flags; +static int hf_nds_p1type; +static int hf_nds_uint32value; +static int hf_nds_bit1; +static int hf_nds_bit2; +static int hf_nds_bit3; +static int hf_nds_bit4; +static int hf_nds_bit5; +static int hf_nds_bit6; +static int hf_nds_bit7; +static int hf_nds_bit8; +static int hf_nds_bit9; +static int hf_nds_bit10; +static int hf_nds_bit11; +static int hf_nds_bit12; +static int hf_nds_bit13; +static int hf_nds_bit14; +static int hf_nds_bit15; +static int hf_nds_bit16; +static int hf_outflags; +static int hf_bit1outflags; +static int hf_bit2outflags; +static int hf_bit3outflags; +static int hf_bit4outflags; +static int hf_bit5outflags; +static int hf_bit6outflags; +static int hf_bit7outflags; +static int hf_bit8outflags; +static int hf_bit9outflags; +static int hf_bit10outflags; +static int hf_bit11outflags; +static int hf_bit12outflags; +static int hf_bit13outflags; +static int hf_bit14outflags; +static int hf_bit15outflags; +static int hf_bit16outflags; +static int hf_bit1nflags; +static int hf_bit2nflags; +static int hf_bit3nflags; +static int hf_bit4nflags; +static int hf_bit5nflags; +static int hf_bit6nflags; +static int hf_bit7nflags; +static int hf_bit8nflags; +static int hf_bit9nflags; +static int hf_bit10nflags; +static int hf_bit11nflags; +static int hf_bit12nflags; +static int hf_bit13nflags; +static int hf_bit14nflags; +static int hf_bit15nflags; +static int hf_bit16nflags; +static int 
hf_bit1rflags; +static int hf_bit2rflags; +static int hf_bit3rflags; +static int hf_bit4rflags; +static int hf_bit5rflags; +static int hf_bit6rflags; +static int hf_bit7rflags; +static int hf_bit8rflags; +static int hf_bit9rflags; +static int hf_bit10rflags; +static int hf_bit11rflags; +static int hf_bit12rflags; +static int hf_bit13rflags; +static int hf_bit14rflags; +static int hf_bit15rflags; +static int hf_bit16rflags; +static int hf_cflags; +static int hf_bit1cflags; +static int hf_bit2cflags; +static int hf_bit3cflags; +static int hf_bit4cflags; +static int hf_bit5cflags; +static int hf_bit6cflags; +static int hf_bit7cflags; +static int hf_bit8cflags; +static int hf_bit9cflags; +static int hf_bit10cflags; +static int hf_bit11cflags; +static int hf_bit12cflags; +static int hf_bit13cflags; +static int hf_bit14cflags; +static int hf_bit15cflags; +static int hf_bit16cflags; +static int hf_bit1acflags; +static int hf_bit2acflags; +static int hf_bit3acflags; +static int hf_bit4acflags; +static int hf_bit5acflags; +static int hf_bit6acflags; +static int hf_bit7acflags; +static int hf_bit8acflags; +static int hf_bit9acflags; +static int hf_bit10acflags; +static int hf_bit11acflags; +static int hf_bit12acflags; +static int hf_bit13acflags; +static int hf_bit14acflags; +static int hf_bit15acflags; +static int hf_bit16acflags; +static int hf_vflags; +static int hf_bit1vflags; +static int hf_bit2vflags; +static int hf_bit3vflags; +static int hf_bit4vflags; +static int hf_bit5vflags; +static int hf_bit6vflags; +static int hf_bit7vflags; +static int hf_bit8vflags; +static int hf_bit9vflags; +static int hf_bit10vflags; +static int hf_bit11vflags; +static int hf_bit12vflags; +static int hf_bit13vflags; +static int hf_bit14vflags; +static int hf_bit15vflags; +static int hf_bit16vflags; +static int hf_eflags; +static int hf_bit1eflags; +static int hf_bit2eflags; +static int hf_bit3eflags; +static int hf_bit4eflags; +static int hf_bit5eflags; +static int hf_bit6eflags; +static 
int hf_bit7eflags; +static int hf_bit8eflags; +static int hf_bit9eflags; +static int hf_bit10eflags; +static int hf_bit11eflags; +static int hf_bit12eflags; +static int hf_bit13eflags; +static int hf_bit14eflags; +static int hf_bit15eflags; +static int hf_bit16eflags; +static int hf_infoflagsl; +static int hf_retinfoflagsl; +static int hf_bit1infoflagsl; +static int hf_bit2infoflagsl; +static int hf_bit3infoflagsl; +static int hf_bit4infoflagsl; +static int hf_bit5infoflagsl; +static int hf_bit6infoflagsl; +static int hf_bit7infoflagsl; +static int hf_bit8infoflagsl; +static int hf_bit9infoflagsl; +static int hf_bit10infoflagsl; +static int hf_bit11infoflagsl; +static int hf_bit12infoflagsl; +static int hf_bit13infoflagsl; +static int hf_bit14infoflagsl; +static int hf_bit15infoflagsl; +static int hf_bit16infoflagsl; +static int hf_infoflagsh; +static int hf_bit1infoflagsh; +static int hf_bit2infoflagsh; +static int hf_bit3infoflagsh; +static int hf_bit4infoflagsh; +static int hf_bit5infoflagsh; +static int hf_bit6infoflagsh; +static int hf_bit7infoflagsh; +static int hf_bit8infoflagsh; +static int hf_bit9infoflagsh; +static int hf_bit10infoflagsh; +static int hf_bit11infoflagsh; +static int hf_bit12infoflagsh; +static int hf_bit13infoflagsh; +static int hf_bit14infoflagsh; +static int hf_bit15infoflagsh; +static int hf_bit16infoflagsh; +static int hf_retinfoflagsh; +static int hf_bit1retinfoflagsh; +static int hf_bit2retinfoflagsh; +static int hf_bit3retinfoflagsh; +static int hf_bit4retinfoflagsh; +static int hf_bit5retinfoflagsh; +static int hf_bit6retinfoflagsh; +static int hf_bit7retinfoflagsh; +static int hf_bit8retinfoflagsh; +static int hf_bit9retinfoflagsh; +static int hf_bit10retinfoflagsh; +static int hf_bit11retinfoflagsh; +static int hf_bit12retinfoflagsh; +static int hf_bit13retinfoflagsh; +static int hf_bit14retinfoflagsh; +static int hf_bit15retinfoflagsh; +static int hf_bit16retinfoflagsh; +static int hf_bit1lflags; +static int hf_bit2lflags; 
+static int hf_bit3lflags; +static int hf_bit4lflags; +static int hf_bit5lflags; +static int hf_bit6lflags; +static int hf_bit7lflags; +static int hf_bit8lflags; +static int hf_bit9lflags; +static int hf_bit10lflags; +static int hf_bit11lflags; +static int hf_bit12lflags; +static int hf_bit13lflags; +static int hf_bit14lflags; +static int hf_bit15lflags; +static int hf_bit16lflags; +static int hf_l1flagsl; +static int hf_l1flagsh; +static int hf_bit1l1flagsl; +static int hf_bit2l1flagsl; +static int hf_bit3l1flagsl; +static int hf_bit4l1flagsl; +static int hf_bit5l1flagsl; +static int hf_bit6l1flagsl; +static int hf_bit7l1flagsl; +static int hf_bit8l1flagsl; +static int hf_bit9l1flagsl; +static int hf_bit10l1flagsl; +static int hf_bit11l1flagsl; +static int hf_bit12l1flagsl; +static int hf_bit13l1flagsl; +static int hf_bit14l1flagsl; +static int hf_bit15l1flagsl; +static int hf_bit16l1flagsl; +static int hf_bit1l1flagsh; +static int hf_bit2l1flagsh; +static int hf_bit3l1flagsh; +static int hf_bit4l1flagsh; +static int hf_bit5l1flagsh; +static int hf_bit6l1flagsh; +static int hf_bit7l1flagsh; +static int hf_bit8l1flagsh; +static int hf_bit9l1flagsh; +static int hf_bit10l1flagsh; +static int hf_bit11l1flagsh; +static int hf_bit12l1flagsh; +static int hf_bit13l1flagsh; +static int hf_bit14l1flagsh; +static int hf_bit15l1flagsh; +static int hf_bit16l1flagsh; +static int hf_nds_tree_name; +static int hf_nds_reply_error; +static int hf_nds_net; +static int hf_nds_node; +static int hf_nds_socket; +static int hf_add_ref_ip; +static int hf_add_ref_udp; +static int hf_add_ref_tcp; +static int hf_referral_record; +static int hf_referral_addcount; +static int hf_nds_port; +static int hf_mv_string; +static int hf_nds_syntax; +static int hf_value_string; +static int hf_nds_buffer_size; +static int hf_nds_ver; +static int hf_nds_nflags; +static int hf_nds_scope; +static int hf_nds_name; +static int hf_nds_comm_trans; +static int hf_nds_tree_trans; +static int hf_nds_iteration; 
+static int hf_nds_eid; +static int hf_nds_info_type; +static int hf_nds_all_attr; +static int hf_nds_req_flags; +static int hf_nds_attr; +static int hf_nds_crc; +static int hf_nds_referrals; +static int hf_nds_result_flags; +static int hf_nds_tag_string; +static int hf_value_bytes; +static int hf_replica_type; +static int hf_replica_state; +static int hf_replica_number; +static int hf_min_nds_ver; +static int hf_nds_ver_include; +static int hf_nds_ver_exclude; +/* static int hf_nds_es; */ +static int hf_es_type; +/* static int hf_delim_string; */ +static int hf_rdn_string; +static int hf_nds_revent; +static int hf_nds_rnum; +static int hf_nds_name_type; +static int hf_nds_rflags; +static int hf_nds_eflags; +static int hf_nds_depth; +static int hf_nds_class_def_type; +static int hf_nds_classes; +static int hf_nds_return_all_classes; +static int hf_nds_stream_flags; +static int hf_nds_stream_name; +static int hf_nds_file_handle; +static int hf_nds_file_size; +static int hf_nds_dn_output_type; +static int hf_nds_nested_output_type; +static int hf_nds_output_delimiter; +static int hf_nds_output_entry_specifier; +static int hf_es_value; +static int hf_es_rdn_count; +static int hf_nds_replica_num; +static int hf_nds_event_num; +static int hf_es_seconds; +static int hf_nds_compare_results; +static int hf_nds_parent; +static int hf_nds_name_filter; +static int hf_nds_class_filter; +static int hf_nds_time_filter; +static int hf_nds_partition_root_id; +static int hf_nds_replicas; +static int hf_nds_purge; +static int hf_nds_local_partition; +static int hf_partition_busy; +static int hf_nds_number_of_changes; +static int hf_sub_count; +static int hf_nds_revision; +static int hf_nds_base_class; +static int hf_nds_relative_dn; +/* static int hf_nds_root_dn; */ +/* static int hf_nds_parent_dn; */ +static int hf_deref_base; +/* static int hf_nds_entry_info; */ +static int hf_nds_base; +static int hf_nds_privileges; +static int hf_nds_vflags; +static int hf_nds_value_len; +static 
int hf_nds_cflags; +static int hf_nds_acflags; +static int hf_nds_asn1; +static int hf_nds_upper; +static int hf_nds_lower; +static int hf_nds_trustee_dn; +static int hf_nds_attribute_dn; +static int hf_nds_acl_add; +static int hf_nds_acl_del; +static int hf_nds_att_add; +static int hf_nds_att_del; +static int hf_nds_keep; +static int hf_nds_new_rdn; +static int hf_nds_time_delay; +static int hf_nds_root_name; +static int hf_nds_new_part_id; +static int hf_nds_child_part_id; +static int hf_nds_master_part_id; +static int hf_nds_target_name; +static int hf_nds_super; +static int hf_pingflags2; +static int hf_bit1pingflags2; +static int hf_bit2pingflags2; +static int hf_bit3pingflags2; +static int hf_bit4pingflags2; +static int hf_bit5pingflags2; +static int hf_bit6pingflags2; +static int hf_bit7pingflags2; +static int hf_bit8pingflags2; +static int hf_bit9pingflags2; +static int hf_bit10pingflags2; +static int hf_bit11pingflags2; +static int hf_bit12pingflags2; +static int hf_bit13pingflags2; +static int hf_bit14pingflags2; +static int hf_bit15pingflags2; +static int hf_bit16pingflags2; +static int hf_pingflags1; +static int hf_bit1pingflags1; +static int hf_bit2pingflags1; +static int hf_bit3pingflags1; +static int hf_bit4pingflags1; +static int hf_bit5pingflags1; +static int hf_bit6pingflags1; +static int hf_bit7pingflags1; +static int hf_bit8pingflags1; +static int hf_bit9pingflags1; +static int hf_bit10pingflags1; +static int hf_bit11pingflags1; +static int hf_bit12pingflags1; +static int hf_bit13pingflags1; +static int hf_bit14pingflags1; +static int hf_bit15pingflags1; +static int hf_bit16pingflags1; +static int hf_pingpflags1; +static int hf_bit1pingpflags1; +static int hf_bit2pingpflags1; +static int hf_bit3pingpflags1; +static int hf_bit4pingpflags1; +static int hf_bit5pingpflags1; +static int hf_bit6pingpflags1; +static int hf_bit7pingpflags1; +static int hf_bit8pingpflags1; +static int hf_bit9pingpflags1; +static int hf_bit10pingpflags1; +static int 
hf_bit11pingpflags1; +static int hf_bit12pingpflags1; +static int hf_bit13pingpflags1; +static int hf_bit14pingpflags1; +static int hf_bit15pingpflags1; +static int hf_bit16pingpflags1; +static int hf_pingvflags1; +static int hf_bit1pingvflags1; +static int hf_bit2pingvflags1; +static int hf_bit3pingvflags1; +static int hf_bit4pingvflags1; +static int hf_bit5pingvflags1; +static int hf_bit6pingvflags1; +static int hf_bit7pingvflags1; +static int hf_bit8pingvflags1; +static int hf_bit9pingvflags1; +static int hf_bit10pingvflags1; +static int hf_bit11pingvflags1; +static int hf_bit12pingvflags1; +static int hf_bit13pingvflags1; +static int hf_bit14pingvflags1; +static int hf_bit15pingvflags1; +static int hf_bit16pingvflags1; +static int hf_nds_letter_ver; +static int hf_nds_os_majver; +static int hf_nds_os_minver; +static int hf_nds_lic_flags; +static int hf_nds_ds_time; +static int hf_nds_ping_version; +static int hf_nds_search_scope; +static int hf_nds_num_objects; +static int hf_siflags; +static int hf_bit1siflags; +static int hf_bit2siflags; +static int hf_bit3siflags; +static int hf_bit4siflags; +static int hf_bit5siflags; +static int hf_bit6siflags; +static int hf_bit7siflags; +static int hf_bit8siflags; +static int hf_bit9siflags; +static int hf_bit10siflags; +static int hf_bit11siflags; +static int hf_bit12siflags; +static int hf_bit13siflags; +static int hf_bit14siflags; +static int hf_bit15siflags; +static int hf_bit16siflags; +static int hf_nds_segments; +static int hf_nds_segment; +static int hf_nds_segment_overlap; +static int hf_nds_segment_overlap_conflict; +static int hf_nds_segment_multiple_tails; +static int hf_nds_segment_too_long_segment; +static int hf_nds_segment_error; +static int hf_nds_segment_count; +static int hf_nds_reassembled_length; +static int hf_nds_verb2b_req_flags; +static int hf_ncp_ip_address; +static int hf_ncp_copyright; +static int hf_ndsprot1flag; +static int hf_ndsprot2flag; +static int hf_ndsprot3flag; +static int 
hf_ndsprot4flag; +static int hf_ndsprot5flag; +static int hf_ndsprot6flag; +static int hf_ndsprot7flag; +static int hf_ndsprot8flag; +static int hf_ndsprot9flag; +static int hf_ndsprot10flag; +static int hf_ndsprot11flag; +static int hf_ndsprot12flag; +static int hf_ndsprot13flag; +static int hf_ndsprot14flag; +static int hf_ndsprot15flag; +static int hf_ndsprot16flag; +static int hf_nds_svr_dst_name; +static int hf_nds_tune_mark; +/* static int hf_nds_create_time; */ +static int hf_srvr_param_number; +static int hf_srvr_param_boolean; +static int hf_srvr_param_string; +static int hf_nds_svr_time; +static int hf_nds_crt_time; +static int hf_nds_number_of_items; +static int hf_nds_compare_attributes; +static int hf_nds_read_attribute; +static int hf_nds_write_add_delete_attribute; +static int hf_nds_add_delete_self; +static int hf_nds_privilege_not_defined; +static int hf_nds_supervisor; +static int hf_nds_inheritance_control; +static int hf_nds_browse_entry; +static int hf_nds_add_entry; +static int hf_nds_delete_entry; +static int hf_nds_rename_entry; +static int hf_nds_supervisor_entry; +static int hf_nds_entry_privilege_not_defined; +static int hf_nds_iterator; +static int hf_ncp_nds_iterverb; +static int hf_iter_completion_code; +/* static int hf_nds_iterobj; */ +static int hf_iter_verb_completion_code; +static int hf_iter_ans; +static int hf_positionable; +static int hf_num_skipped; +static int hf_num_to_skip; +static int hf_timelimit; +static int hf_iter_index; +static int hf_num_to_get; +/* static int hf_ret_info_type; */ +static int hf_data_size; +static int hf_this_count; +static int hf_max_entries; +static int hf_move_position; +static int hf_iter_copy; +static int hf_iter_position; +static int hf_iter_search; +static int hf_iter_other; +static int hf_nds_oid; +static int hf_ncp_bytes_actually_trans_64; +static int hf_sap_name; +static int hf_os_name; +static int hf_vendor_name; +static int hf_hardware_name; +static int hf_no_request_record_found; +static 
int hf_search_modifier; +static int hf_search_pattern; +static int hf_nds_acl_protected_attribute; +static int hf_nds_acl_subject; +static int hf_nds_acl_privileges; + +static expert_field ei_ncp_file_rights_change; +static expert_field ei_ncp_completion_code; +static expert_field ei_nds_reply_error; +static expert_field ei_ncp_destroy_connection; +static expert_field ei_nds_iteration; +static expert_field ei_ncp_eid; +static expert_field ei_ncp_file_handle; +static expert_field ei_ncp_connection_destroyed; +static expert_field ei_ncp_no_request_record_found; +static expert_field ei_ncp_file_rights; +static expert_field ei_iter_verb_completion_code; +static expert_field ei_ncp_connection_request; +static expert_field ei_ncp_connection_status; +static expert_field ei_ncp_op_lock_handle; +static expert_field ei_ncp_effective_rights; +static expert_field ei_ncp_server; +static expert_field ei_ncp_invalid_offset; +static expert_field ei_ncp_address_type; +static expert_field ei_ncp_value_too_large; """) # Look at all packet types in the packets collection, and cull information @@ -6553,7 +6553,7 @@ static expert_field ei_ncp_value_too_large = EI_INIT; sorted_vars = list(variables_used_hash.values()) sorted_vars.sort() for var in sorted_vars: - print("static int " + var.HFName() + " = -1;") + print("static int " + var.HFName() + ";") # Print the value_string's @@ -8486,7 +8486,7 @@ proto_register_ncp2222(void) { "Vendor Name", "ncp.vendor_name", FT_STRING, BASE_NONE, NULL, 0x0, NULL, HFILL }}, { &hf_hardware_name, - { "Hardware Name", "ncp.harware_name", FT_STRING, BASE_NONE, NULL, 0x0, NULL, HFILL }}, + { "Hardware Name", "ncp.hardware_name", FT_STRING, BASE_NONE, NULL, 0x0, NULL, HFILL }}, { &hf_no_request_record_found, { "No request record found. 
Parsing is impossible.", "ncp.no_request_record_found", FT_NONE, BASE_NONE, NULL, 0x0, NULL, HFILL }}, diff --git a/tools/netscreen2dump.py b/tools/netscreen2dump.py deleted file mode 100755 index 7aaac94b..00000000 --- a/tools/netscreen2dump.py +++ /dev/null @@ -1,137 +0,0 @@ -#!/usr/bin/env python -""" -Converts netscreen snoop hex-dumps to a hex-dump that text2pcap can read. - -Copyright (c) 2004 by Gilbert Ramirez - -SPDX-License-Identifier: GPL-2.0-or-later -""" - -import sys -import re -import os -import stat -import time - - -class OutputFile: - TIMER_MAX = 99999.9 - - def __init__(self, name, base_time): - try: - self.fh = open(name, "w") - except IOError, err: - sys.exit(err) - - self.base_time = base_time - self.prev_timestamp = 0.0 - - def PrintPacket(self, timestamp, datalines): - # What do to with the timestamp? I need more data about what - # the netscreen timestamp is, then I can generate one for the text file. - # print("TS:", timestamp.group("time")) - try: - timestamp = float(timestamp.group("time")) - except ValueError: - sys.exit("Unable to convert '%s' to floating point." % - (timestamp,)) - - # Did we wrap around the timeer max? - if timestamp < self.prev_timestamp: - self.base_time += self.TIMER_MAX - - self.prev_timestamp = timestamp - - packet_timestamp = self.base_time + timestamp - - # Determine the time string to print - gmtime = time.gmtime(packet_timestamp) - subsecs = packet_timestamp - int(packet_timestamp) - assert subsecs <= 0 - subsecs = int(subsecs * 10) - - print >> self.fh, "%s.%d" % (time.strftime("%Y-%m-%d %H:%M:%S", gmtime), \ - subsecs) - - # Print the packet data - offset = 0 - for lineno, hexgroup in datalines: - hexline = hexgroup.group("hex") - hexpairs = hexline.split() - print >> self.fh, "%08x %s" % (offset, hexline) - offset += len(hexpairs) - - # Blank line - print >> self.fh - - -# Find a timestamp line -re_timestamp = re.compile(r"^(?P