summaryrefslogtreecommitdiffstats
path: root/tools
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-09-19 04:14:53 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-09-19 04:14:53 +0000
commita86c5f7cae7ec9a3398300555a0b644689d946a1 (patch)
tree39fe4b107c71174fd1e8a8ceb9a4d2aa14116248 /tools
parentReleasing progress-linux version 4.2.6-1~progress7.99u1. (diff)
downloadwireshark-a86c5f7cae7ec9a3398300555a0b644689d946a1.tar.xz
wireshark-a86c5f7cae7ec9a3398300555a0b644689d946a1.zip
Merging upstream version 4.4.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tools')
-rw-r--r--tools/.clang-tidy10
-rw-r--r--tools/Get-HardenFlags.ps12
-rwxr-xr-xtools/alpine-setup.sh15
-rwxr-xr-xtools/arch-setup.sh83
-rwxr-xr-xtools/asn2wrs.py161
-rw-r--r--tools/asterix/README.md2
-rwxr-xr-xtools/asterix/convertspec.py339
-rw-r--r--tools/asterix/packet-asterix-template.c113
-rwxr-xr-xtools/asterix/update-specs.py189
-rwxr-xr-xtools/bsd-setup.sh21
-rwxr-xr-xtools/checkAPIs.pl12
-rwxr-xr-xtools/check_col_apis.py310
-rwxr-xr-xtools/check_dissector.py25
-rwxr-xr-xtools/check_dissector_urls.py65
-rwxr-xr-xtools/check_help_urls.py2
-rwxr-xr-xtools/check_spelling.py179
-rwxr-xr-xtools/check_static.py85
-rwxr-xr-xtools/check_tfs.py179
-rwxr-xr-xtools/check_typed_item_calls.py594
-rwxr-xr-xtools/check_val_to_str.py35
-rwxr-xr-xtools/checkfiltername.pl1
-rwxr-xr-xtools/checkhf.pl2
-rwxr-xr-xtools/checklicenses.py12
-rwxr-xr-xtools/convert-glib-types.py62
-rwxr-xr-xtools/convert-proto-init.py73
-rwxr-xr-xtools/convert_expert_add_info_format.pl5
-rwxr-xr-xtools/convert_proto_tree_add_text.pl21
-rwxr-xr-xtools/debian-setup.sh265
-rwxr-xr-xtools/delete_includes.py3
-rw-r--r--tools/detect_bad_alloc_patterns.py8
-rwxr-xr-xtools/eti2wireshark.py80
-rwxr-xr-xtools/fuzz-test.sh2
-rwxr-xr-xtools/generate-bacnet-vendors.py21
-rwxr-xr-xtools/generate-dissector.py2
-rwxr-xr-xtools/generate-nl80211-fields.py28
-rwxr-xr-xtools/generate-sysdig-event.py6
-rwxr-xr-xtools/generate_authors.py6
-rwxr-xr-xtools/indexcap.py8
-rwxr-xr-xtools/json2pcap/json2pcap.py2
-rw-r--r--tools/lemon/CMakeLists.txt2
-rw-r--r--tools/lemon/lemon.c3
-rw-r--r--tools/lemon/patches/01-lemon-dashdash.patch14
-rwxr-xr-xtools/macos-setup-brew.sh45
-rw-r--r--tools/macos-setup-patches/falco-include-dirs.patch15
-rw-r--r--tools/macos-setup-patches/falco-uthash_h-install.patch9
-rw-r--r--tools/macos-setup-patches/glib-pkgconfig.patch10
-rw-r--r--tools/macos-setup-patches/gnutls-pkgconfig.patch8
-rwxr-xr-xtools/macos-setup-patches/qt-fix-pc-file24
-rwxr-xr-xtools/macos-setup-patches/qt-fix-pc-files21
-rw-r--r--tools/macos-setup-patches/snappy-signed.patch11
-rw-r--r--tools/macos-setup-patches/spandsp-configure-patch53
-rwxr-xr-xtools/macos-setup.sh2118
-rwxr-xr-xtools/make-bluetooth.py368
-rwxr-xr-xtools/make-enterprises.py29
-rwxr-xr-xtools/make-enums.py2
-rwxr-xr-xtools/make-iana-ip.py209
-rwxr-xr-x[-rw-r--r--]tools/make-isobus.py15
-rwxr-xr-xtools/make-manuf.py10
-rwxr-xr-xtools/make-no-reassembly-profile.py4
-rwxr-xr-xtools/make-packet-dcm.py24
-rwxr-xr-xtools/make-pci-ids.py39
-rwxr-xr-xtools/make-plugin-reg.py18
-rwxr-xr-xtools/make-regs.py10
-rwxr-xr-xtools/make-services.py33
-rwxr-xr-xtools/make-usb.py2
-rwxr-xr-xtools/make-version.py45
-rwxr-xr-xtools/make-wsluarm.py458
-rwxr-xr-xtools/ncp2222.py1132
-rwxr-xr-xtools/netscreen2dump.py137
-rwxr-xr-xtools/parse_xml2skinny_dissector.py32
-rw-r--r--tools/pidl/idl.yp29
-rw-r--r--tools/pidl/lib/Parse/Pidl.pm2
-rw-r--r--tools/pidl/lib/Parse/Pidl/CUtil.pm1
-rw-r--r--tools/pidl/lib/Parse/Pidl/Compat.pm1
-rw-r--r--tools/pidl/lib/Parse/Pidl/Dump.pm1
-rw-r--r--tools/pidl/lib/Parse/Pidl/Expr.pm1
-rw-r--r--tools/pidl/lib/Parse/Pidl/IDL.pm30
-rw-r--r--tools/pidl/lib/Parse/Pidl/NDR.pm187
-rw-r--r--tools/pidl/lib/Parse/Pidl/ODL.pm3
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba3/ClientNDR.pm11
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba3/ServerNDR.pm7
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4.pm1
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/COM/Header.pm1
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/COM/Proxy.pm3
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/COM/Stub.pm7
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/Header.pm5
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/NDR/Client.pm23
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/NDR/Parser.pm522
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/NDR/Server.pm40
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/NDR/ServerCompat.pm624
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/Python.pm403
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/TDR.pm13
-rw-r--r--tools/pidl/lib/Parse/Pidl/Samba4/Template.pm1
-rw-r--r--tools/pidl/lib/Parse/Pidl/Typelist.pm62
-rw-r--r--tools/pidl/lib/Parse/Pidl/Util.pm38
-rw-r--r--tools/pidl/lib/Parse/Pidl/Wireshark/Conformance.pm1
-rw-r--r--tools/pidl/lib/Parse/Pidl/Wireshark/NDR.pm127
-rw-r--r--tools/pidl/lib/Parse/Yapp/Driver.pm471
-rw-r--r--tools/pidl/lib/wscript_build37
-rwxr-xr-xtools/pidl/pidl24
-rw-r--r--tools/pidl/tests/Util.pm1
-rwxr-xr-xtools/pidl/tests/header.pl21
-rwxr-xr-xtools/pidl/tests/ndr.pl3
-rwxr-xr-xtools/pidl/tests/ndr_align.pl11
-rwxr-xr-xtools/pidl/tests/ndr_alloc.pl1
-rwxr-xr-xtools/pidl/tests/ndr_array.pl1
-rwxr-xr-xtools/pidl/tests/ndr_compat.pl1
-rwxr-xr-xtools/pidl/tests/ndr_fullptr.pl3
-rwxr-xr-xtools/pidl/tests/ndr_refptr.pl37
-rwxr-xr-xtools/pidl/tests/ndr_represent.pl1
-rwxr-xr-xtools/pidl/tests/ndr_simple.pl1
-rwxr-xr-xtools/pidl/tests/ndr_string.pl1
-rwxr-xr-xtools/pidl/tests/ndr_tagtype.pl8
-rwxr-xr-xtools/pidl/tests/parse_idl.pl2
-rwxr-xr-xtools/pidl/tests/samba-ndr.pl1
-rwxr-xr-xtools/pidl/tests/samba3-cli.pl2
-rwxr-xr-xtools/pidl/tests/tdr.pl12
-rwxr-xr-xtools/pidl/tests/test_util.pl1
-rwxr-xr-xtools/pidl/tests/typelist.pl4
-rwxr-xr-xtools/pidl/tests/wireshark-ndr.pl48
-rw-r--r--tools/pidl/wscript62
-rwxr-xr-xtools/pre-commit-ignore.py5
-rwxr-xr-xtools/process-x11-fields.pl2
-rwxr-xr-xtools/process-x11-xcb.pl133
-rw-r--r--tools/radiotap-gen/radiotap-gen.c2
-rwxr-xr-xtools/rpm-setup.sh126
-rwxr-xr-xtools/update-appdata.py7
-rwxr-xr-xtools/update-tools-help.py12
-rwxr-xr-xtools/validate-commit.py13
-rw-r--r--tools/win-setup.ps164
-rwxr-xr-xtools/wireshark_gen.py151
-rw-r--r--tools/wireshark_words.txt1001
-rw-r--r--tools/ws-coding-style.cfg2
-rw-r--r--tools/yacc.py159
134 files changed, 8363 insertions, 4140 deletions
diff --git a/tools/.clang-tidy b/tools/.clang-tidy
new file mode 100644
index 00000000..84ea0537
--- /dev/null
+++ b/tools/.clang-tidy
@@ -0,0 +1,10 @@
+InheritParentConfig: true
+
+# We don't want to do any checks in this directory yet so hack around
+# the fact that Clang-Tidy won't let us disable all checks.
+# https://stackoverflow.com/a/58379342/82195
+Checks:
+ - '-*'
+ - 'misc-definitions-in-headers'
+CheckOptions:
+ - { key: 'HeaderFileExtensions', value: 'DISABLED' }
diff --git a/tools/Get-HardenFlags.ps1 b/tools/Get-HardenFlags.ps1
index c0785659..eb73b40f 100644
--- a/tools/Get-HardenFlags.ps1
+++ b/tools/Get-HardenFlags.ps1
@@ -16,7 +16,7 @@
# on all the binaries in the distribution, and then filters
# for the NXCOMPAT and DYNAMICBASE flags.
-# This script will probably fail for the forseeable future.
+# This script will probably fail for the foreseeable future.
#
# Many of our third-party libraries are compiled using MinGW-w64. Its version
# of `ld` doesn't enable the dynamicbase, nxcompat, or high-entropy-va flags
diff --git a/tools/alpine-setup.sh b/tools/alpine-setup.sh
index b5cd5a17..0cdb7799 100755
--- a/tools/alpine-setup.sh
+++ b/tools/alpine-setup.sh
@@ -13,7 +13,7 @@
set -e -u -o pipefail
-function print_usage() {
+print_usage() {
printf "\\nUtility to setup a alpine system for Wireshark Development.\\n"
printf "The basic usage installs the needed software\\n\\n"
printf "Usage: %s [--install-optional] [...other options...]\\n" "$0"
@@ -86,7 +86,7 @@ ADDITIONAL_LIST="
snappy-dev
nghttp2-dev
nghttp3-dev
- lua5.2-dev
+ lua5.4-dev
libnl3-dev
sbc-dev
minizip-dev
@@ -98,10 +98,10 @@ ADDITIONAL_LIST="
"
# Uncomment to add PNG compression utilities used by compress-pngs:
-# ADDITIONAL_LIST="$ADDITIONAL_LIST \
-# advancecomp \
-# optipng \
-# oxipng \
+# ADDITIONAL_LIST="$ADDITIONAL_LIST
+# advancecomp
+# optipng
+# oxipng
# pngcrush"
# Adds package $2 to list variable $1 if the package is found.
@@ -110,7 +110,7 @@ add_package() {
local list="$1" pkgname="$2"
# fail if the package is not known
- apk list $pkgname &> /dev/null || return 1
+ apk list "$pkgname" &> /dev/null || return 1
# package is found, append it to list
eval "${list}=\"\${${list}} \${pkgname}\""
@@ -125,6 +125,7 @@ then
fi
apk update || exit 2
+# shellcheck disable=SC2086
apk add $ACTUAL_LIST $OPTIONS || exit 2
if [ $ADDITIONAL -eq 0 ]
diff --git a/tools/arch-setup.sh b/tools/arch-setup.sh
index 1443c528..8c8e9f1a 100755
--- a/tools/arch-setup.sh
+++ b/tools/arch-setup.sh
@@ -58,51 +58,51 @@ then
exit 1
fi
-BASIC_LIST="base-devel \
- bcg729 \
- brotli \
- c-ares \
- cmake \
- git \
- glib2 \
- gnutls \
- krb5 \
- libcap \
- libgcrypt \
- libilbc \
- libmaxminddb \
- libnghttp2 \
- libnghttp3 \
- libnl \
- libpcap \
- libssh \
- libxml2 \
- lua52 \
- lz4 \
- minizip \
- ninja \
- pcre2 \
- python \
- qt6-base \
- qt6-multimedia \
- qt6-tools \
- qt6-5compat \
- sbc \
- snappy \
- spandsp \
- speexdsp \
- zlib \
+BASIC_LIST="base-devel
+ bcg729
+ brotli
+ c-ares
+ cmake
+ git
+ glib2
+ gnutls
+ krb5
+ libcap
+ libgcrypt
+ libilbc
+ libmaxminddb
+ libnghttp2
+ libnghttp3
+ libnl
+ libpcap
+ libssh
+ libxml2
+ lua
+ lz4
+ minizip
+ ninja
+ pcre2
+ python
+ qt6-base
+ qt6-multimedia
+ qt6-tools
+ qt6-5compat
+ sbc
+ snappy
+ spandsp
+ speexdsp
+ zlib
zstd"
-ADDITIONAL_LIST="asciidoctor \
- ccache \
- docbook-xml \
- docbook-xsl \
- doxygen \
- libxslt \
+ADDITIONAL_LIST="asciidoctor
+ ccache
+ docbook-xml
+ docbook-xsl
+ doxygen
+ libxslt
perl"
-TESTDEPS_LIST="python-pytest \
+TESTDEPS_LIST="python-pytest
python-pytest-xdist"
ACTUAL_LIST=$BASIC_LIST
@@ -118,6 +118,7 @@ then
fi
# Partial upgrades are unsupported.
+# shellcheck disable=SC2086
pacman --sync --refresh --sysupgrade --needed $ACTUAL_LIST $OPTIONS || exit 2
if [ $ADDITIONAL -eq 0 ]
diff --git a/tools/asn2wrs.py b/tools/asn2wrs.py
index 7f72443b..6d03a229 100755
--- a/tools/asn2wrs.py
+++ b/tools/asn2wrs.py
@@ -45,7 +45,7 @@ import os
import os.path
import time
import getopt
-import traceback
+#import traceback
try:
from ply import lex
@@ -160,6 +160,7 @@ input_file = None
g_conform = None
lexer = None
in_oid = False
+quiet = False
class LexError(Exception):
def __init__(self, tok, filename=None):
@@ -644,9 +645,9 @@ class EthCtx:
return False
def value_max(self, a, b):
- if (a == 'MAX') or (b == 'MAX'): return 'MAX';
- if a == 'MIN': return b;
- if b == 'MIN': return a;
+ if (a == 'MAX') or (b == 'MAX'): return 'MAX'
+ if a == 'MIN': return b
+ if b == 'MIN': return a
try:
if (int(a) > int(b)):
return a
@@ -657,9 +658,9 @@ class EthCtx:
return "MAX((%s),(%s))" % (a, b)
def value_min(self, a, b):
- if (a == 'MIN') or (b == 'MIN'): return 'MIN';
- if a == 'MAX': return b;
- if b == 'MAX': return a;
+ if (a == 'MIN') or (b == 'MIN'): return 'MIN'
+ if a == 'MAX': return b
+ if b == 'MAX': return a
try:
if (int(a) < int(b)):
return a
@@ -723,7 +724,7 @@ class EthCtx:
val = self.type[t]['val']
(ftype, display) = val.eth_ftype(self)
attr.update({ 'TYPE' : ftype, 'DISPLAY' : display,
- 'STRINGS' : val.eth_strings(), 'BITMASK' : '0' });
+ 'STRINGS' : val.eth_strings(), 'BITMASK' : '0' })
else:
attr.update(self.type[t]['attr'])
attr.update(self.eth_type[self.type[t]['ethname']]['attr'])
@@ -1060,7 +1061,7 @@ class EthCtx:
#--- eth_clean --------------------------------------------------------------
def eth_clean(self):
- self.proto = self.proto_opt;
+ self.proto = self.proto_opt
#--- ASN.1 tables ----------------
self.assign = {}
self.assign_ord = []
@@ -1242,14 +1243,12 @@ class EthCtx:
for t in self.eth_type_ord:
bits = self.eth_type[t]['val'].eth_named_bits()
if (bits):
- old_val = 0
for (val, id) in bits:
self.named_bit.append({'name' : id, 'val' : val,
'ethname' : 'hf_%s_%s_%s' % (self.eproto, t, asn2c(id)),
'ftype' : 'FT_BOOLEAN', 'display' : '8',
'strings' : 'NULL',
'bitmask' : '0x'+('80','40','20','10','08','04','02','01')[val%8]})
- old_val = val + 1
if self.eth_type[t]['val'].eth_need_tree():
self.eth_type[t]['tree'] = "ett_%s_%s" % (self.eth_type[t]['proto'], t)
else:
@@ -1560,7 +1559,7 @@ class EthCtx:
if len(cycle_funcs) > 1:
out += f'''\
- // {' → '.join(cycle_funcs)}
+ // {' -> '.join(cycle_funcs)}
actx->pinfo->dissection_depth += {len(cycle_funcs) - 1};
increment_dissection_depth(actx->pinfo);
'''
@@ -1613,7 +1612,7 @@ class EthCtx:
#--- eth_out_pdu_decl ----------------------------------------------------------
def eth_out_pdu_decl(self, f):
- t = self.eth_hf[f]['ethtype']
+ #t = self.eth_hf[f]['ethtype']
out = ''
if (not self.eth_hf[f]['pdu']['export']):
out += 'static '
@@ -1626,13 +1625,13 @@ class EthCtx:
if not len(self.eth_hf_ord) and not len(self.eth_hfpdu_ord) and not len(self.named_bit): return
fx = self.output.file_open('hf')
for f in (self.eth_hfpdu_ord + self.eth_hf_ord):
- fx.write("%-50s/* %s */\n" % ("static int %s = -1; " % (self.eth_hf[f]['fullname']), self.eth_hf[f]['ethtype']))
+ fx.write("%-50s/* %s */\n" % ("static int %s; " % (self.eth_hf[f]['fullname']), self.eth_hf[f]['ethtype']))
if (self.named_bit):
fx.write('/* named bits */\n')
for nb in self.named_bit:
- fx.write("static int %s = -1;\n" % (nb['ethname']))
+ fx.write("static int %s;\n" % (nb['ethname']))
if (self.dummy_eag_field):
- fx.write("static int %s = -1; /* never registered */\n" % (self.dummy_eag_field))
+ fx.write("static int %s; /* never registered */\n" % (self.dummy_eag_field))
self.output.file_close(fx)
#--- eth_output_hf_arr ------------------------------------------------------
@@ -1685,10 +1684,10 @@ class EthCtx:
def eth_output_ett (self):
fx = self.output.file_open('ett')
fempty = True
- #fx.write("static gint ett_%s = -1;\n" % (self.eproto))
+ #fx.write("static int ett_%s;\n" % (self.eproto))
for t in self.eth_type_ord:
if self.eth_type[t]['tree']:
- fx.write("static gint %s = -1;\n" % (self.eth_type[t]['tree']))
+ fx.write("static int %s;\n" % (self.eth_type[t]['tree']))
fempty = False
self.output.file_close(fx, discard=fempty)
@@ -1808,7 +1807,7 @@ class EthCtx:
def eth_output_types(self):
def out_pdu(f):
t = self.eth_hf[f]['ethtype']
- impl = 'FALSE'
+ impl = 'false'
out = ''
if (not self.eth_hf[f]['pdu']['export']):
out += 'static '
@@ -1821,20 +1820,20 @@ class EthCtx:
ret_par = 'offset'
if (self.Per()):
if (self.Aligned()):
- aligned = 'TRUE'
+ aligned = 'true'
else:
- aligned = 'FALSE'
+ aligned = 'false'
out += " asn1_ctx_t asn1_ctx;\n"
out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_PER', aligned, 'pinfo'),))
if (self.Ber()):
out += " asn1_ctx_t asn1_ctx;\n"
- out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_BER', 'TRUE', 'pinfo'),))
+ out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_BER', 'true', 'pinfo'),))
par=((impl, 'tvb', off_par,'&asn1_ctx', 'tree', self.eth_hf[f]['fullname']),)
elif (self.Per()):
par=(('tvb', off_par, '&asn1_ctx', 'tree', self.eth_hf[f]['fullname']),)
elif (self.Oer()):
out += " asn1_ctx_t asn1_ctx;\n"
- out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_OER', 'TRUE', 'pinfo'),))
+ out += self.eth_fn_call('asn1_ctx_init', par=(('&asn1_ctx', 'ASN1_ENC_OER', 'true', 'pinfo'),))
par=(('tvb', off_par,'&asn1_ctx', 'tree', self.eth_hf[f]['fullname']),)
else:
par=((),)
@@ -1993,7 +1992,7 @@ class EthCtx:
if first_decl:
fx.write(' /*--- Syntax registrations ---*/\n')
first_decl = False
- fx.write(' %sregister_ber_syntax_dissector(%s, proto_%s, dissect_%s_PDU);\n' % (new_prefix, k, self.eproto, reg['pdu']));
+ fx.write(' %sregister_ber_syntax_dissector(%s, proto_%s, dissect_%s_PDU);\n' % (new_prefix, k, self.eproto, reg['pdu']))
fempty=False
self.output.file_close(fx, discard=fempty)
@@ -2068,6 +2067,8 @@ class EthCtx:
#--- dupl_report -----------------------------------------------------
def dupl_report(self):
+ if quiet:
+ return
# types
tmplist = sorted(self.eth_type_dupl.keys())
for t in tmplist:
@@ -2211,7 +2212,7 @@ class EthCtx:
print(', '.join(dep))
# end of print_mod()
(mod_ord, mod_cyc) = dependency_compute(self.module_ord, self.module, ignore_fn = lambda t: t not in self.module)
- print("\n# ASN.1 Moudules")
+ print("\n# ASN.1 Modules")
print("Module name Dependency")
print("-" * 100)
new_ord = False
@@ -2219,7 +2220,7 @@ class EthCtx:
print_mod(m)
new_ord = new_ord or (self.module_ord.index(m) != mod_ord.index(m))
if new_ord:
- print("\n# ASN.1 Moudules - in dependency order")
+ print("\n# ASN.1 Modules - in dependency order")
print("Module name Dependency")
print("-" * 100)
for m in (mod_ord):
@@ -2335,13 +2336,13 @@ class EthCnf:
return name in self.fn and self.fn[name]['FN_BODY']
def get_fn_text(self, name, ctx):
if (name not in self.fn):
- return '';
+ return ''
if (not self.fn[name][ctx]):
- return '';
+ return ''
self.fn[name][ctx]['used'] = True
out = self.fn[name][ctx]['text']
if (not self.suppress_line):
- out = '#line %u "%s"\n%s\n' % (self.fn[name][ctx]['lineno'], rel_dissector_path(self.fn[name][ctx]['fn']), out);
+ out = '#line %u "%s"\n%s\n' % (self.fn[name][ctx]['lineno'], rel_dissector_path(self.fn[name][ctx]['fn']), out)
return out
def add_pdu(self, par, fn, lineno):
@@ -2963,7 +2964,7 @@ class EthOut:
#--- output_fname -------------------------------------------------------
def output_fname(self, ftype, ext='c'):
fn = ''
- if not ext in ('cnf',):
+ if ext not in ('cnf',):
fn += 'packet-'
fn += self.outnm
if (ftype):
@@ -3060,10 +3061,10 @@ class EthOut:
include = re.compile(r'^\s*#\s*include\s+[<"](?P<fname>[^>"]+)[>"]', re.IGNORECASE)
- cont_linenum = 0;
+ cont_linenum = 0
while (True):
- cont_linenum = cont_linenum + 1;
+ cont_linenum = cont_linenum + 1
line = fin.readline()
if (line == ''): break
ifile = None
@@ -3372,8 +3373,8 @@ class Type (Node):
(minv, maxv, ext) = self.constr.subtype[1].GetSize(ectx)
if minv == 'MIN': minv = 'NO_BOUND'
if maxv == 'MAX': maxv = 'NO_BOUND'
- if (ext): ext = 'TRUE'
- else: ext = 'FALSE'
+ if (ext): ext = 'true'
+ else: ext = 'false'
return (minv, maxv, ext)
def eth_get_value_constr(self, ectx):
@@ -3386,16 +3387,16 @@ class Type (Node):
minv += 'U'
elif (str(minv)[0] == "-") and str(minv)[1:].isdigit():
if (int(minv) == -(2**31)):
- minv = "G_MININT32"
+ minv = "INT32_MIN"
elif (int(minv) < -(2**31)):
- minv = "G_GINT64_CONSTANT(%s)" % (str(minv))
+ minv = "INT64_C(%s)" % (str(minv))
if str(maxv).isdigit():
if (int(maxv) >= 2**32):
- maxv = "G_GUINT64_CONSTANT(%s)" % (str(maxv))
+ maxv = "UINT64_C(%s)" % (str(maxv))
else:
maxv += 'U'
- if (ext): ext = 'TRUE'
- else: ext = 'FALSE'
+ if (ext): ext = 'true'
+ else: ext = 'false'
return (minv, maxv, ext)
def eth_get_alphabet_constr(self, ectx):
@@ -3745,9 +3746,9 @@ class Module (Node):
class Module_Body (Node):
def to_python (self, ctx):
# XXX handle exports, imports.
- l = [x.to_python (ctx) for x in self.assign_list]
- l = [a for a in l if a != '']
- return "\n".join (l)
+ list = [x.to_python (ctx) for x in self.assign_list]
+ list = [a for a in list if a != '']
+ return "\n".join(list)
def to_eth(self, ectx):
# Exports
@@ -3983,9 +3984,9 @@ class TaggedType (Type):
pars['TYPE_REF_FN'] = 'dissect_%(TYPE_REF_PROTO)s_%(TYPE_REF_TNAME)s'
(pars['TAG_CLS'], pars['TAG_TAG']) = self.GetTag(ectx)
if self.HasImplicitTag(ectx):
- pars['TAG_IMPL'] = 'TRUE'
+ pars['TAG_IMPL'] = 'true'
else:
- pars['TAG_IMPL'] = 'FALSE'
+ pars['TAG_IMPL'] = 'false'
return pars
def eth_type_default_body(self, ectx, tname):
@@ -4090,7 +4091,8 @@ class SeqType (SqType):
autotag = True
lst = self.all_components()
for e in (self.elt_list):
- if e.val.HasOwnTag(): autotag = False; break;
+ if e.val.HasOwnTag(): autotag = False
+ break
# expand COMPONENTS OF
if self.need_components():
if components_available:
@@ -4110,7 +4112,7 @@ class SeqType (SqType):
e.val.SetName("eag_v%s" % (e.val.ver))
else:
e.val.SetName("eag_%d" % (eag_num))
- eag_num += 1;
+ eag_num += 1
else: # expand
new_ext_list = []
for e in (self.ext_list):
@@ -4495,10 +4497,10 @@ class ChoiceType (Type):
if (ectx.NeedTags() and (ectx.tag_def == 'AUTOMATIC')):
autotag = True
for e in (self.elt_list):
- if e.HasOwnTag(): autotag = False; break;
+ if e.HasOwnTag(): autotag = False; break
if autotag and hasattr(self, 'ext_list'):
for e in (self.ext_list):
- if e.HasOwnTag(): autotag = False; break;
+ if e.HasOwnTag(): autotag = False; break
# do autotag
if autotag:
atag = 0
@@ -4811,9 +4813,9 @@ class EnumeratedType (Type):
pars = Type.eth_type_default_pars(self, ectx, tname)
(root_num, ext_num, map_table) = self.get_vals_etc(ectx)[1:]
if self.ext is not None:
- ext = 'TRUE'
+ ext = 'true'
else:
- ext = 'FALSE'
+ ext = 'false'
pars['ROOT_NUM'] = str(root_num)
pars['EXT'] = ext
pars['EXT_NUM'] = str(ext_num)
@@ -5610,7 +5612,7 @@ class BitStringType (Type):
if (self.named_list):
sorted_list = self.named_list
sorted_list.sort()
- expected_bit_no = 0;
+ expected_bit_no = 0
for e in (sorted_list):
# Fill the table with "spare_bit" for "un named bits"
if (int(e.val) != 0) and (expected_bit_no != int(e.val)):
@@ -5973,7 +5975,7 @@ def p_Reference_1 (t):
def p_Reference_2 (t):
'''Reference : LCASE_IDENT_ASSIGNED
- | identifier ''' # instead of valuereference wich causes reduce/reduce conflict
+ | identifier ''' # instead of valuereference which causes reduce/reduce conflict
t[0] = Value_Ref(val=t[1])
def p_AssignmentList_1 (t):
@@ -6014,7 +6016,7 @@ def p_DefinedValue_1(t):
t[0] = t[1]
def p_DefinedValue_2(t):
- '''DefinedValue : identifier ''' # instead of valuereference wich causes reduce/reduce conflict
+ '''DefinedValue : identifier ''' # instead of valuereference which causes reduce/reduce conflict
t[0] = Value_Ref(val=t[1])
# 13.6
@@ -6040,7 +6042,7 @@ def p_ValueAssignment (t):
'ValueAssignment : LCASE_IDENT ValueType ASSIGNMENT Value'
t[0] = ValueAssignment(ident = t[1], typ = t[2], val = t[4])
-# only "simple" types are supported to simplify grammer
+# only "simple" types are supported to simplify grammar
def p_ValueType (t):
'''ValueType : type_ref
| BooleanType
@@ -7320,11 +7322,11 @@ def p_cls_syntax_list_2 (t):
# X.681
def p_cls_syntax_1 (t):
'cls_syntax : Type IDENTIFIED BY Value'
- t[0] = { get_class_fieled(' ') : t[1], get_class_fieled(' '.join((t[2], t[3]))) : t[4] }
+ t[0] = { get_class_field(' ') : t[1], get_class_field(' '.join((t[2], t[3]))) : t[4] }
def p_cls_syntax_2 (t):
'cls_syntax : HAS PROPERTY Value'
- t[0] = { get_class_fieled(' '.join(t[1:-1])) : t[-1:][0] }
+ t[0] = { get_class_field(' '.join(t[1:-1])) : t[-1:][0] }
# X.880
def p_cls_syntax_3 (t):
@@ -7337,17 +7339,17 @@ def p_cls_syntax_3 (t):
| PRIORITY Value
| ALWAYS RESPONDS BooleanValue
| IDEMPOTENT BooleanValue '''
- t[0] = { get_class_fieled(' '.join(t[1:-1])) : t[-1:][0] }
+ t[0] = { get_class_field(' '.join(t[1:-1])) : t[-1:][0] }
def p_cls_syntax_4 (t):
'''cls_syntax : ARGUMENT Type
| RESULT Type
| PARAMETER Type '''
- t[0] = { get_class_fieled(t[1]) : t[2] }
+ t[0] = { get_class_field(t[1]) : t[2] }
def p_cls_syntax_5 (t):
'cls_syntax : CODE Value'
- fld = get_class_fieled(t[1]);
+ fld = get_class_field(t[1])
t[0] = { fld : t[2] }
if isinstance(t[2], ChoiceValue):
fldt = fld + '.' + t[2].choice
@@ -7357,7 +7359,7 @@ def p_cls_syntax_6 (t):
'''cls_syntax : ARGUMENT Type OPTIONAL BooleanValue
| RESULT Type OPTIONAL BooleanValue
| PARAMETER Type OPTIONAL BooleanValue '''
- t[0] = { get_class_fieled(t[1]) : t[2], get_class_fieled(' '.join((t[1], t[3]))) : t[4] }
+ t[0] = { get_class_field(t[1]) : t[2], get_class_field(' '.join((t[1], t[3]))) : t[4] }
# 12 Information object set definition and assignment
@@ -7503,7 +7505,7 @@ def is_class_syntax(name):
return False
return name in class_syntaxes[class_current_syntax]
-def get_class_fieled(name):
+def get_class_field(name):
if not class_current_syntax:
return None
return class_syntaxes[class_current_syntax][name]
@@ -8064,13 +8066,14 @@ def ignore_comments(string):
return ''.join(chunks)
-def eth_main():
+def asn2wrs_main():
global input_file
global g_conform
global lexer
- print("ASN.1 to Wireshark dissector compiler");
+ global quiet
+
try:
- opts, args = getopt.getopt(sys.argv[1:], "h?d:D:buXp:FTo:O:c:I:eESs:kLCr:");
+ opts, args = getopt.getopt(sys.argv[1:], "h?d:D:buXp:qFTo:O:c:I:eESs:kLCr:")
except getopt.GetoptError:
eth_usage(); sys.exit(2)
if len(args) < 1:
@@ -8093,10 +8096,10 @@ def eth_main():
ectx.merge_modules = False
ectx.group_by_prot = False
ectx.conform.last_group = 0
- ectx.conform.suppress_line = False;
+ ectx.conform.suppress_line = False
ectx.output.outnm = None
ectx.output.single_file = None
- ectx.constraints_check = False;
+ ectx.constraints_check = False
for o, a in opts:
if o in ("-h", "-?"):
eth_usage(); sys.exit(2)
@@ -8112,24 +8115,29 @@ def eth_main():
if o in ("-C",):
ectx.constraints_check = True
if o in ("-L",):
- ectx.suppress_line = True
+ ectx.conform.suppress_line = True
+ if o in ("-q",):
+ quiet = True
if o in ("-X",):
warnings.warn("Command line option -X is obsolete and can be removed")
if o in ("-T",):
warnings.warn("Command line option -T is obsolete and can be removed")
+ if not quiet:
+ print("ASN.1 to Wireshark dissector compiler")
+
if conf_to_read:
ectx.conform.read(conf_to_read)
for o, a in opts:
- if o in ("-h", "-?", "-c", "-I", "-E", "-D", "-C", "-X", "-T"):
+ if o in ("-h", "-?", "-c", "-I", "-E", "-D", "-C", "-q", "-X", "-T"):
pass # already processed
else:
par = []
if a: par.append(a)
ectx.conform.set_opt(o, par, "commandline", 0)
- (ld, yd, pd) = (0, 0, 0);
+ (ld, yd, pd) = (0, 0, 0)
if ectx.dbg('l'): ld = 1
if ectx.dbg('y'): yd = 1
if ectx.dbg('p'): pd = 2
@@ -8144,12 +8152,11 @@ def eth_main():
if (ectx.srcdir): fn = ectx.srcdir + '/' + fn
# Read ASN.1 definition, trying one of the common encodings.
data = open(fn, "rb").read()
- for encoding in ('utf-8', 'windows-1252'):
- try:
- data = data.decode(encoding)
- break
- except Exception:
- warnings.warn_explicit("Decoding %s as %s failed, trying next." % (fn, encoding), UserWarning, '', 0)
+ try:
+ data = data.decode('utf-8')
+ except UnicodeDecodeError:
+ warnings.warn_explicit(f"Decoding {fn} as UTF-8 failed.", UnicodeWarning, '', 0)
+ sys.exit(3)
# Py2 compat, name.translate in eth_output_hf_arr fails with unicode
if not isinstance(data, str):
data = data.encode('utf-8')
@@ -8197,7 +8204,7 @@ def eth_main():
if ectx.dbg('o'):
ectx.output.dbg_print()
- ectx.output.make_single_file(ectx.suppress_line)
+ ectx.output.make_single_file(ectx.conform.suppress_line)
# Python compiler
@@ -8226,7 +8233,7 @@ def main():
if __name__ == '__main__':
if (os.path.splitext(os.path.basename(sys.argv[0]))[0].lower() in ('asn2wrs', 'asn2eth')):
- eth_main()
+ asn2wrs_main()
else:
main()
diff --git a/tools/asterix/README.md b/tools/asterix/README.md
index d7b2101f..e936930d 100644
--- a/tools/asterix/README.md
+++ b/tools/asterix/README.md
@@ -3,7 +3,7 @@
*Asterix* is a set of standards, where each standard is defined
as so called *asterix category*.
In addition, each *asterix category* is potentially released
-in number of editions. There is no guarantie about backward
+in number of editions. There is no guarantee about backward
compatibility between the editions.
The structured version of asterix specifications is maintained
diff --git a/tools/asterix/convertspec.py b/tools/asterix/convertspec.py
new file mode 100755
index 00000000..18f81798
--- /dev/null
+++ b/tools/asterix/convertspec.py
@@ -0,0 +1,339 @@
+#!/usr/bin/env python3
+#
+# By Zoran Bošnjak <zoran.bosnjak@sloveniacontrol.si>
+#
+# Convert json from new to old format
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+
+import sys
+import argparse
+import json
+
+def split(obj):
+ return (obj['tag'], obj['contents'])
+
+def handle_uap(obj):
+ t, cont = split(obj)
+ def f(i):
+ t, name = split(i)
+ if t == 'UapItem':
+ return name
+ elif t == 'UapItemRFS':
+ return 'RFS'
+ else:
+ return None
+ if t == 'Uap':
+ return {
+ 'type': 'uap',
+ 'items': [f(i) for i in cont],
+ }
+ elif t == 'Uaps':
+ def var(i):
+ name, lst = i
+ return {
+ 'name': name,
+ 'items': [f(i) for i in lst],
+ }
+ return {
+ 'type': 'uaps',
+ 'selector': {
+ 'name': cont['selector']['item'],
+ 'rules': cont['selector']['cases'],
+ },
+ 'variations': [var(i) for i in cont['cases']],
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def handle_number(obj):
+ t, cont = split(obj)
+ if t == 'NumInt':
+ return {
+ 'type': 'Integer',
+ 'value': cont,
+ }
+ elif t == 'NumDiv':
+ return {
+ 'type': 'Div',
+ 'numerator': handle_number(cont['numerator']),
+ 'denominator': handle_number(cont['denominator']),
+ }
+ elif t == 'NumPow':
+ return {
+ 'type': 'Pow',
+ 'base': cont['base'],
+ 'exponent': cont['exponent'],
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def handle_signedness(obj):
+ t, cont = split(obj)
+ if t == 'Signed':
+ return True
+ elif t == 'Unsigned':
+ return False
+ else:
+ raise Exception('unexpected', t)
+
+def handle_constrain(obj):
+ t, cont = split(obj)
+ if t == 'EqualTo': s = '=='
+ elif t == 'NotEqualTo': s = '/='
+ elif t == 'GreaterThan': s = '>'
+ elif t == 'GreaterThanOrEqualTo': s = '>='
+ elif t == 'LessThan': s = '<'
+ elif t == 'LessThanOrEqualTo': s = '<='
+ else:
+ raise Exception('unexpected', t)
+ return {
+ 'type': s,
+ 'value': handle_number(cont),
+ }
+
+def handle_content(obj):
+ t, cont = split(obj)
+ if t == 'ContentRaw':
+ return {
+ 'type': 'Raw',
+ }
+ elif t == 'ContentTable':
+ return {
+ 'type': 'Table',
+ 'values': cont,
+ }
+ elif t == 'ContentString':
+ return {
+ 'type': 'String',
+ 'variation': cont['tag'],
+ }
+ elif t == 'ContentInteger':
+ return {
+ 'type': 'Integer',
+ 'signed': handle_signedness(cont['signedness']),
+ 'constraints': [handle_constrain(i) for i in cont['constraints']],
+ }
+ elif t == 'ContentQuantity':
+ return {
+ 'type': 'Quantity',
+ 'constraints': [handle_constrain(i) for i in cont['constraints']],
+ 'lsb': handle_number(cont['lsb']),
+ 'signed': handle_signedness(cont['signedness']),
+ 'unit': cont['unit'],
+ }
+ elif t == 'ContentBds':
+ def f(obj):
+ t, cont = split(obj)
+ if t == 'BdsWithAddress':
+ return {
+ 'type': 'BdsWithAddress',
+ }
+ elif t == 'BdsAt':
+ return {
+ 'type': 'BdsAt',
+ 'address': hex(cont)[2:] if cont is not None else None,
+ }
+ else:
+ raise Exception('unexpected', t)
+ return {
+ 'type': 'Bds',
+ 'variation': f(cont),
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def handle_rule(f, obj):
+ t, cont = split(obj)
+ if t == 'ContextFree':
+ return {
+ 'type': 'ContextFree',
+ 'value': f(cont)
+ }
+ elif t == 'Dependent':
+ def g(i):
+ a, b = i
+ return [
+ a,
+ f(b),
+ ]
+ return {
+ 'type': 'Dependent',
+ 'items': cont['path'],
+ 'default': f(cont['default']),
+ 'cases': [g(i) for i in cont['cases']],
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def handle_item(obj):
+ t, cont = split(obj)
+ if t == 'Spare':
+ return {
+ 'length': cont,
+ 'spare': True,
+ }
+ elif t == 'Item':
+ return handle_nonspare(cont)
+ else:
+ raise Exception('unexpected', t)
+
+def handle_maybe(f, obj):
+ if obj is None:
+ return None
+ return f(obj)
+
+def handle_variation(obj):
+ t, cont = split(obj)
+ if t == 'Element':
+ return {
+ 'type': t,
+ 'size': cont['bitSize'],
+ 'rule': handle_rule(handle_content, cont['rule']),
+ }
+ elif t == 'Group':
+ return {
+ 'type': t,
+ 'items': [handle_item(i) for i in cont]
+ }
+ elif t == 'Extended':
+ return {
+ 'type': t,
+ 'items': [handle_maybe(handle_item, i) for i in cont],
+ }
+ elif t == 'Repetitive':
+ def f(obj):
+ t, cont = split(obj)
+ if t == 'RepetitiveRegular':
+ return {
+ 'type': 'Regular',
+ 'size': cont['byteSize']*8,
+ }
+ elif t == 'RepetitiveFx':
+ return {
+ 'type': 'Fx',
+ }
+ else:
+ raise Exception('unexpected', t)
+ return {
+ 'type': t,
+ 'rep': f(cont['type']),
+ 'variation': handle_variation(cont['variation']),
+ }
+ elif t == 'Explicit':
+ def f(obj):
+ if obj is None:
+ return None
+ t, cont = split(obj)
+ if t == 'ReservedExpansion':
+ return 'RE'
+ elif t == 'SpecialPurpose':
+ return 'SP'
+ else:
+ raise Exception('unexpected', t)
+ return {
+ 'type': t,
+ 'expl': f(cont),
+ }
+ elif t == 'Compound':
+ return {
+ 'type': t,
+ 'fspec': None,
+ 'items': [handle_maybe(handle_nonspare, i) for i in cont],
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def handle_nonspare(obj):
+ doc = obj['documentation']
+ return {
+ 'definition': doc['definition'],
+ 'description': doc['description'],
+ 'name': obj['name'],
+ 'remark': doc['remark'],
+ 'rule': handle_rule(handle_variation, obj['rule']),
+ 'spare': False,
+ 'title': obj['title'],
+ }
+
+def has_rfs(obj):
+ t, cont = split(obj)
+ def check(obj):
+ t, cont = split(obj)
+ return t == 'UapItemRFS'
+ if t == 'Uap':
+ return any(check(i) for i in cont)
+ elif t == 'Uaps':
+ for (uap_name, lst) in cont['cases']:
+ if any(check(i) for i in lst):
+ return True
+ return False
+ else:
+ raise Exception('unexpected', t)
+
+def handle_asterix(obj):
+ t, cont = split(obj)
+ if t == 'AsterixBasic':
+ catalogue = [handle_nonspare(i) for i in cont['catalogue']]
+ if has_rfs(cont['uap']):
+ catalogue.append({
+ "definition": "Random Field Sequencing\n",
+ "description": None,
+ "name": "RFS",
+ "remark": None,
+ "rule": {
+ "type": "ContextFree",
+ "value": {
+ "type": "Rfs"
+ }
+ },
+ "spare": False,
+ "title": "Random Field Sequencing",
+ })
+ return {
+ 'catalogue': catalogue,
+ 'date': cont['date'],
+ 'edition': cont['edition'],
+ 'number': cont['category'],
+ 'preamble': cont['preamble'],
+ 'title': cont['title'],
+ 'type': 'Basic',
+ 'uap': handle_uap(cont['uap']),
+ }
+ elif t == 'AsterixExpansion':
+ return {
+ 'date': cont['date'],
+ 'edition': cont['edition'],
+ 'number': cont['category'],
+ 'title': cont['title'],
+ 'type': 'Expansion',
+ 'variation': {
+ 'fspec': cont['fspecByteSize']*8,
+ 'items': [handle_maybe(handle_nonspare, i) for i in cont['items']],
+ 'type': 'Compound',
+ },
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def main():
+ parser = argparse.ArgumentParser(description='Convert json from new to old format.')
+ parser.add_argument('--in-place', action='store_true')
+ parser.add_argument('path')
+ args = parser.parse_args()
+
+ with open(args.path, 'r') as f:
+ s1 = f.read()
+
+ obj = handle_asterix(json.loads(s1))
+ s2 = json.dumps(obj, ensure_ascii=False, sort_keys=True, indent=4)
+
+ if args.in_place:
+ with open(args.path, 'w') as f:
+ f.write(s2)
+ else:
+ print(s2)
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/asterix/packet-asterix-template.c b/tools/asterix/packet-asterix-template.c
index e655cfd7..d584f1cf 100644
--- a/tools/asterix/packet-asterix-template.c
+++ b/tools/asterix/packet-asterix-template.c
@@ -49,22 +49,22 @@ void proto_reg_handoff_asterix(void);
#define MAX_DISSECT_STR 1024
#define MAX_BUFFER 256
-static int proto_asterix = -1;
-
-static int hf_asterix_category = -1;
-static int hf_asterix_length = -1;
-static int hf_asterix_message = -1;
-static int hf_asterix_fspec = -1;
-static int hf_re_field_len = -1;
-static int hf_spare = -1;
-static int hf_counter = -1;
-static int hf_XXX_FX = -1;
-
-static int ett_asterix = -1;
-static int ett_asterix_category = -1;
-static int ett_asterix_length = -1;
-static int ett_asterix_message = -1;
-static int ett_asterix_subtree = -1;
+static int proto_asterix;
+
+static int hf_asterix_category;
+static int hf_asterix_length;
+static int hf_asterix_message;
+static int hf_asterix_fspec;
+static int hf_re_field_len;
+static int hf_spare;
+static int hf_counter;
+static int hf_XXX_FX;
+
+static int ett_asterix;
+static int ett_asterix_category;
+static int ett_asterix_length;
+static int ett_asterix_message;
+static int ett_asterix_subtree;
static dissector_handle_t asterix_handle;
/* The following defines tell us how to decode the length of
@@ -102,22 +102,20 @@ struct FieldPart_s {
const char *format_string; /* format string for showing float values */
};
-DIAG_OFF_PEDANTIC
typedef struct AsterixField_s AsterixField;
struct AsterixField_s {
- uint8_t type; /* type of field */
- unsigned length; /* fixed length */
- unsigned repetition_counter_size; /* size of repetition counter, length of one item is in length */
- unsigned header_length; /* the size is in first header_length bytes of the field */
- int *hf; /* pointer to Wireshark hf_register_info */
- const FieldPart **part; /* Look declaration and description of FieldPart above. */
- const AsterixField *field[]; /* subfields */
+ uint8_t type; /* type of field */
+ unsigned length; /* fixed length */
+ unsigned repetition_counter_size; /* size of repetition counter, length of one item is in length */
+ unsigned header_length; /* the size is in first header_length bytes of the field */
+ int *hf; /* pointer to Wireshark hf_register_info */
+ const FieldPart * const *part; /* Look declaration and description of FieldPart above. */
+ const AsterixField * const field[]; /* subfields */
};
-DIAG_ON_PEDANTIC
static void dissect_asterix_packet (tvbuff_t *, packet_info *pinfo, proto_tree *);
static void dissect_asterix_data_block (tvbuff_t *tvb, packet_info *pinfo, unsigned, proto_tree *, uint8_t, int);
-static int dissect_asterix_fields (tvbuff_t *, packet_info *pinfo, unsigned, proto_tree *, uint8_t, const AsterixField *[]);
+static int dissect_asterix_fields (tvbuff_t *, packet_info *pinfo, unsigned, proto_tree *, uint8_t, const AsterixField * const []);
static void asterix_build_subtree (tvbuff_t *, packet_info *pinfo, unsigned, proto_tree *, const AsterixField *);
static void twos_complement (int64_t *, int);
@@ -125,8 +123,8 @@ static uint8_t asterix_bit (uint8_t, uint8_t);
static unsigned asterix_fspec_len (tvbuff_t *, unsigned);
static uint8_t asterix_field_exists (tvbuff_t *, unsigned, int);
static uint8_t asterix_get_active_uap (tvbuff_t *, unsigned, uint8_t);
-static int asterix_field_length (tvbuff_t *, unsigned, const AsterixField *);
-static int asterix_field_offset (tvbuff_t *, unsigned, const AsterixField *[], int);
+static int asterix_field_length (tvbuff_t *, unsigned, const AsterixField * const);
+static int asterix_field_offset (tvbuff_t *, unsigned, const AsterixField * const [], int);
static int asterix_message_length (tvbuff_t *, unsigned, uint8_t, uint8_t);
static const char AISCode[] = { ' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O',
@@ -149,7 +147,6 @@ static const FieldPart IXXX_6bit_spare = { 6, 1.0, FIELD_PART_UINT, NULL, NULL }
static const FieldPart IXXX_7bit_spare = { 7, 1.0, FIELD_PART_UINT, NULL, NULL };
/* Spare Item */
-DIAG_OFF_PEDANTIC
static const AsterixField IX_SPARE = { FIXED, 0, 0, 0, &hf_spare, NULL, { NULL } };
/* insert1 */
@@ -469,8 +466,8 @@ static void dissect_asterix_packet (tvbuff_t *tvb, packet_info *pinfo, proto_tre
* The User Application Profile (UAP) is simply a mapping from the
* FSPEC to fields. Each category has its own UAP.
*/
- category = tvb_get_guint8 (tvb, i);
- length = (tvb_get_guint8 (tvb, i + 1) << 8) + tvb_get_guint8 (tvb, i + 2) - 3; /* -3 for category and length */
+ category = tvb_get_uint8 (tvb, i);
+ length = (tvb_get_uint8 (tvb, i + 1) << 8) + tvb_get_uint8 (tvb, i + 2) - 3; /* -3 for category and length */
asterix_packet_item = proto_tree_add_item (tree, proto_asterix, tvb, i, length + 3, ENC_NA);
proto_item_append_text (asterix_packet_item, ", Category %03d", category);
@@ -513,7 +510,9 @@ static void dissect_asterix_data_block (tvbuff_t *tvb, packet_info *pinfo, unsig
}
}
-static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned offset, proto_tree *tree, uint8_t category, const AsterixField *current_uap[])
+// We're transported over UDP and our offset always advances.
+// NOLINTNEXTLINE(misc-no-recursion)
+static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned offset, proto_tree *tree, uint8_t category, const AsterixField * const current_uap [])
{
unsigned i, j, size, start, len, inner_offset, fspec_len;
uint64_t counter;
@@ -536,13 +535,13 @@ static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned o
asterix_field_tree = proto_item_add_subtree (asterix_field_item, ett_asterix_subtree);
fspec_len = asterix_fspec_len (tvb, offset + start);
proto_tree_add_item (asterix_field_tree, hf_asterix_fspec, tvb, offset + start, fspec_len, ENC_NA);
- dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, (const AsterixField **)current_uap[i]->field);
+ dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, current_uap[i]->field);
break;
case REPETITIVE:
asterix_field_item = proto_tree_add_item (tree, *current_uap[i]->hf, tvb, offset + start, len, ENC_NA);
asterix_field_tree = proto_item_add_subtree (asterix_field_item, ett_asterix_subtree);
for (j = 0, counter = 0; j < current_uap[i]->repetition_counter_size; j++) {
- counter = (counter << 8) + tvb_get_guint8 (tvb, offset + start + j);
+ counter = (counter << 8) + tvb_get_uint8 (tvb, offset + start + j);
}
proto_tree_add_item (asterix_field_tree, hf_counter, tvb, offset + start, current_uap[i]->repetition_counter_size, ENC_BIG_ENDIAN);
for (j = 0, inner_offset = 0; j < counter; j++, inner_offset += current_uap[i]->length) {
@@ -559,7 +558,7 @@ static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned o
start++;
fspec_len = asterix_fspec_len (tvb, offset + start);
proto_tree_add_item (asterix_field_tree, hf_asterix_fspec, tvb, offset + start, fspec_len, ENC_NA);
- dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, (const AsterixField **)current_uap[i]->field);
+ dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, current_uap[i]->field);
break;*/
default: /* FIXED, FX, FX_1, FX_UAP */
asterix_field_item = proto_tree_add_item (tree, *current_uap[i]->hf, tvb, offset + start, len, ENC_NA);
@@ -650,7 +649,7 @@ static void asterix_build_subtree (tvbuff_t *tvb, packet_info *pinfo, unsigned o
case FIELD_PART_IAS_IM:
/* special processing for I021/150 and I062/380#4 because Air Speed depends on IM subfield */
air_speed_im_bit = wmem_new (pinfo->pool, uint8_t);
- *air_speed_im_bit = (tvb_get_guint8 (tvb, offset_in_tvb) & 0x80) >> 7;
+ *air_speed_im_bit = (tvb_get_uint8 (tvb, offset_in_tvb) & 0x80) >> 7;
/* Save IM info for the packet. key = 21150. */
p_add_proto_data (pinfo->pool, pinfo, proto_asterix, 21150, air_speed_im_bit);
proto_tree_add_item (parent, *field->part[i]->hf, tvb, offset_in_tvb, length_in_tvb, ENC_BIG_ENDIAN);
@@ -683,8 +682,8 @@ static uint8_t asterix_bit (uint8_t b, uint8_t bitNo)
* If the number is positive, all other bits must remain 0. */
static void twos_complement (int64_t *v, int bit_len)
{
- if (*v & (G_GUINT64_CONSTANT(1) << (bit_len - 1))) {
- *v |= (G_GUINT64_CONSTANT(0xffffffffffffffff) << bit_len);
+ if (*v & (UINT64_C(1) << (bit_len - 1))) {
+ *v |= (UINT64_C(0xffffffffffffffff) << bit_len);
}
}
@@ -692,7 +691,7 @@ static unsigned asterix_fspec_len (tvbuff_t *tvb, unsigned offset)
{
unsigned i;
unsigned max_length = tvb_reported_length (tvb) - offset;
- for (i = 0; (tvb_get_guint8 (tvb, offset + i) & 1) && i < max_length; i++);
+ for (i = 0; (tvb_get_uint8 (tvb, offset + i) & 1) && i < max_length; i++);
return i + 1;
}
@@ -701,16 +700,20 @@ static uint8_t asterix_field_exists (tvbuff_t *tvb, unsigned offset, int bitInde
uint8_t bitNo, i;
bitNo = bitIndex + bitIndex / 7;
for (i = 0; i < bitNo / 8; i++) {
- if (!(tvb_get_guint8 (tvb, offset + i) & 1)) return 0;
+ if (!(tvb_get_uint8 (tvb, offset + i) & 1)) return 0;
}
- return asterix_bit (tvb_get_guint8 (tvb, offset + i), bitNo % 8);
+ return asterix_bit (tvb_get_uint8 (tvb, offset + i), bitNo % 8);
}
-static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixField *field)
+// We're transported over UDP and our offset always advances.
+// NOLINTNEXTLINE(misc-no-recursion)
+static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixField * const field)
{
+ unsigned bit_size;
unsigned size;
uint64_t count;
uint8_t i;
+ bool should_break;
size = 0;
switch(field->type) {
@@ -719,20 +722,26 @@ static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixFi
break;
case REPETITIVE:
for (i = 0, count = 0; i < field->repetition_counter_size && i < sizeof (count); i++)
- count = (count << 8) + tvb_get_guint8 (tvb, offset + i);
+ count = (count << 8) + tvb_get_uint8 (tvb, offset + i);
size = (unsigned)(field->repetition_counter_size + count * field->length);
break;
case FX:
- for (size = field->length + field->header_length; tvb_get_guint8 (tvb, offset + size - 1) & 1; size += field->length);
+ for (i = 0, bit_size = 0; field->part[i] != NULL; i++) {
+ // We don't need to shift value as FX bits are always at the end
+ should_break = field->part[i]->type == FIELD_PART_FX && !(tvb_get_uint8 (tvb, offset + bit_size / 8) & 1);
+ bit_size += field->part[i]->bit_length;
+ if (should_break) break;
+ }
+ size = bit_size / 8;
break;
case EXP:
for (i = 0, size = 0; i < field->header_length; i++) {
- size = (size << 8) + tvb_get_guint8 (tvb, offset + i);
+ size = (size << 8) + tvb_get_uint8 (tvb, offset + i);
}
break;
case COMPOUND:
/* FSPEC */
- for (size = 0; tvb_get_guint8 (tvb, offset + size) & 1; size++);
+ for (size = 0; tvb_get_uint8 (tvb, offset + size) & 1; size++);
size++;
for (i = 0; field->field[i] != NULL; i++) {
@@ -748,17 +757,17 @@ static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixFi
static uint8_t asterix_get_active_uap (tvbuff_t *tvb, unsigned offset, uint8_t category)
{
int i, inner_offset;
- AsterixField **current_uap;
+ AsterixField const * const *current_uap;
if ((category == 1) && (categories[category] != NULL)) { /* if category is supported */
if (categories[category][global_categories_version[category]][1] != NULL) { /* if exists another uap */
- current_uap = (AsterixField **)categories[category][global_categories_version[category]][0];
+ current_uap = categories[category][global_categories_version[category]][0];
if (current_uap != NULL) {
inner_offset = asterix_fspec_len (tvb, offset);
for (i = 0; current_uap[i] != NULL; i++) {
if (asterix_field_exists (tvb, offset, i)) {
if (i == 1) { /* uap selector (I001/020) is always at index '1' */
- return tvb_get_guint8 (tvb, offset + inner_offset) >> 7;
+ return tvb_get_uint8 (tvb, offset + inner_offset) >> 7;
}
inner_offset += asterix_field_length (tvb, offset + inner_offset, current_uap[i]);
}
@@ -769,7 +778,7 @@ static uint8_t asterix_get_active_uap (tvbuff_t *tvb, unsigned offset, uint8_t c
return 0;
}
-static int asterix_field_offset (tvbuff_t *tvb, unsigned offset, const AsterixField *current_uap[], int field_index)
+static int asterix_field_offset (tvbuff_t *tvb, unsigned offset, const AsterixField * const current_uap[], int field_index)
{
int i, inner_offset;
inner_offset = 0;
@@ -786,10 +795,10 @@ static int asterix_field_offset (tvbuff_t *tvb, unsigned offset, const AsterixFi
static int asterix_message_length (tvbuff_t *tvb, unsigned offset, uint8_t category, uint8_t active_uap)
{
int i, size;
- AsterixField **current_uap;
+ AsterixField const * const *current_uap;
if (categories[category] != NULL) { /* if category is supported */
- current_uap = (AsterixField **)categories[category][global_categories_version[category]][active_uap];
+ current_uap = categories[category][global_categories_version[category]][active_uap];
if (current_uap != NULL) {
size = asterix_fspec_len (tvb, offset);
for (i = 0; current_uap[i] != NULL; i++) {
diff --git a/tools/asterix/update-specs.py b/tools/asterix/update-specs.py
index 7af735dc..03850c50 100755
--- a/tools/asterix/update-specs.py
+++ b/tools/asterix/update-specs.py
@@ -20,6 +20,8 @@ import os
import sys
import re
+import convertspec as convert
+
# Path to default upstream repository
upstream_repo = 'https://zoranbosnjak.github.io/asterix-specs'
dissector_file = 'epan/dissectors/packet-asterix.c'
@@ -68,42 +70,15 @@ class Context(object):
self.offset = Offset()
def get_number(value):
- """Get Natural/Real/Rational number as an object."""
- class Integer(object):
- def __init__(self, val):
- self.val = val
- def __str__(self):
- return '{}'.format(self.val)
- def __float__(self):
- return float(self.val)
-
- class Ratio(object):
- def __init__(self, a, b):
- self.a = a
- self.b = b
- def __str__(self):
- return '{}/{}'.format(self.a, self.b)
- def __float__(self):
- return float(self.a) / float(self.b)
-
- class Real(object):
- def __init__(self, val):
- self.val = val
- def __str__(self):
- return '{0:f}'.format(self.val).rstrip('0')
- def __float__(self):
- return float(self.val)
-
t = value['type']
- val = value['value']
-
if t == 'Integer':
- return Integer(int(val))
- if t == 'Ratio':
- x, y = val['numerator'], val['denominator']
- return Ratio(x, y)
- if t == 'Real':
- return Real(float(val))
+ return float(value['value'])
+ if t == 'Div':
+ a = get_number(value['numerator'])
+ b = get_number(value['denominator'])
+ return a/b
+ if t == 'Pow':
+ return float(pow(value['base'], value['exponent']))
raise Exception('unexpected value type {}'.format(t))
def replace_string(s, mapping):
@@ -132,19 +107,10 @@ def safe_string(s):
def get_scaling(content):
"""Get scaling factor from the content."""
- k = content.get('scaling')
- if k is None:
+ lsb = content.get('lsb')
+ if lsb is None:
return None
- k = get_number(k)
-
- fract = content['fractionalBits']
-
- if fract > 0:
- scale = format(float(k) / (pow(2, fract)), '.29f')
- scale = scale.rstrip('0')
- else:
- scale = format(float(k))
- return scale
+ return '{}'.format(get_number(lsb))
def get_fieldpart(content):
"""Get FIELD_PART* from the content."""
@@ -297,14 +263,12 @@ def reference(cat, edition, path):
return('{:03d}_{}'.format(cat, name))
return('{:03d}_V{}_{}_{}'.format(cat, edition['major'], edition['minor'], name))
-def get_content(rule):
+def get_rule(rule):
t = rule['type']
- # Most cases are 'ContextFree', use as specified.
if t == 'ContextFree':
- return rule['content']
- # Handle 'Dependent' contents as 'Raw'.
+ return rule['value']
elif t == 'Dependent':
- return {'type': "Raw"}
+ return rule['default']
else:
raise Exception('unexpected type: {}'.format(t))
@@ -313,7 +277,7 @@ def get_bit_size(item):
if item['spare']:
return item['length']
else:
- return item['variation']['size']
+ return get_rule(item['rule'])['size']
def get_description(item, content=None):
"""Return item description."""
@@ -336,12 +300,18 @@ def generate_group(item, variation=None):
level2['is_generated'] = True
if variation is None:
level1 = copy(item)
- level1['variation'] = {
- 'type': 'Group',
- 'items': [level2],
+ level1['rule'] = {
+ 'type': 'ContextFree',
+ 'value': {
+ 'type': 'Group',
+ 'items': [level2],
+ },
}
else:
- level2['variation'] = variation['variation']
+ level2['rule'] = {
+ 'type': 'ContextFree',
+ 'value': variation,
+ }
level1 = {
'type': "Group",
'items': [level2],
@@ -353,15 +323,18 @@ def is_generated(item):
def ungroup(item):
"""Convert group of items of known size to element"""
- n = sum([get_bit_size(i) for i in item['variation']['items']])
+ n = sum([get_bit_size(i) for i in get_rule(item['rule'])['items']])
result = copy(item)
- result['variation'] = {
- 'rule': {
- 'content': {'type': 'Raw'},
- 'type': 'ContextFree',
+ result['rule'] = {
+ 'type': 'ContextFree',
+ 'value': {
+ 'type': 'Element',
+ 'size': n,
+ 'rule': {
+ 'type': 'ContextFree',
+ 'value': {'type': 'Raw'},
+ },
},
- 'size': n,
- 'type': 'Element',
}
return result
@@ -397,9 +370,9 @@ def part1(ctx, get_ref, catalogue):
return '&I{}_{}'.format(ref, item['name'])
if t == 'Element':
- tell('static int hf_{} = -1;'.format(ref))
+ tell('static int hf_{};'.format(ref))
n = variation['size']
- content = get_content(variation['rule'])
+ content = get_rule(variation['rule'])
scaling = get_scaling(content)
scaling = scaling if scaling is not None else 1.0
fp = get_fieldpart(content)
@@ -425,12 +398,12 @@ def part1(ctx, get_ref, catalogue):
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
- tell('static int hf_{} = -1;'.format(ref))
+ tell('static int hf_{};'.format(ref))
for i in variation['items']:
handle_item(path, i)
# FieldPart[]
- tell('static const FieldPart *I{}_PARTS[] = {}'.format(ref,'{'))
+ tell('static const FieldPart * const I{}_PARTS[] = {}'.format(ref,'{'))
for i in variation['items']:
tell(' {},'.format(part_of(i)))
tell(' NULL')
@@ -450,15 +423,15 @@ def part1(ctx, get_ref, catalogue):
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
- tell('static int hf_{} = -1;'.format(ref))
+ tell('static int hf_{};'.format(ref))
items = []
for i in variation['items']:
if i is None:
items.append(i)
continue
- if i.get('variation') is not None:
- if i['variation']['type'] == 'Group':
+ if i.get('rule') is not None:
+ if get_rule(i['rule'])['type'] == 'Group':
i = ungroup(i)
items.append(i)
@@ -468,7 +441,7 @@ def part1(ctx, get_ref, catalogue):
else:
handle_item(path, i)
- tell('static const FieldPart *I{}_PARTS[] = {}'.format(ref,'{'))
+ tell('static const FieldPart * const I{}_PARTS[] = {}'.format(ref,'{'))
for i in items:
if i is None:
tell(' &IXXX_FX,')
@@ -479,12 +452,10 @@ def part1(ctx, get_ref, catalogue):
tell('};')
# AsterixField
- first_part = list(takewhile(lambda x: x is not None, items))
- n = (sum([get_bit_size(i) for i in first_part]) + 1) // 8
parts = 'I{}_PARTS'.format(ref)
comp = '{ NULL }'
- tell('static const AsterixField I{} = {} FX, {}, 0, {}, &hf_{}, {}, {} {};'.format
- (ref, '{', n, 0, ref, parts, comp, '}'))
+ tell('static const AsterixField I{} = {} FX, 0, 0, 0, &hf_{}, {}, {} {};'.format
+ (ref, '{', ref, parts, comp, '}'))
elif t == 'Repetitive':
ctx.reset_offset()
@@ -492,7 +463,7 @@ def part1(ctx, get_ref, catalogue):
# Group is required below this item.
if variation['variation']['type'] == 'Element':
- subvar = generate_group(item, variation)
+ subvar = generate_group(item, variation['variation'])
else:
subvar = variation['variation']
handle_variation(path, subvar)
@@ -509,14 +480,14 @@ def part1(ctx, get_ref, catalogue):
elif t == 'Explicit':
ctx.reset_offset()
- tell('static int hf_{} = -1;'.format(ref))
+ tell('static int hf_{};'.format(ref))
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
tell('static const AsterixField I{} = {} EXP, 0, 0, 1, &hf_{}, NULL, {} NULL {} {};'.format(ref, '{', ref, '{', '}', '}'))
elif t == 'Compound':
ctx.reset_offset()
- tell('static int hf_{} = -1;'.format(ref))
+ tell('static int hf_{};'.format(ref))
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
comp = '{'
@@ -525,7 +496,7 @@ def part1(ctx, get_ref, catalogue):
comp += ' &IX_SPARE,'
continue
# Group is required below this item.
- if i['variation']['type'] == 'Element':
+ if get_rule(i['rule'])['type'] == 'Element':
subitem = generate_group(i)
else:
subitem = i
@@ -545,30 +516,36 @@ def part1(ctx, get_ref, catalogue):
return
# Group is required on the first level.
- if path == [] and item['variation']['type'] == 'Element':
- variation = generate_group(item)['variation']
+ if path == [] and get_rule(item['rule'])['type'] == 'Element':
+ variation = get_rule(generate_group(item)['rule'])
else:
- variation = item['variation']
+ variation = get_rule(item['rule'])
handle_variation(path + [item['name']], variation)
for item in catalogue:
# adjust 'repetitive fx' item
- if item['variation']['type'] == 'Repetitive' and item['variation']['rep']['type'] == 'Fx':
- var = item['variation']['variation'].copy()
+ if get_rule(item['rule'])['type'] == 'Repetitive' and get_rule(item['rule'])['rep']['type'] == 'Fx':
+ var = get_rule(item['rule'])['variation'].copy()
if var['type'] != 'Element':
raise Exception("Expecting 'Element'")
item = item.copy()
- item['variation'] = {
- 'type': 'Extended',
- 'items': [{
- 'definition': None,
- 'description': None,
- 'name': 'Subitem',
- 'remark': None,
- 'spare': False,
- 'title': 'Subitem',
- 'variation': var,
+ item['rule'] = {
+ 'type': 'ContextFree',
+ 'value': {
+ 'type': 'Extended',
+ 'items': [{
+ 'definition': None,
+ 'description': None,
+ 'name': 'Subitem',
+ 'remark': None,
+ 'spare': False,
+ 'title': 'Subitem',
+ 'rule': {
+ 'type': 'ContextFree',
+ 'value': var,
+ },
}, None]
+ }
}
handle_item([], item)
tell('')
@@ -577,7 +554,6 @@ def part2(ctx, ref, uap):
"""Generate UAPs"""
tell = lambda s: ctx.tell('insert1', s)
- tell('DIAG_OFF_PEDANTIC')
ut = uap['type']
if ut == 'uap':
@@ -588,7 +564,7 @@ def part2(ctx, ref, uap):
raise Exception('unexpected uap type {}'.format(ut))
for var in variations:
- tell('static const AsterixField *I{}_{}[] = {}'.format(ref, var['name'], '{'))
+ tell('static const AsterixField * const I{}_{}[] = {}'.format(ref, var['name'], '{'))
for i in var['items']:
if i is None:
tell(' &IX_SPARE,')
@@ -597,12 +573,11 @@ def part2(ctx, ref, uap):
tell(' NULL')
tell('};')
- tell('static const AsterixField **I{}[] = {}'.format(ref, '{'))
+ tell('static const AsterixField * const * const I{}[] = {}'.format(ref, '{'))
for var in variations:
tell(' I{}_{},'.format(ref, var['name']))
tell(' NULL')
tell('};')
- tell('DIAG_ON_PEDANTIC')
tell('')
def part3(ctx, specs):
@@ -620,9 +595,7 @@ def part3(ctx, specs):
editions = sorted([val['edition'] for val in lst], key = lambda x: (x['major'], x['minor']), reverse=True)
editions_fmt = [fmt_edition(cat, edition) for edition in editions]
editions_str = ', '.join(['I{:03d}'.format(cat)] + editions_fmt)
- tell('DIAG_OFF_PEDANTIC')
- tell('static const AsterixField ***I{:03d}all[] = {} {} {};'.format(cat, '{', editions_str, '}'))
- tell('DIAG_ON_PEDANTIC')
+ tell('static const AsterixField * const * const * const I{:03d}all[] = {} {} {};'.format(cat, '{', editions_str, '}'))
tell('')
tell('static const enum_val_t I{:03d}_versions[] = {}'.format(cat, '{'))
@@ -646,7 +619,7 @@ def part4(ctx, cats):
tell = lambda s: ctx.tell('insert1', s)
tell_pr = lambda s: ctx.tell('insert3', s)
- tell('static const AsterixField ****categories[] = {')
+ tell('static const AsterixField * const * const * const * const categories[] = {')
for i in range(0, 256):
val = 'I{:03d}all'.format(i) if i in cats else 'NULL'
tell(' {}, /* {:03d} */'.format(val, i))
@@ -683,7 +656,7 @@ def remove_rfs(spec):
catalogue = [] # create new catalogue without RFS
rfs_items = []
for i in spec['catalogue']:
- if i['variation']['type'] == 'Rfs':
+ if get_rule(i['rule'])['type'] == 'Rfs':
rfs_items.append(i['name'])
else:
catalogue.append(i)
@@ -716,7 +689,7 @@ def is_valid(spec):
def check_item(item):
if item['spare']:
return True
- return check_variation(item['variation'])
+ return check_variation(get_rule(item['rule']))
def check_variation(variation):
t = variation['type']
if t == 'Element':
@@ -757,6 +730,7 @@ def main():
# read and json-decode input files
jsons = load_jsons(args.paths)
jsons = [json.loads(i) for i in jsons]
+ jsons = [convert.handle_asterix(i) for i in jsons]
jsons = sorted(jsons, key = lambda x: (x['number'], x['edition']['major'], x['edition']['minor']))
jsons = [spec for spec in jsons if spec['type'] == 'Basic']
jsons = [remove_rfs(spec) for spec in jsons]
@@ -780,13 +754,15 @@ def main():
for spec in jsons:
is_latest = spec['edition'] == latest_editions[spec['number']]
- ctx.tell('insert1', '/* Category {:03d}, edition {}.{} */'.format(spec['number'], spec['edition']['major'], spec['edition']['minor']))
+ ctx.tell('insert1', '/* Category {:03d}, edition {}.{} */'.format(
+ spec['number'], spec['edition']['major'], spec['edition']['minor']))
# handle part1
get_ref = lambda path: reference(spec['number'], spec['edition'], path)
part1(ctx, get_ref, spec['catalogue'])
if is_latest:
- ctx.tell('insert1', '/* Category {:03d}, edition {}.{} (latest) */'.format(spec['number'], spec['edition']['major'], spec['edition']['minor']))
+ ctx.tell('insert1', '/* Category {:03d}, edition {}.{} (latest) */'.format(
+ spec['number'], spec['edition']['major'], spec['edition']['minor']))
get_ref = lambda path: reference(spec['number'], None, path)
part1(ctx, get_ref, spec['catalogue'])
@@ -826,4 +802,3 @@ def main():
if __name__ == '__main__':
main()
-
diff --git a/tools/bsd-setup.sh b/tools/bsd-setup.sh
index 6b018c69..2a48b28d 100755
--- a/tools/bsd-setup.sh
+++ b/tools/bsd-setup.sh
@@ -21,7 +21,7 @@
print_usage() {
printf "\\nUtility to setup a bsd-based system for Wireshark Development.\\n"
printf "The basic usage installs the needed software\\n\\n"
- printf "Usage: $0 [--install-optional] [...other options...]\\n"
+ printf "Usage: %s [--install-optional] [...other options...]\\n" "$0"
printf "\\t--install-optional: install optional software as well\\n"
printf "\\t[other]: other options are passed as-is to pkg manager.\\n"
}
@@ -44,7 +44,7 @@ for arg; do
done
# Check if the user is root
-if [ $(id -u) -ne 0 ]
+if [ "$(id -u)" -ne 0 ]
then
echo "You must be root."
exit 1
@@ -66,7 +66,6 @@ ADDITIONAL_LIST="\
libsmi \
brotli \
zstd \
- lua52 \
"
# Uncomment to add PNG compression utilities used by compress-pngs:
@@ -76,7 +75,7 @@ ADDITIONAL_LIST="\
# pngcrush"
# Guess which package manager we will use
-PM=`which pkgin 2> /dev/null || which pkg 2> /dev/null || which pkg_add 2> /dev/null`
+PM=$( which pkgin 2> /dev/null || which pkg 2> /dev/null || which pkg_add 2> /dev/null )
case $PM in
*/pkgin)
@@ -101,6 +100,7 @@ echo "Using $PM ($PM_SEARCH)"
# Adds package $2 to list variable $1 if the package is found
add_package() {
+ # shellcheck disable=SC3043
local list="$1" pkgname="$2"
# fail if the package is not known
@@ -174,9 +174,17 @@ echo "ninja is unavailable"
add_package ADDITIONAL_LIST libilbc ||
echo "libilbc is unavailable"
+# lua: OpenBSD latest (current 5.4)
+# lua54: FreeBSD, NetBSD 5.4.x
+# lua53 is also acceptable
+add_package ADDITIONAL_LIST lua ||
+add_package ADDITIONAL_LIST lua54 ||
+add_package ADDITIONAL_LIST lua53 ||
+echo "lua >= 5.3 is unavailable"
+
# Add OS-specific required/optional packages
# Those not listed don't require additions.
-case `uname` in
+case $( uname ) in
FreeBSD | NetBSD)
add_package ADDITIONAL_LIST libgcrypt || echo "libgcrypt is unavailable"
;;
@@ -190,6 +198,7 @@ then
ACTUAL_LIST="$ACTUAL_LIST $ADDITIONAL_LIST"
fi
+# shellcheck disable=SC2086
$PM $PM_OPTIONS $ACTUAL_LIST $OPTIONS
if [ ! $? ]
then
@@ -198,5 +207,5 @@ fi
if [ $ADDITIONAL -eq 0 ]
then
- echo -e "\n*** Optional packages not installed. Rerun with --install-optional to have them.\n"
+ printf "\\n*** Optional packages not installed. Rerun with --install-optional to have them.\\n"
fi
diff --git a/tools/checkAPIs.pl b/tools/checkAPIs.pl
index c9570b58..855718f0 100755
--- a/tools/checkAPIs.pl
+++ b/tools/checkAPIs.pl
@@ -454,14 +454,14 @@ sub check_value_string_arrays($$$)
my $expectedTrailer;
my $trailerHint;
if ($type eq "string_string") {
- # XXX shouldn't we reject 0 since it is gchar*?
+ # XXX shouldn't we reject 0 since it is char *?
$expectedTrailer = "(NULL|0), NULL";
$trailerHint = "NULL, NULL";
} elsif ($type eq "range_string") {
$expectedTrailer = "0(x0+)?, 0(x0+)?, NULL";
$trailerHint = "0, 0, NULL";
} elsif ($type eq "bytes_string") {
- # XXX shouldn't we reject 0 since it is guint8*?
+ # XXX shouldn't we reject 0 since it is uint8_t *?
$expectedTrailer = "(NULL|0), 0, NULL";
$trailerHint = "NULL, NULL";
} else {
@@ -797,6 +797,10 @@ sub check_hf_entries($$)
print STDERR "Error: $hf: BASE_EXT_STRING should use VALS_EXT_PTR for 'strings' instead of '$convert' in $filename\n";
$errorCount++;
}
+ if ($display =~ /BASE_UNIT_STRING/ && ($convert !~ m/^((0[xX]0?)?0$|NULL$|UNS)/)) {
+ print STDERR "Error: $hf: BASE_UNIT_STRING with non-null 'convert' field missing UNS in $filename\n";
+ $errorCount++;
+ }
if ($ft =~ m/^FT_U?INT(8|16|24|32)$/ && $convert =~ m/^VALS64\(/) {
print STDERR "Error: $hf: 32-bit field must use VALS instead of VALS64 in $filename\n";
$errorCount++;
@@ -809,8 +813,8 @@ sub check_hf_entries($$)
print STDERR "Error: $hf is passing the address of a pointer to $1 in $filename\n";
$errorCount++;
}
- if ($convert !~ m/^((0[xX]0?)?0$|NULL$|VALS|VALS64|VALS_EXT_PTR|RVALS|TFS|CF_FUNC|FRAMENUM_TYPE|&|STRINGS_ENTERPRISES)/ && $display !~ /BASE_CUSTOM/) {
- print STDERR "Error: non-null $hf 'convert' field missing 'VALS|VALS64|RVALS|TFS|CF_FUNC|FRAMENUM_TYPE|&|STRINGS_ENTERPRISES' in $filename ?\n";
+ if ($convert !~ m/^((0[xX]0?)?0$|NULL$|VALS|VALS64|VALS_EXT_PTR|RVALS|TFS|UNS|CF_FUNC|FRAMENUM_TYPE|&|STRINGS_ENTERPRISES)/ && $display !~ /BASE_CUSTOM/) {
+ print STDERR "Error: non-null $hf 'convert' field missing 'VALS|VALS64|RVALS|TFS|UNS|CF_FUNC|FRAMENUM_TYPE|&|STRINGS_ENTERPRISES' in $filename ?\n";
$errorCount++;
}
## Benign...
diff --git a/tools/check_col_apis.py b/tools/check_col_apis.py
new file mode 100755
index 00000000..eb8e1850
--- /dev/null
+++ b/tools/check_col_apis.py
@@ -0,0 +1,310 @@
+#!/usr/bin/env python3
+# Wireshark - Network traffic analyzer
+# By Gerald Combs <gerald@wireshark.org>
+# Copyright 1998 Gerald Combs
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+
+# Scan dissectors for calls to col_[set|add|append]_[f]str
+# to check that the most appropriate API is being used
+
+import os
+import re
+import subprocess
+import argparse
+import signal
+
+
+# Try to exit soon after Ctrl-C is pressed.
+should_exit = False
+
+def signal_handler(sig, frame):
+ global should_exit
+ should_exit = True
+ print('You pressed Ctrl+C - exiting')
+
+signal.signal(signal.SIGINT, signal_handler)
+
+
+# Test for whether the given file was automatically generated.
+def isGeneratedFile(filename):
+ # Check file exists - e.g. may have been deleted in a recent commit.
+ if not os.path.exists(filename):
+ return False
+
+ # Open file
+ f_read = open(os.path.join(filename), 'r', encoding="utf8")
+ lines_tested = 0
+ for line in f_read:
+ # The comment saying that it's generated is near the top, so give up once
+ # we get a few lines down.
+ if lines_tested > 10:
+ f_read.close()
+ return False
+ if (line.find('Generated automatically') != -1 or
+ line.find('Generated Automatically') != -1 or
+ line.find('Autogenerated from') != -1 or
+ line.find('is autogenerated') != -1 or
+ line.find('automatically generated by Pidl') != -1 or
+ line.find('Created by: The Qt Meta Object Compiler') != -1 or
+ line.find('This file was generated') != -1 or
+ line.find('This filter was automatically generated') != -1 or
+ line.find('This file is auto generated, do not edit!') != -1 or
+ line.find('This file is auto generated') != -1):
+
+ f_read.close()
+ return True
+ lines_tested = lines_tested + 1
+
+ # OK, looks like a hand-written file!
+ f_read.close()
+ return False
+
+
+def removeComments(code_string):
+ code_string = re.sub(re.compile(r"/\*.*?\*/",re.DOTALL ) ,"" ,code_string) # C-style comment
+ code_string = re.sub(re.compile(r"//.*?\n" ) ,"" ,code_string) # C++-style comment
+ return code_string
+
+
+def is_dissector_file(filename):
+ p = re.compile(r'.*(packet|file)-.*\.c')
+ return p.match(filename)
+
+def findDissectorFilesInFolder(folder, recursive=False):
+ dissector_files = []
+
+ if recursive:
+ for root, subfolders, files in os.walk(folder):
+ for f in files:
+ if should_exit:
+ return
+ f = os.path.join(root, f)
+ dissector_files.append(f)
+ else:
+ for f in sorted(os.listdir(folder)):
+ if should_exit:
+ return
+ filename = os.path.join(folder, f)
+ dissector_files.append(filename)
+
+ return [x for x in filter(is_dissector_file, dissector_files)]
+
+
+
+warnings_found = 0
+errors_found = 0
+
+class ColCall:
+ def __init__(self, file, line_number, name, last_args, generated, verbose):
+ self.filename = file
+ self.line_number = line_number
+ self.name = name
+ self.last_args = last_args
+ self.generated = generated
+ self.verbose = verbose
+
+ def issue_prefix(self):
+ generated = '(GENERATED) ' if self.generated else ''
+ return self.filename + ':' + generated + str(self.line_number) + ' : called ' + self.name + ' with ' + self.last_args
+
+ def check(self):
+ global warnings_found
+
+ self.last_args = self.last_args.replace('\\\"', "'")
+ self.last_args = self.last_args.strip()
+
+ # Empty string never a good idea
+ if self.last_args == r'""':
+ if self.name.find('append') == -1:
+ print('Warning:', self.issue_prefix(), '- if want to clear column, use col_clear() instead')
+ warnings_found += 1
+ else:
+ # TODO: pointless if appending, but unlikely to see
+ pass
+
+ # This is never a good idea..
+ if self.last_args.startswith(r'"%s"'):
+ print('Warning:', self.issue_prefix(), " - don't need fstr API?")
+ warnings_found += 1
+
+ # String should be static, or at least persist
+ if self.name == 'col_set_str':
+ # Literal strings are safe, as well as some other patterns..
+ if self.last_args.startswith('"'):
+ return
+ elif self.last_args.startswith('val_to_str_const') or self.last_args.startswith('val_to_str_ext_const'):
+ return
+ # TODO: substitute macros to avoid some special cases..
+ elif self.last_args.startswith('PSNAME') or self.last_args.startswith('PNAME') or self.last_args.startswith('PROTO_SHORT_NAME'):
+ return
+ # TODO: match ternary test with both outcomes being literal strings?
+ else:
+ if self.verbose:
+ # Not easy/possible to judge lifetime of string..
+ print('Note:', self.issue_prefix(), '- is this OK??')
+
+ if self.name == 'col_add_str':
+ # If literal string, could have used col_set_str instead?
+ self.last_args = self.last_args.replace('\\\"', "'")
+ self.last_args = self.last_args.strip()
+ if self.last_args.startswith('"'):
+ print('Warning:', self.issue_prefix(), '- could call col_set_str() instead')
+ warnings_found += 1
+ elif self.last_args.startswith('val_to_str_const'):
+ print('Warning:', self.issue_prefix(), '- const so could use col_set_str() instead')
+ warnings_found += 1
+ elif self.last_args.startswith('val_to_str_ext_const'):
+ print('Warning:', self.issue_prefix(), '- const so could use col_set_str() instead')
+ warnings_found += 1
+
+ if self.name == 'col_append_str':
+ pass
+ if self.name == 'col_add_fstr' or self.name == 'col_append_fstr':
+ # Look at format string
+ self.last_args = self.last_args.replace('\\\"', "'")
+ m = re.search(r'"(.*?)"', self.last_args)
+ if m:
+ # Should contain at least one format specifier!
+ format_string = m.group(1)
+ if format_string.find('%') == -1:
+ print('Warning:', self.issue_prefix(), 'with no format specifiers - "' + format_string + '" - use str() version instead')
+ warnings_found += 1
+
+
+# Check the given dissector file.
+def checkFile(filename, generated, verbose=False):
+ global warnings_found
+ global errors_found
+
+ # Check file exists - e.g. may have been deleted in a recent commit.
+ if not os.path.exists(filename):
+ print(filename, 'does not exist!')
+ return
+
+ with open(filename, 'r', encoding="utf8") as f:
+ full_contents = f.read()
+
+ # Remove comments so as not to trip up RE.
+ contents = removeComments(full_contents)
+
+ # Look for all calls in this file
+ matches = re.finditer(r'(col_set_str|col_add_str|col_add_fstr|col_append_str|col_append_fstr)\((.*?)\)\s*\;', contents, re.MULTILINE|re.DOTALL)
+ col_calls = []
+
+ last_line_number = 1
+ last_char_offset = 0
+
+ for m in matches:
+ args = m.group(2)
+
+ line_number = -1
+ # May fail to find if there were comments inside call...
+ # Make search partial to:
+ # - avoid finding an earlier identical call
+ # - speed up searching by making it shorter
+ remaining_lines_text = full_contents[last_char_offset:]
+ match_offset = remaining_lines_text.find(m.group(0))
+ if match_offset != -1:
+ match_in_lines = len(remaining_lines_text[0:match_offset].splitlines())
+ line_number = last_line_number + match_in_lines-1
+ last_line_number = line_number
+ last_char_offset += match_offset + 1 # enough to not match again
+
+ # Match first 2 args plus remainder
+ args_m = re.match(r'(.*?),\s*(.*?),\s*(.*)', args)
+ if args_m:
+ col_calls.append(ColCall(filename, line_number, m.group(1), last_args=args_m.group(3),
+ generated=generated, verbose=verbose))
+
+ # Check them all
+ for call in col_calls:
+ call.check()
+
+
+
+#################################################################
+# Main logic.
+
+# command-line args. Controls which dissector files should be checked.
+# If no args given, will scan all dissectors.
+parser = argparse.ArgumentParser(description='Check calls in dissectors')
+parser.add_argument('--file', action='append',
+ help='specify individual dissector file to test')
+parser.add_argument('--commits', action='store',
+ help='last N commits to check')
+parser.add_argument('--open', action='store_true',
+ help='check open files')
+parser.add_argument('--verbose', action='store_true',
+ help='show extra info')
+
+
+args = parser.parse_args()
+
+
+# Get files from wherever command-line args indicate.
+files = []
+if args.file:
+ # Add specified file(s)
+ for f in args.file:
+ if not os.path.isfile(f) and not f.startswith('epan'):
+ f = os.path.join('epan', 'dissectors', f)
+ if not os.path.isfile(f):
+ print('Chosen file', f, 'does not exist.')
+ exit(1)
+ else:
+ files.append(f)
+elif args.commits:
+ # Get files affected by specified number of commits.
+ command = ['git', 'diff', '--name-only', 'HEAD~' + args.commits]
+ files = [f.decode('utf-8')
+ for f in subprocess.check_output(command).splitlines()]
+ # Will examine dissector files only
+ files = list(filter(lambda f : is_dissector_file(f), files))
+elif args.open:
+ # Unstaged changes.
+ command = ['git', 'diff', '--name-only']
+ files = [f.decode('utf-8')
+ for f in subprocess.check_output(command).splitlines()]
+ # Only interested in dissector files.
+ files = list(filter(lambda f : is_dissector_file(f), files))
+ # Staged changes.
+ command = ['git', 'diff', '--staged', '--name-only']
+ files_staged = [f.decode('utf-8')
+ for f in subprocess.check_output(command).splitlines()]
+ # Only interested in dissector files.
+ files_staged = list(filter(lambda f : is_dissector_file(f), files_staged))
+ for f in files_staged:
+ if f not in files:
+ files.append(f)
+else:
+ # Find all dissector files from folder.
+ files = findDissectorFilesInFolder(os.path.join('epan', 'dissectors'))
+ files += findDissectorFilesInFolder(os.path.join('plugins', 'epan'), recursive=True)
+ files += findDissectorFilesInFolder(os.path.join('epan', 'dissectors', 'asn1'), recursive=True)
+
+
+# If scanning a subset of files, list them here.
+print('Examining:')
+if args.file or args.commits or args.open:
+ if files:
+ print(' '.join(files), '\n')
+ else:
+ print('No files to check.\n')
+else:
+ print('All dissectors\n')
+
+
+# Now check the chosen files
+for f in files:
+ if should_exit:
+ exit(1)
+
+ checkFile(f, isGeneratedFile(f), verbose=args.verbose)
+
+
+# Show summary.
+print(warnings_found, 'warnings found')
+if errors_found:
+ print(errors_found, 'errors found')
+ exit(1)
diff --git a/tools/check_dissector.py b/tools/check_dissector.py
index af1dc648..1461f66e 100755
--- a/tools/check_dissector.py
+++ b/tools/check_dissector.py
@@ -83,18 +83,19 @@ if args.file_list:
# Boolean arg is for whether build-dir is needed in order to run it.
# 3rd is Windows support.
tools = [
- ('tools/delete_includes.py --folder .', True, True),
- ('tools/check_spelling.py', False, True),
- ('tools/check_tfs.py --check-value-strings', False, True),
- ('tools/check_typed_item_calls.py --all-checks', False, True),
- ('tools/check_static.py', True, False),
- ('tools/check_dissector_urls.py', False, True),
- ('tools/check_val_to_str.py', False, True),
- ('tools/cppcheck/cppcheck.sh', False, True),
- ('tools/checkhf.pl', False, True),
- ('tools/checkAPIs.pl', False, True),
- ('tools/fix-encoding-args.pl', False, True),
- ('tools/checkfiltername.pl', False, True)
+ ('tools/delete_includes.py --folder .', True, True),
+ ('tools/check_spelling.py --comments --no-wikipedia', False, True),
+ ('tools/check_tfs.py --check-value-strings', False, True),
+ ('tools/check_typed_item_calls.py --all-checks', False, True),
+ ('tools/check_static.py', True, False),
+ ('tools/check_dissector_urls.py', False, True),
+ ('tools/check_val_to_str.py', False, True),
+ ('tools/check_col_apis.py', False, True),
+ ('tools/cppcheck/cppcheck.sh', False, True),
+ ('tools/checkhf.pl', False, True),
+ ('tools/checkAPIs.pl', False, True),
+ ('tools/fix-encoding-args.pl', False, True),
+ ('tools/checkfiltername.pl', False, True)
]
diff --git a/tools/check_dissector_urls.py b/tools/check_dissector_urls.py
index 373d88b8..96ee4d65 100755
--- a/tools/check_dissector_urls.py
+++ b/tools/check_dissector_urls.py
@@ -122,6 +122,9 @@ files = []
all_urls = set()
def find_links_in_file(filename):
+ if os.path.isdir(filename):
+ return
+
with open(filename, 'r', encoding="utf8") as f:
for line_number, line in enumerate(f, start=1):
# TODO: not matching
@@ -141,14 +144,21 @@ def find_links_in_file(filename):
all_urls.add(url)
-# Scan the given folder for links to test.
+# Scan the given folder for links to test. Recurses.
def find_links_in_folder(folder):
- # Look at files in sorted order, to give some idea of how far through it
- # is.
- for filename in sorted(os.listdir(folder)):
- if filename.endswith('.c'):
- global links
- find_links_in_file(os.path.join(folder, filename))
+ files_to_check = []
+ for root,subfolders,files in os.walk(folder):
+ for f in files:
+ if should_exit:
+ return
+ file = os.path.join(root, f)
+ if file.endswith('.c') or file.endswith('.adoc'):
+ files_to_check.append(file)
+
+ # Deal with files in sorted order.
+ for file in sorted(files_to_check):
+ find_links_in_file(file)
+
async def populate_cache(sem, session, url):
@@ -181,8 +191,8 @@ async def check_all_links(links):
except (asyncio.CancelledError):
await session.close()
- for l in links:
- l.validate()
+ for link in links:
+ link.validate()
#################################################################
@@ -199,12 +209,15 @@ parser.add_argument('--open', action='store_true',
help='check open files')
parser.add_argument('--verbose', action='store_true',
help='when enabled, show more output')
+parser.add_argument('--docs', action='store_true',
+ help='when enabled, also check document folders')
+
args = parser.parse_args()
def is_dissector_file(filename):
- p = re.compile(r'epan/dissectors/packet-.*\.c')
+ p = re.compile(r'.*(packet|file)-.*\.c')
return p.match(filename)
@@ -212,7 +225,7 @@ def is_dissector_file(filename):
if args.file:
# Add specified file(s)
for f in args.file:
- if not f.startswith('epan'):
+ if not os.path.isfile(f) and not f.startswith('epan'):
f = os.path.join('epan', 'dissectors', f)
if not os.path.isfile(f):
print('Chosen file', f, 'does not exist.')
@@ -246,10 +259,13 @@ elif args.open:
if f not in files:
find_links_in_file(f)
files.append(f)
+elif args.docs:
+ # Find links from doc folder(s)
+ find_links_in_folder(os.path.join(os.path.dirname(__file__), '..', 'doc'))
+
else:
# Find links from dissector folder.
- find_links_in_folder(os.path.join(os.path.dirname(
- __file__), '..', 'epan', 'dissectors'))
+ find_links_in_folder(os.path.join(os.path.dirname(__file__), '..', 'epan', 'dissectors'))
# If scanning a subset of files, list them here.
@@ -260,7 +276,10 @@ if args.file or args.commits or args.open:
else:
print('No files to check.\n')
else:
- print('All dissector modules\n')
+ if not args.docs:
+ print('All dissector modules\n')
+ else:
+ print('Document sources')
asyncio.run(check_all_links(links))
@@ -268,21 +287,21 @@ asyncio.run(check_all_links(links))
if os.path.exists('failures.txt'):
shutil.copyfile('failures.txt', 'failures_last_run.txt')
with open('failures.txt', 'w') as f_f:
- for l in links:
- if l.tested and not l.success:
- f_f.write(str(l) + '\n')
+ for link in links:
+ if link.tested and not link.success:
+ f_f.write(str(link) + '\n')
# And successes
with open('successes.txt', 'w') as f_s:
- for l in links:
- if l.tested and l.success:
- f_s.write(str(l) + '\n')
+ for link in links:
+ if link.tested and link.success:
+ f_s.write(str(link) + '\n')
# Count and show overall stats.
passed, failed = 0, 0
-for l in links:
- if l.tested:
- if l.success:
+for link in links:
+ if link.tested:
+ if link.success:
passed += 1
else:
failed += 1
diff --git a/tools/check_help_urls.py b/tools/check_help_urls.py
index ddf3673e..c9ad6f3f 100755
--- a/tools/check_help_urls.py
+++ b/tools/check_help_urls.py
@@ -22,7 +22,7 @@ with open("ui/help_url.c") as f:
chapter = url.group(1)
found[chapter] = False
-adoc_files = glob("docbook/wsug_src/*.adoc")
+adoc_files = glob("doc/wsug_src/*.adoc")
for adoc_file in adoc_files:
with open(adoc_file) as f:
diff --git a/tools/check_spelling.py b/tools/check_spelling.py
index 7e319081..be0bbf82 100755
--- a/tools/check_spelling.py
+++ b/tools/check_spelling.py
@@ -11,10 +11,18 @@ import re
import subprocess
import argparse
import signal
+import glob
+
+from spellchecker import SpellChecker
from collections import Counter
+from html.parser import HTMLParser
+import urllib.request
# Looks for spelling errors among strings found in source or documentation files.
-# N.B. To run this script, you should install pyspellchecker (not spellchecker) using pip.
+# N.B.,
+# - To run this script, you should install pyspellchecker (not spellchecker) using pip.
+# - Because of colouring, you may want to pipe into less -R
+
# TODO: check structured doxygen comments?
@@ -44,12 +52,12 @@ signal.signal(signal.SIGINT, signal_handler)
# Create spellchecker, and augment with some Wireshark words.
-from spellchecker import SpellChecker
# Set up our dict with words from text file.
spell = SpellChecker()
spell.word_frequency.load_text_file('./tools/wireshark_words.txt')
+
# Track words that were not found.
missing_words = []
@@ -67,7 +75,8 @@ class File:
self.values = []
filename, extension = os.path.splitext(file)
- self.code_file = extension in {'.c', '.cpp'}
+ # TODO: add '.lua'? Would also need to check string and comment formats...
+ self.code_file = extension in {'.c', '.cpp', '.h' }
with open(file, 'r', encoding="utf8") as f:
@@ -124,7 +133,6 @@ class File:
def checkMultiWordsRecursive(self, word):
length = len(word)
- #print('word=', word)
if length < 4:
return False
@@ -159,6 +167,12 @@ class File:
v = str(v)
+ # Sometimes parentheses are used to show optional letters, so don't leave a space
+ #if re.compile(r"^[\S]*\(").search(v):
+ # v = v.replace('(', '')
+ #if re.compile(r"\S\)").search(v):
+ # v = v.replace(')', '')
+
# Ignore includes.
if v.endswith('.h'):
continue
@@ -191,17 +205,19 @@ class File:
v = v.replace('?', ' ')
v = v.replace('=', ' ')
v = v.replace('*', ' ')
+ v = v.replace('%u', '')
+ v = v.replace('%d', '')
+ v = v.replace('%s', '')
v = v.replace('%', ' ')
v = v.replace('#', ' ')
v = v.replace('&', ' ')
v = v.replace('@', ' ')
v = v.replace('$', ' ')
+ v = v.replace('^', ' ')
v = v.replace('®', '')
v = v.replace("'", ' ')
v = v.replace('"', ' ')
- v = v.replace('%u', '')
- v = v.replace('%d', '')
- v = v.replace('%s', '')
+ v = v.replace('~', ' ')
# Split into words.
value_words = v.split()
@@ -225,11 +241,14 @@ class File:
if word.endswith("s’"):
word = word[:-2]
+
if self.numberPlusUnits(word):
continue
if len(word) > 4 and spell.unknown([word]) and not self.checkMultiWords(word) and not self.wordBeforeId(word):
- print(self.file, value_index, '/', num_values, '"' + original + '"', bcolors.FAIL + word + bcolors.ENDC,
+ # Highlight words that appeared in Wikipedia list.
+ print(bcolors.BOLD if word in wiki_db else '',
+ self.file, value_index, '/', num_values, '"' + original + '"', bcolors.FAIL + word + bcolors.ENDC,
' -> ', '?')
# TODO: this can be interesting, but takes too long!
@@ -261,9 +280,24 @@ def removeContractions(code_string):
def removeComments(code_string):
code_string = re.sub(re.compile(r"/\*.*?\*/", re.DOTALL), "" , code_string) # C-style comment
# Avoid matching // where it is allowed, e.g., https://www... or file:///...
- code_string = re.sub(re.compile(r"(?<!:)(?<!/)(?<!\")(?<!\"\s\s)(?<!file:/)//.*?\n" ) ,"" , code_string) # C++-style comment
+ code_string = re.sub(re.compile(r"(?<!:)(?<!/)(?<!\")(?<!\")(?<!\"\s\s)(?<!file:/)(?<!\,\s)//.*?\n" ) ,"" , code_string) # C++-style comment
return code_string
+def getCommentWords(code_string):
+ words = []
+
+ # C++ comments
+ matches = re.finditer(r'//\s(.*?)\n', code_string)
+ for m in matches:
+ words += m.group(1).split()
+
+ # C comments
+ matches = re.finditer(r'/\*(.*?)\*/', code_string)
+ for m in matches:
+ words += m.group(1).split()
+
+ return words
+
def removeSingleQuotes(code_string):
code_string = code_string.replace('\\\\', " ") # Separate at \\
code_string = code_string.replace('\"\\\\\"', "")
@@ -287,7 +321,7 @@ def removeHexSpecifiers(code_string):
# Create a File object that knows about all of the strings in the given file.
-def findStrings(filename):
+def findStrings(filename, check_comments=False):
with open(filename, 'r', encoding="utf8") as f:
contents = f.read()
@@ -302,7 +336,14 @@ def findStrings(filename):
# What we check depends upon file type.
if file.code_file:
+ # May want to check comments for selected dissectors
+ if check_comments:
+ comment_words = getCommentWords(contents)
+ for w in comment_words:
+ file.add(w)
+
contents = removeComments(contents)
+
# Code so only checking strings.
matches = re.finditer(r'\"([^\"]*)\"', contents)
for m in matches:
@@ -328,6 +369,9 @@ def isGeneratedFile(filename):
if filename.endswith('pci-ids.c') or filename.endswith('services-data.c') or filename.endswith('manuf-data.c'):
return True
+ if filename.endswith('packet-woww.c'):
+ return True
+
# Open file
f_read = open(os.path.join(filename), 'r', encoding="utf8")
for line_no,line in enumerate(f_read):
@@ -358,7 +402,8 @@ def isAppropriateFile(filename):
file, extension = os.path.splitext(filename)
if filename.find('CMake') != -1:
return False
- return extension in { '.adoc', '.c', '.cpp', '.pod', '.nsi', '.txt'} or file.endswith('README')
+ # TODO: add , '.lua' ?
+ return extension in { '.adoc', '.c', '.cpp', '.pod', '.txt' } or file.endswith('README')
def findFilesInFolder(folder, recursive=True):
@@ -382,13 +427,13 @@ def findFilesInFolder(folder, recursive=True):
# Check the given file.
-def checkFile(filename):
+def checkFile(filename, check_comments=False):
# Check file exists - e.g. may have been deleted in a recent commit.
if not os.path.exists(filename):
print(filename, 'does not exist!')
return
- file = findStrings(filename)
+ file = findStrings(filename, check_comments)
file.spellCheck()
@@ -401,17 +446,82 @@ def checkFile(filename):
parser = argparse.ArgumentParser(description='Check spellings in specified files')
parser.add_argument('--file', action='append',
help='specify individual file to test')
-parser.add_argument('--folder', action='store', default='',
+parser.add_argument('--folder', action='append',
help='specify folder to test')
+parser.add_argument('--glob', action='append',
+ help='specify glob to test - should give in "quotes"')
parser.add_argument('--no-recurse', action='store_true', default='',
- help='do not recurse inside chosen folder')
+ help='do not recurse inside chosen folder(s)')
parser.add_argument('--commits', action='store',
help='last N commits to check')
parser.add_argument('--open', action='store_true',
help='check open files')
+parser.add_argument('--comments', action='store_true',
+ help='check comments in source files')
+parser.add_argument('--no-wikipedia', action='store_true',
+ help='skip checking known bad words from wikipedia - can be slow')
+
args = parser.parse_args()
+class TypoSourceDocumentParser(HTMLParser):
+ def __init__(self):
+ super().__init__()
+ self.capturing = False
+ self.content = ''
+
+ def handle_starttag(self, tag, attrs):
+ if tag == 'pre':
+ self.capturing = True
+
+ def handle_endtag(self, tag):
+ if tag == 'pre':
+ self.capturing = False
+
+ def handle_data(self, data):
+ if self.capturing:
+ self.content += data
+
+
+# Fetch some common misspellings from Wikipedia so we will definitely flag them.
+wiki_db = dict()
+if not args.no_wikipedia:
+ print('Fetching Wikipedia\'s list of common misspellings.')
+ req_headers = { 'User-Agent': 'Wireshark check-wikipedia-typos' }
+ req = urllib.request.Request('https://en.wikipedia.org/wiki/Wikipedia:Lists_of_common_misspellings/For_machines', headers=req_headers)
+ try:
+ response = urllib.request.urlopen(req)
+ content = response.read()
+ content = content.decode('UTF-8', 'replace')
+
+ # Extract the "<pre>...</pre>" part of the document.
+ parser = TypoSourceDocumentParser()
+ parser.feed(content)
+ content = parser.content.strip()
+
+ wiki_db = dict(line.lower().split('->', maxsplit=1) for line in content.splitlines())
+ del wiki_db['cmo'] # All false positives.
+ del wiki_db['ect'] # Too many false positives.
+ del wiki_db['thru'] # We'll let that one thru. ;-)
+ del wiki_db['sargeant'] # All false positives.
+
+ # Remove each word from dict
+ removed = 0
+ for word in wiki_db:
+ try:
+ if should_exit:
+ exit(1)
+ spell.word_frequency.remove_words([word])
+ #print('Removed', word)
+ removed += 1
+ except Exception:
+ pass
+
+ print('Removed', removed, 'known bad words')
+ except Exception:
+ print('Failed to fetch and/or parse Wikipedia mispellings!')
+
+
# Get files from wherever command-line args indicate.
files = []
@@ -423,14 +533,15 @@ if args.file:
exit(1)
else:
files.append(f)
-elif args.commits:
+if args.commits:
# Get files affected by specified number of commits.
command = ['git', 'diff', '--name-only', 'HEAD~' + args.commits]
files = [f.decode('utf-8')
for f in subprocess.check_output(command).splitlines()]
# Filter files
files = list(filter(lambda f : os.path.exists(f) and isAppropriateFile(f) and not isGeneratedFile(f), files))
-elif args.open:
+
+if args.open:
# Unstaged changes.
command = ['git', 'diff', '--name-only']
files = [f.decode('utf-8')
@@ -444,26 +555,42 @@ elif args.open:
# Filter files.
files_staged = list(filter(lambda f : isAppropriateFile(f) and not isGeneratedFile(f), files_staged))
for f in files_staged:
- if not f in files:
+ if f not in files:
files.append(f)
-else:
- # By default, scan dissectors directory
- folder = os.path.join('epan', 'dissectors')
- # But overwrite with any folder entry.
- if args.folder:
- folder = args.folder
+
+if args.glob:
+ # Add specified file(s)
+ for g in args.glob:
+ for f in glob.glob(g):
+ if not os.path.isfile(f):
+ print('Chosen file', f, 'does not exist.')
+ exit(1)
+ else:
+ files.append(f)
+
+if args.folder:
+ for folder in args.folder:
if not os.path.isdir(folder):
print('Folder', folder, 'not found!')
exit(1)
+ # Find files from folder.
+ print('Looking for files in', folder)
+ files += findFilesInFolder(folder, not args.no_recurse)
+
+# By default, scan dissector files.
+if not args.file and not args.open and not args.commits and not args.glob and not args.folder:
+ # By default, scan dissectors directory
+ folder = os.path.join('epan', 'dissectors')
# Find files from folder.
print('Looking for files in', folder)
files = findFilesInFolder(folder, not args.no_recurse)
+
# If scanning a subset of files, list them here.
print('Examining:')
-if args.file or args.folder or args.commits or args.open:
+if args.file or args.folder or args.commits or args.open or args.glob:
if files:
print(' '.join(files), '\n')
else:
@@ -475,7 +602,7 @@ else:
# Now check the chosen files.
for f in files:
# Check this file.
- checkFile(f)
+ checkFile(f, check_comments=args.comments)
# But get out if control-C has been pressed.
if should_exit:
exit(1)
diff --git a/tools/check_static.py b/tools/check_static.py
index fbd1d11c..773c0d60 100755
--- a/tools/check_static.py
+++ b/tools/check_static.py
@@ -14,6 +14,9 @@ import signal
# Look for dissector symbols that could/should be static.
# This will not run on Windows, unless/until we check the platform
# and use (I think) dumpbin.exe
+#
+# N.B. Will report false positives if symbols are extern'd rather than
+# declared in a header file.
# Try to exit soon after Ctrl-C is pressed.
should_exit = False
@@ -26,7 +29,8 @@ def signal_handler(sig, frame):
signal.signal(signal.SIGINT, signal_handler)
# Allow this as a default build folder name...
-build_folder = os.getcwd() + '-build'
+build_folder = os.getcwd() + '-build'
+
# Record which symbols are referred to (by a set of files).
class CalledSymbols:
@@ -34,6 +38,9 @@ class CalledSymbols:
self.referred = set()
def addCalls(self, file):
+ if should_exit:
+ exit(1)
+
# Make sure that file is built.
last_dir = os.path.split(os.path.dirname(file))[-1]
if file.find('ui/cli') != -1:
@@ -47,42 +54,54 @@ class CalledSymbols:
else:
object_file = os.path.join(build_folder, os.path.dirname(file), 'CMakeFiles', last_dir + '.dir', os.path.basename(file) + '.o')
if not os.path.exists(object_file):
+ # Not built for whatever reason..
#print('Warning -', object_file, 'does not exist')
return
+
+ # Run command to check symbols.
command = ['nm', object_file]
for f in subprocess.check_output(command).splitlines():
- l = str(f)[2:-1]
- # Lines might or might not have an address before letter and symbol.
+ line = str(f)[2:-1]
+ # Lines might, or might not, have an address before letter and symbol.
p1 = re.compile(r'[0-9a-f]* ([a-zA-Z]) (.*)')
p2 = re.compile(r'[ ]* ([a-zA-Z]) (.*)')
- m = p1.match(l)
+ m = p1.match(line)
if not m:
- m = p2.match(l)
+ m = p2.match(line)
if m:
letter = m.group(1)
function_name = m.group(2)
- # Only interested in undefined references to symbols.
+ # Only interested in undefined/external references to symbols.
if letter == 'U':
self.referred.add(function_name)
-# Record which symbols are defined in a single file.
+# Record which symbols are defined in a single dissector file.
class DefinedSymbols:
def __init__(self, file):
self.filename = file
- self.global_dict = {}
+ self.global_symbols = {} # map from defined symbol -> whole output-line
self.header_file_contents = None
+ self.from_generated_file = isGeneratedFile(file)
# Make sure that file is built.
- object_file = os.path.join(build_folder, 'epan', 'dissectors', 'CMakeFiles', 'dissectors.dir', os.path.basename(file) + '.o')
-
+ if self.filename.startswith('epan'):
+ object_file = os.path.join(build_folder, 'epan', 'dissectors', 'CMakeFiles', 'dissectors.dir', os.path.basename(file) + '.o')
+ elif self.filename.startswith('plugins'):
+ plugin_base_dir = os.path.dirname(file)
+ plugin_base_name = os.path.basename(plugin_base_dir)
+ object_file = os.path.join(build_folder, plugin_base_dir, 'CMakeFiles', plugin_base_name + '.dir', os.path.basename(file) + '.o')
+ else:
+ #print("Warning - can't determine object file for ", self.filename)
+ return
if not os.path.exists(object_file):
#print('Warning -', object_file, 'does not exist')
return
+ # Get header file contents if available
header_file= file.replace('.c', '.h')
try:
f = open(header_file, 'r')
@@ -90,29 +109,30 @@ class DefinedSymbols:
except IOError:
pass
-
+ # Run command to see which symbols are defined
command = ['nm', object_file]
for f in subprocess.check_output(command).splitlines():
# Line consists of whitespace, [address], letter, symbolName
- l = str(f)[2:-1]
+ line = str(f)[2:-1]
p = re.compile(r'[0-9a-f]* ([a-zA-Z]) (.*)')
- m = p.match(l)
+ m = p.match(line)
if m:
letter = m.group(1)
function_name = m.group(2)
- # globally-defined symbols. Would be 't' or 'd' if already static.
+ # Globally-defined symbols. Would be 't' or 'd' if already static..
if letter in 'TD':
- self.add(function_name, l)
+ self.addDefinedSymbol(function_name, line)
- def add(self, letter, function_name):
- self.global_dict[letter] = function_name
+ def addDefinedSymbol(self, symbol, line):
+ self.global_symbols[symbol] = line
+ # Check if a given symbol is mentioned in headers
def mentionedInHeaders(self, symbol):
if self.header_file_contents:
if self.header_file_contents.find(symbol) != -1:
return True
# Also check some of the 'common' header files that don't match the dissector file name.
- # TODO: could cache the contents of these files, but it's not that slow.
+ # TODO: could cache the contents of these files?
common_mismatched_headers = [ os.path.join('epan', 'dissectors', 'packet-ncp-int.h'),
os.path.join('epan', 'dissectors', 'packet-mq.h'),
os.path.join('epan', 'dissectors', 'packet-ip.h'),
@@ -133,13 +153,15 @@ class DefinedSymbols:
return False
- def check(self, called_symbols):
+ def checkIfSymbolsAreCalled(self, called_symbols):
global issues_found
- for f in self.global_dict:
- if not f in called_symbols:
+ for f in self.global_symbols:
+ if f not in called_symbols:
mentioned_in_header = self.mentionedInHeaders(f)
- fun = self.global_dict[f]
- print(self.filename, '(' + fun + ')', 'is not referred to so could be static?', '(in header)' if mentioned_in_header else '')
+ fun = self.global_symbols[f]
+ print(self.filename, '' if not self.from_generated_file else '(GENERATED)',
+ '(' + fun + ')',
+ 'is not referred to so could be static?', '(declared in header)' if mentioned_in_header else '')
issues_found += 1
@@ -147,6 +169,7 @@ class DefinedSymbols:
# Helper functions.
def isDissectorFile(filename):
+ # Ignoring usb.c & errno.c
p = re.compile(r'(packet|file)-.*\.c')
return p.match(filename)
@@ -212,12 +235,10 @@ def findFilesInFolder(folder):
def is_dissector_file(filename):
- p = re.compile(r'.*packet-.*\.c')
+ p = re.compile(r'.*(packet|file)-.*\.c')
return p.match(filename)
-issues_found = 0
-
#################################################################
@@ -237,6 +258,7 @@ parser.add_argument('--open', action='store_true',
args = parser.parse_args()
+issues_found = 0
# Get files from wherever command-line args indicate.
files = []
@@ -247,7 +269,7 @@ if args.build_folder:
if args.file:
# Add specified file(s)
for f in args.file:
- if not f.startswith('epan'):
+ if not os.path.isfile(f) and not f.startswith('epan'):
f = os.path.join('epan', 'dissectors', f)
if not os.path.isfile(f):
print('Chosen file', f, 'does not exist.')
@@ -277,12 +299,12 @@ elif args.open:
for f in files:
files.append(f)
for f in files_staged:
- if not f in files:
+ if f not in files:
files.append(f)
else:
# Find all dissector files from folder.
files = findDissectorFilesInFolder(os.path.join('epan', 'dissectors'),
- include_generated=False)
+ include_generated=True)
# If scanning a subset of files, list them here.
@@ -316,11 +338,12 @@ for d in findFilesInFolder(os.path.join('ui', 'cli')):
called.addCalls(d)
-# Now check identified files.
+# Now check identified dissector files.
for f in files:
if should_exit:
exit(1)
- DefinedSymbols(f).check(called.referred)
+    # Are these symbols called - or could they be deleted or made static?
+ DefinedSymbols(f).checkIfSymbolsAreCalled(called.referred)
# Show summary.
print(issues_found, 'issues found')
diff --git a/tools/check_tfs.py b/tools/check_tfs.py
index cecf8d9d..f7c59377 100755
--- a/tools/check_tfs.py
+++ b/tools/check_tfs.py
@@ -12,12 +12,10 @@ import argparse
import signal
# This utility scans for tfs items, and works out if standard ones
-# could have been used intead (from epan/tfs.c)
+# could have been used instead (from epan/tfs.c)
# Can also check for value_string where common tfs could be used instead.
# TODO:
-# - check how many of the definitions in epan/tfs.c are used in other dissectors
-# - although even if unused, might be in external dissectors?
# - consider merging Item class with check_typed_item_calls.py ?
@@ -39,7 +37,7 @@ def isGeneratedFile(filename):
return False
# Open file
- f_read = open(os.path.join(filename), 'r')
+ f_read = open(os.path.join(filename), 'r', encoding="utf8", errors="ignore")
lines_tested = 0
for line in f_read:
# The comment to say that its generated is near the top, so give up once
@@ -70,60 +68,61 @@ def isGeneratedFile(filename):
# Keep track of custom entries that might appear in multiple dissectors,
# so we can consider adding them to tfs.c
custom_tfs_entries = {}
-def AddCustomEntry(val1, val2, file):
+def AddCustomEntry(true_val, false_val, file):
global custom_tfs_entries
- if (val1, val2) in custom_tfs_entries:
- custom_tfs_entries[(val1, val2)].append(file)
+ if (true_val, false_val) in custom_tfs_entries:
+ custom_tfs_entries[(true_val, false_val)].append(file)
else:
- custom_tfs_entries[(val1, val2)] = [file]
-
+ custom_tfs_entries[(true_val, false_val)] = [file]
+# Individual parsed TFS entry
class TFS:
- def __init__(self, file, name, val1, val2):
+ def __init__(self, file, name, true_val, false_val):
self.file = file
self.name = name
- self.val1 = val1
- self.val2 = val2
+ self.true_val = true_val
+ self.false_val = false_val
global warnings_found
# Should not be empty
- if not len(val1) or not len(val2):
+ if not len(true_val) or not len(false_val):
print('Warning:', file, name, 'has an empty field', self)
warnings_found += 1
#else:
# Strange if one begins with capital but other doesn't?
- #if val1[0].isalpha() and val2[0].isalpha():
- # if val1[0].isupper() != val2[0].isupper():
+ #if true_val[0].isalpha() and false_val[0].isalpha():
+ # if true_val[0].isupper() != false_val[0].isupper():
# print(file, name, 'one starts lowercase and the other upper', self)
# Leading or trailing space should not be needed.
- if val1.startswith(' ') or val1.endswith(' '):
- print('Note: ' + self.file + ' ' + self.name + ' - false val begins or ends with space \"' + self.val1 + '\"')
- if val2.startswith(' ') or val2.endswith(' '):
- print('Note: ' + self.file + ' ' + self.name + ' - true val begins or ends with space \"' + self.val2 + '\"')
+ if true_val.startswith(' ') or true_val.endswith(' '):
+ print('Note: ' + self.file + ' ' + self.name + ' - true val begins or ends with space \"' + self.true_val + '\"')
+ if false_val.startswith(' ') or false_val.endswith(' '):
+ print('Note: ' + self.file + ' ' + self.name + ' - false val begins or ends with space \"' + self.false_val + '\"')
# Should really not be identical...
- if val1.lower() == val2.lower():
+ if true_val.lower() == false_val.lower():
print('Warning:', file, name, 'true and false strings are the same', self)
warnings_found += 1
# Shouldn't both be negation (with exception..)
- if (file != os.path.join('epan', 'dissectors', 'packet-smb.c') and (val1.lower().find('not ') != -1) and (val2.lower().find('not ') != -1)):
+ if (file != os.path.join('epan', 'dissectors', 'packet-smb.c') and (true_val.lower().find('not ') != -1) and (false_val.lower().find('not ') != -1)):
print('Warning:', file, name, self, 'both strings contain not')
warnings_found += 1
# Not expecting full-stops inside strings..
- if val1.find('.') != -1 or val2.find('.') != -1:
+ if true_val.find('.') != -1 or false_val.find('.') != -1:
print('Warning:', file, name, 'Period found in string', self)
warnings_found += 1
def __str__(self):
- return '{' + '"' + self.val1 + '", "' + self.val2 + '"}'
+ return '{' + '"' + self.true_val + '", "' + self.false_val + '"}'
+# Only looking at these in terms of whether a common TFS could/should be used instead.
class ValueString:
def __init__(self, file, name, vals):
self.file = file
@@ -198,7 +197,7 @@ class Item:
self.strings = strings
self.mask = mask
- # N.B. Not sestting mask by looking up macros.
+ # N.B. Not setting mask by looking up macros.
self.item_type = item_type
self.type_modifier = type_modifier
@@ -210,16 +209,10 @@ class Item:
if self.check_bit(self.mask_value, n):
self.bits_set += 1
- def check_bit(self, value, n):
- return (value & (0x1 << n)) != 0
-
-
def __str__(self):
return 'Item ({0} "{1}" {2} type={3}:{4} strings={5} mask={6})'.format(self.filename, self.label, self.filter,
self.item_type, self.type_modifier, self.strings, self.mask)
-
-
def set_mask_value(self, macros):
try:
self.mask_read = True
@@ -227,12 +220,11 @@ class Item:
# Substitute mask if found as a macro..
if self.mask in macros:
self.mask = macros[self.mask]
- elif any(not c in '0123456789abcdefABCDEFxX' for c in self.mask):
+ elif any(c not in '0123456789abcdefABCDEFxX' for c in self.mask):
self.mask_read = False
self.mask_value = 0
return
-
# Read according to the appropriate base.
if self.mask.startswith('0x'):
self.mask_value = int(self.mask, 16)
@@ -240,7 +232,7 @@ class Item:
self.mask_value = int(self.mask, 8)
else:
self.mask_value = int(self.mask, 10)
- except:
+ except Exception:
self.mask_read = False
self.mask_value = 0
@@ -262,8 +254,7 @@ class Item:
try:
# For FT_BOOLEAN, modifier is just numerical number of bits. Round up to next nibble.
return int((int(self.type_modifier) + 3)/4)*4
- except:
- #print('oops', self)
+ except Exception:
return 0
else:
if self.item_type in field_widths:
@@ -289,7 +280,7 @@ def removeComments(code_string):
def findTFS(filename):
tfs_found = {}
- with open(filename, 'r', encoding="utf8") as f:
+ with open(filename, 'r', encoding="utf8", errors="ignore") as f:
contents = f.read()
# Example: const true_false_string tfs_yes_no = { "Yes", "No" };
@@ -299,10 +290,10 @@ def findTFS(filename):
matches = re.finditer(r'\sconst\s*true_false_string\s*([a-zA-Z0-9_]*)\s*=\s*{\s*\"([a-zA-Z_0-9/:! ]*)\"\s*,\s*\"([a-zA-Z_0-9/:! ]*)\"', contents)
for m in matches:
name = m.group(1)
- val1 = m.group(2)
- val2 = m.group(3)
+ true_val = m.group(2)
+ false_val = m.group(3)
# Store this entry.
- tfs_found[name] = TFS(filename, name, val1, val2)
+ tfs_found[name] = TFS(filename, name, true_val, false_val)
return tfs_found
@@ -317,7 +308,7 @@ def findValueStrings(filename):
# { 0, NULL }
#};
- with open(filename, 'r', encoding="utf8") as f:
+ with open(filename, 'r', encoding="utf8", errors="ignore") as f:
contents = f.read()
# Remove comments so as not to trip up RE.
@@ -333,9 +324,8 @@ def findValueStrings(filename):
# Look for hf items (i.e. full item to be registered) in a dissector file.
def find_items(filename, macros, check_mask=False, mask_exact_width=False, check_label=False, check_consecutive=False):
- is_generated = isGeneratedFile(filename)
items = {}
- with open(filename, 'r', encoding="utf8") as f:
+ with open(filename, 'r', encoding="utf8", errors="ignore") as f:
contents = f.read()
# Remove comments so as not to trip up RE.
contents = removeComments(contents)
@@ -354,7 +344,7 @@ def find_items(filename, macros, check_mask=False, mask_exact_width=False, check
def find_macros(filename):
macros = {}
- with open(filename, 'r', encoding="utf8") as f:
+ with open(filename, 'r', encoding="utf8", errors="ignore") as f:
contents = f.read()
# Remove comments so as not to trip up RE.
contents = removeComments(contents)
@@ -368,31 +358,32 @@ def find_macros(filename):
def is_dissector_file(filename):
- p = re.compile(r'.*packet-.*\.c')
+ p = re.compile(r'.*(packet|file)-.*\.c')
return p.match(filename)
def findDissectorFilesInFolder(folder):
- # Look at files in sorted order, to give some idea of how far through is.
- files = []
+ files = set()
- for f in sorted(os.listdir(folder)):
- if should_exit:
- return
- if is_dissector_file(f):
- filename = os.path.join(folder, f)
- files.append(filename)
- return files
+ for path, tmp_unused, names in os.walk(folder):
+ for f in names:
+ if should_exit:
+ return
+ if is_dissector_file(f):
+ files.add(os.path.join(path, f))
+ return files
+# Global counts
warnings_found = 0
errors_found = 0
+# name -> count
+common_usage = {}
-tfs_found = 0
# Check the given dissector file.
-def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=False):
+def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=False, count_common_usage=False):
global warnings_found
global errors_found
@@ -422,14 +413,15 @@ def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=F
#
if os.path.commonprefix([filename, 'plugin/epan/']) == '':
exact_case = False
- if file_tfs[f].val1 == common_tfs[c].val1 and file_tfs[f].val2 == common_tfs[c].val2:
+ if file_tfs[f].true_val == common_tfs[c].true_val and file_tfs[f].false_val == common_tfs[c].false_val:
found = True
exact_case = True
- elif file_tfs[f].val1.upper() == common_tfs[c].val1.upper() and file_tfs[f].val2.upper() == common_tfs[c].val2.upper():
+ elif file_tfs[f].true_val.upper() == common_tfs[c].true_val.upper() and file_tfs[f].false_val.upper() == common_tfs[c].false_val.upper():
found = True
if found:
- print("Error:" if exact_case else "Warn: ", filename, f, "- could have used", c, 'from tfs.c instead: ', common_tfs[c],
+ print("Error:" if exact_case else "Warning: ", filename, f,
+ "- could have used", c, 'from tfs.c instead: ', common_tfs[c],
'' if exact_case else ' (capitalisation differs)')
if exact_case:
errors_found += 1
@@ -438,7 +430,7 @@ def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=F
break
if not found:
if look_for_common:
- AddCustomEntry(file_tfs[f].val1, file_tfs[f].val2, filename)
+ AddCustomEntry(file_tfs[f].true_val, file_tfs[f].false_val, filename)
if check_value_strings:
# Get macros
@@ -456,7 +448,6 @@ def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=F
found = False
exact_case = False
- #print('Candidate', v, vs[v])
for c in common_tfs:
found = False
@@ -473,10 +464,10 @@ def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=F
#
if os.path.commonprefix([filename, 'plugin/epan/']) == '':
exact_case = False
- if common_tfs[c].val1 == vs[v].parsed_vals[True] and common_tfs[c].val2 == vs[v].parsed_vals[False]:
+ if common_tfs[c].true_val == vs[v].parsed_vals[True] and common_tfs[c].false_val == vs[v].parsed_vals[False]:
found = True
exact_case = True
- elif common_tfs[c].val1.upper() == vs[v].parsed_vals[True].upper() and common_tfs[c].val2.upper() == vs[v].parsed_vals[False].upper():
+ elif common_tfs[c].true_val.upper() == vs[v].parsed_vals[True].upper() and common_tfs[c].false_val.upper() == vs[v].parsed_vals[False].upper():
found = True
# Do values match?
@@ -488,11 +479,24 @@ def checkFile(filename, common_tfs, look_for_common=False, check_value_strings=F
if re.match(r'VALS\(\s*'+v+r'\s*\)', items[i].strings):
if items[i].bits_set == 1:
print("Warn:" if exact_case else "Note:", filename, 'value_string', "'"+v+"'",
- "- could have used", c, 'from tfs.c instead: ', common_tfs[c], 'for', i,
- '' if exact_case else ' (capitalisation differs)')
+ '- could have used tfs.c entry instead: for', i,
+ ' - "FT_BOOLEAN,', str(items[i].get_field_width_in_bits()) + ', TFS(&' + c + '),"',
+ '' if exact_case else ' (capitalisation differs)')
if exact_case:
warnings_found += 1
+ if count_common_usage:
+ # Look for TFS(&<name>) in dissector
+ with open(filename, 'r') as f:
+ contents = f.read()
+ for c in common_tfs:
+ m = re.search(r'TFS\(\s*\&' + c + r'\s*\)', contents)
+ if m:
+ if c not in common_usage:
+ common_usage[c] = 1
+ else:
+ common_usage[c] += 1
+
#################################################################
@@ -512,46 +516,46 @@ parser.add_argument('--check-value-strings', action='store_true',
parser.add_argument('--common', action='store_true',
help='check for potential new entries for tfs.c')
-
+parser.add_argument('--common-usage', action='store_true',
+ help='count how many dissectors are using common tfs entries')
args = parser.parse_args()
# Get files from wherever command-line args indicate.
-files = []
+files = set()
if args.file:
# Add specified file(s)
for f in args.file:
- if not f.startswith('epan'):
+ if not os.path.isfile(f) and not f.startswith('epan'):
f = os.path.join('epan', 'dissectors', f)
if not os.path.isfile(f):
print('Chosen file', f, 'does not exist.')
exit(1)
else:
- files.append(f)
+ files.add(f)
elif args.commits:
# Get files affected by specified number of commits.
command = ['git', 'diff', '--name-only', 'HEAD~' + args.commits]
- files = [f.decode('utf-8')
- for f in subprocess.check_output(command).splitlines()]
+ files = {f.decode('utf-8')
+ for f in subprocess.check_output(command).splitlines()}
# Will examine dissector files only
- files = list(filter(lambda f : is_dissector_file(f), files))
+ files = set(filter(is_dissector_file, files))
elif args.open:
# Unstaged changes.
command = ['git', 'diff', '--name-only']
- files = [f.decode('utf-8')
- for f in subprocess.check_output(command).splitlines()]
+ files = {f.decode('utf-8')
+ for f in subprocess.check_output(command).splitlines()}
# Only interested in dissector files.
- files = list(filter(lambda f : is_dissector_file(f), files))
+ files = list(filter(is_dissector_file, files))
# Staged changes.
command = ['git', 'diff', '--staged', '--name-only']
- files_staged = [f.decode('utf-8')
- for f in subprocess.check_output(command).splitlines()]
+ files_staged = {f.decode('utf-8')
+ for f in subprocess.check_output(command).splitlines()}
# Only interested in dissector files.
- files_staged = list(filter(lambda f : is_dissector_file(f), files_staged))
+ files = set(filter(is_dissector_file, files_staged))
for f in files_staged:
- if not f in files:
- files.append(f)
+ files.add(f)
else:
# Find all dissector files from folder.
files = findDissectorFilesInFolder(os.path.join('epan', 'dissectors'))
@@ -561,7 +565,7 @@ else:
print('Examining:')
if args.file or args.commits or args.open:
if files:
- print(' '.join(files), '\n')
+ print(' '.join(sorted(files)), '\n')
else:
print('No files to check.\n')
else:
@@ -569,14 +573,17 @@ else:
# Get standard/ shared ones.
-tfs_entries = findTFS(os.path.join('epan', 'tfs.c'))
+common_tfs_entries = findTFS(os.path.join('epan', 'tfs.c'))
# Now check the files to see if they could have used shared ones instead.
-for f in files:
+# Look at files in sorted order, to give some idea of how far through we are.
+for f in sorted(files):
if should_exit:
exit(1)
if not isGeneratedFile(f):
- checkFile(f, tfs_entries, look_for_common=args.common, check_value_strings=args.check_value_strings)
+ checkFile(f, common_tfs_entries, look_for_common=args.common,
+ check_value_strings=args.check_value_strings,
+ count_common_usage=args.common_usage)
# Report on commonly-defined values.
if args.common:
@@ -587,6 +594,12 @@ if args.common:
if len(custom_tfs_entries[c]) > 2:
print(c, 'appears', len(custom_tfs_entries[c]), 'times, in: ', custom_tfs_entries[c])
+if args.common_usage:
+ for c in common_tfs_entries:
+ if c in common_usage:
+ print(c, 'used in', common_usage[c], 'dissectors')
+ else:
+ print('***', c, 'IS NOT USED! ***')
# Show summary.
print(warnings_found, 'warnings found')
diff --git a/tools/check_typed_item_calls.py b/tools/check_typed_item_calls.py
index 24520c6e..3923264e 100755
--- a/tools/check_typed_item_calls.py
+++ b/tools/check_typed_item_calls.py
@@ -47,12 +47,12 @@ class Call:
if length:
try:
self.length = int(length)
- except:
+ except Exception:
if length.isupper():
if length in macros:
try:
self.length = int(macros[length])
- except:
+ except Exception:
pass
pass
@@ -84,6 +84,9 @@ item_lengths['FT_INT56'] = 7
item_lengths['FT_UINT64'] = 8
item_lengths['FT_INT64'] = 8
item_lengths['FT_ETHER'] = 6
+item_lengths['FT_IPv4'] = 4
+item_lengths['FT_IPv6'] = 16
+
# TODO: other types...
@@ -97,16 +100,16 @@ class APICheck:
if fun_name.startswith('ptvcursor'):
# RE captures function name + 1st 2 args (always ptvc + hfindex)
- self.p = re.compile('[^\n]*' + self.fun_name + '\s*\(([a-zA-Z0-9_]+),\s*([a-zA-Z0-9_]+)')
+ self.p = re.compile('[^\n]*' + self.fun_name + r'\s*\(([a-zA-Z0-9_]+),\s*([a-zA-Z0-9_]+)')
elif fun_name.find('add_bitmask') == -1:
# Normal case.
# RE captures function name + 1st 2 args (always tree + hfindex + length)
- self.p = re.compile('[^\n]*' + self.fun_name + '\s*\(([a-zA-Z0-9_]+),\s*([a-zA-Z0-9_]+),\s*[a-zA-Z0-9_]+,\s*[a-zA-Z0-9_]+,\s*([a-zA-Z0-9_]+)')
+ self.p = re.compile('[^\n]*' + self.fun_name + r'\s*\(([a-zA-Z0-9_]+),\s*([a-zA-Z0-9_]+),\s*[a-zA-Z0-9_]+,\s*[a-zA-Z0-9_]+,\s*([a-zA-Z0-9_]+)')
else:
# _add_bitmask functions.
# RE captures function name + 1st + 4th args (always tree + hfindex)
# 6th arg is 'fields'
- self.p = re.compile('[^\n]*' + self.fun_name + '\s*\(([a-zA-Z0-9_]+),\s*[a-zA-Z0-9_]+,\s*[a-zA-Z0-9_]+,\s*([a-zA-Z0-9_]+)\s*,\s*[a-zA-Z0-9_]+\s*,\s*([a-zA-Z0-9_]+)\s*,')
+ self.p = re.compile('[^\n]*' + self.fun_name + r'\s*\(([a-zA-Z0-9_]+),\s*[a-zA-Z0-9_]+,\s*[a-zA-Z0-9_]+,\s*([a-zA-Z0-9_]+)\s*,\s*[a-zA-Z0-9_]+\s*,\s*([a-zA-Z0-9_]+)\s*,')
self.file = None
self.mask_allowed = True
@@ -145,7 +148,6 @@ class APICheck:
length = m.group(3)
# Add call. We have length if re had 3 groups.
- num_groups = self.p.groups
self.calls.append(Call(m.group(2),
macros,
line_number=line_number,
@@ -160,7 +162,6 @@ class APICheck:
# Walk past any l.s. 0 bits in value
n = 0
- mask_start = n
# Walk through any bits that are set and check they are in mask
while self.check_bit(value, n) and n <= 63:
if not self.check_bit(mask, n):
@@ -180,13 +181,15 @@ class APICheck:
if self.fun_name.find('add_bits') == -1 and call.hf_name in items_defined:
if call.length and items_defined[call.hf_name].item_type in item_lengths:
if item_lengths[items_defined[call.hf_name].item_type] < call.length:
- print('Warning:', self.file + ':' + str(call.line_number),
- self.fun_name + ' called for', call.hf_name, ' - ',
- 'item type is', items_defined[call.hf_name].item_type, 'but call has len', call.length)
- warnings_found += 1
+                        # Don't warn if adding a value - the value is unlikely to be just the bytes value.
+ if self.fun_name.find('_add_uint') == -1:
+ print('Warning:', self.file + ':' + str(call.line_number),
+ self.fun_name + ' called for', call.hf_name, ' - ',
+ 'item type is', items_defined[call.hf_name].item_type, 'but call has len', call.length)
+ warnings_found += 1
# Needs a +ve length
- if self.positive_length and call.length != None:
+ if self.positive_length and call.length is not None:
if call.length != -1 and call.length <= 0:
print('Error: ' + self.fun_name + '(.., ' + call.hf_name + ', ...) called at ' +
self.file + ':' + str(call.line_number) +
@@ -195,7 +198,7 @@ class APICheck:
if call.hf_name in items_defined:
# Is type allowed?
- if not items_defined[call.hf_name].item_type in self.allowed_types:
+ if items_defined[call.hf_name].item_type not in self.allowed_types:
print('Error: ' + self.fun_name + '(.., ' + call.hf_name + ', ...) called at ' +
self.file + ':' + str(call.line_number) +
' with type ' + items_defined[call.hf_name].item_type)
@@ -221,7 +224,7 @@ class APICheck:
warnings_found += 1
if check_missing_items:
- if call.hf_name in items_declared and not call.hf_name in items_declared_extern:
+ if call.hf_name in items_declared and call.hf_name not in items_defined and call.hf_name not in items_declared_extern:
#not in common_hf_var_names:
print('Warning:', self.file + ':' + str(call.line_number),
self.fun_name + ' called for "' + call.hf_name + '"', ' - but no item found')
@@ -237,15 +240,15 @@ class ProtoTreeAddItemCheck(APICheck):
if not ptv:
# proto_item *
# proto_tree_add_item(proto_tree *tree, int hfindex, tvbuff_t *tvb,
- # const gint start, gint length, const guint encoding)
+ # const gint start, gint length, const unsigned encoding)
self.fun_name = 'proto_tree_add_item'
- self.p = re.compile('[^\n]*' + self.fun_name + '\s*\(\s*[a-zA-Z0-9_]+?,\s*([a-zA-Z0-9_]+?),\s*[a-zA-Z0-9_\+\s]+?,\s*[^,.]+?,\s*(.+),\s*([^,.]+?)\);')
+ self.p = re.compile('[^\n]*' + self.fun_name + r'\s*\(\s*[a-zA-Z0-9_]+?,\s*([a-zA-Z0-9_]+?),\s*[a-zA-Z0-9_\+\s]+?,\s*[^,.]+?,\s*(.+),\s*([^,.]+?)\);')
else:
# proto_item *
# ptvcursor_add(ptvcursor_t *ptvc, int hfindex, gint length,
- # const guint encoding)
+ # const unsigned encoding)
self.fun_name = 'ptvcursor_add'
- self.p = re.compile('[^\n]*' + self.fun_name + '\s*\([^,.]+?,\s*([^,.]+?),\s*([^,.]+?),\s*([a-zA-Z0-9_\-\>]+)')
+ self.p = re.compile('[^\n]*' + self.fun_name + r'\s*\([^,.]+?,\s*([^,.]+?),\s*([^,.]+?),\s*([a-zA-Z0-9_\-\>]+)')
def find_calls(self, file, macros):
@@ -279,7 +282,7 @@ class ProtoTreeAddItemCheck(APICheck):
enc = m.group(3)
hf_name = m.group(1)
if not enc.startswith('ENC_'):
- if not enc in { 'encoding', 'enc', 'client_is_le', 'cigi_byte_order', 'endian', 'endianess', 'machine_encoding', 'byte_order', 'bLittleEndian',
+ if enc not in { 'encoding', 'enc', 'client_is_le', 'cigi_byte_order', 'endian', 'endianess', 'machine_encoding', 'byte_order', 'bLittleEndian',
'p_mq_parm->mq_str_enc', 'p_mq_parm->mq_int_enc',
'iEnc', 'strid_enc', 'iCod', 'nl_data->encoding',
'argp->info->encoding', 'gquic_info->encoding', 'writer_encoding',
@@ -306,7 +309,9 @@ class ProtoTreeAddItemCheck(APICheck):
'BASE_SHOW_UTF_8_PRINTABLE',
'dhcp_secs_endian',
'is_mdns ? ENC_UTF_8|ENC_NA : ENC_ASCII|ENC_NA',
- 'xl_encoding'
+ 'xl_encoding',
+ 'my_frame_data->encoding_client', 'my_frame_data->encoding_results'
+
}:
global warnings_found
@@ -328,12 +333,15 @@ class ProtoTreeAddItemCheck(APICheck):
if call.hf_name in items_defined:
if call.length and items_defined[call.hf_name].item_type in item_lengths:
if item_lengths[items_defined[call.hf_name].item_type] < call.length:
- print('Warning:', self.file + ':' + str(call.line_number),
- self.fun_name + ' called for', call.hf_name, ' - ',
- 'item type is', items_defined[call.hf_name].item_type, 'but call has len', call.length)
- warnings_found += 1
+ # On balance, it is not worth complaining about these - the value is unlikely to be
+                        # just the value found in these bytes.
+ if self.fun_name.find('_add_uint') == -1:
+ print('Warning:', self.file + ':' + str(call.line_number),
+ self.fun_name + ' called for', call.hf_name, ' - ',
+ 'item type is', items_defined[call.hf_name].item_type, 'but call has len', call.length)
+ warnings_found += 1
elif check_missing_items:
- if call.hf_name in items_declared and not call.hf_name in items_declared_extern:
+ if call.hf_name in items_declared and call.hf_name not in items_declared_extern:
#not in common_hf_var_names:
print('Warning:', self.file + ':' + str(call.line_number),
self.fun_name + ' called for "' + call.hf_name + '"', ' - but no item found')
@@ -371,7 +379,9 @@ known_non_contiguous_fields = { 'wlan.fixed.capabilities.cfpoll.sta',
'hf_hiqnet_flags',
'hf_hiqnet_flagmask',
'hf_h223_mux_mpl',
- 'rdp.flags.pkt'
+ 'rdp.flags.pkt',
+ 'erf.flags.if_raw', # confirmed by Stephen Donnelly
+ 'oran_fh_cus.sReSMask'
}
##################################################################################################
@@ -397,138 +407,157 @@ field_widths = {
'FT_INT64' : 64
}
-# TODO: most of these might as well be strings...
def is_ignored_consecutive_filter(filter):
+ ignore_filters = {
+ 'elf.sh_type',
+ 'elf.p_type',
+ 'btavrcp.pdu_id',
+ 'netlogon.dummy_string',
+ 'opa.reserved',
+ 'wassp.data.mu_mac',
+ 'thrift.type',
+ 'quake2.game.client.command.move.angles',
+ 'ipp.enum_value',
+ 'idrp.error.subcode',
+ 'ftdi-ft.lValue',
+ '6lowpan.src',
+ 'couchbase.flex_frame.frame.id',
+ 'rtps.param.id',
+ 'rtps.locator.port',
+ 'sigcomp.udvm.value',
+ 'opa.mad.attributemodifier.n',
+ 'smb.cmd',
+ 'sctp.checksum',
+ 'dhcp.option.end',
+ 'nfapi.num.bf.vector.bf.value',
+ 'dnp3.al.range.abs',
+ 'dnp3.al.range.quantity',
+ 'dnp3.al.index',
+ 'dnp3.al.size',
+ 'ftdi-ft.hValue',
+ 'homeplug_av.op_attr_cnf.data.sw_sub',
+ 'radiotap.he_mu.preamble_puncturing',
+ 'ndmp.file',
+ 'ocfs2.dlm.lvb',
+ 'oran_fh_cus.reserved',
+ 'qnet6.kif.msgsend.msg.read.xtypes0-7',
+ 'qnet6.kif.msgsend.msg.write.xtypes0-7',
+ 'mih.sig_strength',
+ 'couchbase.flex_frame.frame.len',
+ 'nvme-rdma.read_to_host_req',
+ 'rpcap.dummy',
+ 'sflow.flow_sample.output_interface',
+ 'socks.results',
+ 'opa.mad.attributemodifier.p',
+ 'v5ua.efa',
+ 'zbncp.data.tx_power',
+ 'zbncp.data.nwk_addr',
+ 'zbee_zcl_hvac.pump_config_control.attr.ctrl_mode',
+ 'nat-pmp.external_port',
+ 'zbee_zcl.attr.float',
+ 'wpan-tap.phr.fsk_ms.mode',
+ 'mysql.exec_flags',
+ 'pim.metric_pref',
+ 'modbus.regval_float',
+ 'alcap.cau.value',
+ 'bpv7.crc_field',
+ 'at.chld.mode',
+ 'btl2cap.psm',
+ 'srvloc.srvtypereq.nameauthlistlen',
+ 'a11.ext.code',
+ 'adwin_config.port',
+ 'afp.unknown',
+ 'ansi_a_bsmap.mid.digit_1',
+ 'ber.unknown.OCTETSTRING',
+ 'btatt.handle',
+ 'btl2cap.option_flushto',
+ 'cip.network_segment.prod_inhibit',
+ 'cql.result.rows.table_name',
+ 'dcom.sa.vartype',
+ 'f5ethtrailer.slot',
+ 'ipdr.cm_ipv6_addr',
+ 'mojito.kuid',
+ 'mtp3.priority',
+ 'pw.cw.length',
+ 'rlc.ciphered_data',
+ 'vp8.pld.pictureid',
+ 'gryphon.sched.channel',
+ 'pn_io.ioxs',
+ 'pn_dcp.block_qualifier_reset',
+ 'pn_dcp.suboption_device_instance',
+ 'nfs.attr',
+ 'nfs.create_session_flags',
+ 'rmt-lct.toi64',
+ 'gryphon.data.header_length',
+ 'quake2.game.client.command.move.movement',
+ 'isup.parameter_type',
+ 'cip.port',
+ 'adwin.fifo_no',
+ 'bthci_evt.hci_vers_nr',
+ 'gryphon.usdt.stmin_active',
+ 'dnp3.al.anaout.int',
+ 'dnp3.al.ana.int',
+ 'dnp3.al.cnt',
+ 'bthfp.chld.mode',
+ 'nat-pmp.pml',
+ 'isystemactivator.actproperties.ts.hdr',
+ 'rtpdump.txt_addr',
+ 'unistim.vocoder.id',
+ 'mac.ueid',
+ 'cip.symbol.size',
+ 'dnp3.al.range.start',
+ 'dnp3.al.range.stop',
+ 'gtpv2.mp',
+ 'gvcp.cmd.resend.firstpacketid',
+ 'gvcp.cmd.resend.lastpacketid',
+ 'wlan.bf.reserved',
+ 'opa.sa.reserved',
+ 'rmt-lct.ext_tol_transfer_len',
+ 'pn_io.error_code2',
+ 'gryphon.ldf.schedsize',
+ 'wimaxmacphy.burst_opt_mimo_matrix_indicator',
+ 'ccsds.packet_type',
+ 'iso15765.flow_control.stmin',
+ 'msdo.PieceSize',
+ 'opa.clasportinfo.redirect.reserved',
+ 'p_mul.unused',
+ 'opa.pm.dataportcounters.reserved',
+ 'opa.switchinfo.switchcapabilitymask.reserved',
+ 'nvme-rdma.read_from_host_resp',
+ 'nvme-rdma.write_to_host_req',
+ 'netlink-route.ifla_linkstats.rx_errors.fifo_errs',
+ 'mtp3mg.japan_spare',
+ 'ixveriwave.errors.ip_checksum_error',
+ 'bpsec.asb.result_count',
+ 'btle.control.phys.le_coded_phy',
+ 'gsm_rlcmac.ul.gprs_multislot_class_exist',
+ 'tpm.resp.size',
+ 'sasp.flags.quiesce',
+ 'canopen.sdo.n',
+ 'cigi.celestial_sphere_control.date',
+ 'corosync_totemsrp.orf_token.seq',
+ 'dec_dna.flags.msglen',
+ 'hiqnet.device',
+ 'ipdr.cm_ipv6_addr_len',
+ 'ipdr.cm_ipv6_addr_string',
+ 'mpeg_descr.phone.nat_code_len'
+ }
+ if filter in ignore_filters:
+ return True
+
+
ignore_patterns = [
- re.compile(r'^elf.sh_type'),
- re.compile(r'^elf.p_type'),
- re.compile(r'^btavrcp.pdu_id'),
re.compile(r'^nstrace.trcdbg.val(\d+)'),
- re.compile(r'^netlogon.dummy_string'),
- re.compile(r'^opa.reserved'),
re.compile(r'^mpls_pm.timestamp\d\..*'),
- re.compile(r'^wassp.data.mu_mac'),
- re.compile(r'^thrift.type'),
- re.compile(r'^quake2.game.client.command.move.angles'),
- re.compile(r'^ipp.enum_value'),
- re.compile(r'^idrp.error.subcode'),
- re.compile(r'^ftdi-ft.lValue'),
- re.compile(r'^6lowpan.src'),
- re.compile(r'^couchbase.flex_frame.frame.id'),
- re.compile(r'^rtps.param.id'),
- re.compile(r'^rtps.locator.port'),
- re.compile(r'^sigcomp.udvm.value'),
- re.compile(r'^opa.mad.attributemodifier.n'),
- re.compile(r'^smb.cmd'),
- re.compile(r'^sctp.checksum'),
- re.compile(r'^dhcp.option.end'),
- re.compile(r'^nfapi.num.bf.vector.bf.value'),
- re.compile(r'^dnp3.al.range.abs'),
- re.compile(r'^dnp3.al.range.quantity'),
- re.compile(r'^dnp3.al.index'),
- re.compile(r'^dnp3.al.size'),
- re.compile(r'^ftdi-ft.hValue'),
- re.compile(r'^homeplug_av.op_attr_cnf.data.sw_sub'),
- re.compile(r'^radiotap.he_mu.preamble_puncturing'),
- re.compile(r'^ndmp.file'),
- re.compile(r'^ocfs2.dlm.lvb'),
- re.compile(r'^oran_fh_cus.reserved'),
- re.compile(r'^qnet6.kif.msgsend.msg.read.xtypes0-7'),
- re.compile(r'^qnet6.kif.msgsend.msg.write.xtypes0-7'),
- re.compile(r'^mih.sig_strength'),
- re.compile(r'^couchbase.flex_frame.frame.len'),
- re.compile(r'^nvme-rdma.read_to_host_req'),
- re.compile(r'^rpcap.dummy'),
- re.compile(r'^sflow.flow_sample.output_interface'),
- re.compile(r'^socks.results'),
- re.compile(r'^opa.mad.attributemodifier.p'),
- re.compile(r'^v5ua.efa'),
- re.compile(r'^zbncp.data.tx_power'),
- re.compile(r'^zbncp.data.nwk_addr'),
- re.compile(r'^zbee_zcl_hvac.pump_config_control.attr.ctrl_mode'),
- re.compile(r'^nat-pmp.external_port'),
- re.compile(r'^zbee_zcl.attr.float'),
- re.compile(r'^wpan-tap.phr.fsk_ms.mode'),
- re.compile(r'^mysql.exec_flags'),
- re.compile(r'^pim.metric_pref'),
- re.compile(r'^modbus.regval_float'),
- re.compile(r'^alcap.cau.value'),
- re.compile(r'^bpv7.crc_field'),
- re.compile(r'^at.chld.mode'),
- re.compile(r'^btl2cap.psm'),
- re.compile(r'^srvloc.srvtypereq.nameauthlistlen'),
- re.compile(r'^a11.ext.code'),
- re.compile(r'^adwin_config.port'),
- re.compile(r'^afp.unknown'),
- re.compile(r'^ansi_a_bsmap.mid.digit_1'),
- re.compile(r'^ber.unknown.OCTETSTRING'),
- re.compile(r'^btatt.handle'),
- re.compile(r'^btl2cap.option_flushto'),
- re.compile(r'^cip.network_segment.prod_inhibit'),
- re.compile(r'^cql.result.rows.table_name'),
- re.compile(r'^dcom.sa.vartype'),
- re.compile(r'^f5ethtrailer.slot'),
- re.compile(r'^ipdr.cm_ipv6_addr'),
- re.compile(r'^mojito.kuid'),
- re.compile(r'^mtp3.priority'),
- re.compile(r'^pw.cw.length'),
- re.compile(r'^rlc.ciphered_data'),
- re.compile(r'^vp8.pld.pictureid'),
- re.compile(r'^gryphon.sched.channel'),
- re.compile(r'^pn_io.ioxs'),
- re.compile(r'^pn_dcp.block_qualifier_reset'),
- re.compile(r'^pn_dcp.suboption_device_instance'),
- re.compile(r'^nfs.attr'),
- re.compile(r'^nfs.create_session_flags'),
- re.compile(r'^rmt-lct.toi64'),
- re.compile(r'^gryphon.data.header_length'),
- re.compile(r'^quake2.game.client.command.move.movement'),
- re.compile(r'^isup.parameter_type'),
- re.compile(r'^cip.port'),
- re.compile(r'^adwin.fifo_no'),
- re.compile(r'^bthci_evt.hci_vers_nr'),
- re.compile(r'^gryphon.usdt.stmin_active'),
- re.compile(r'^dnp3.al.anaout.int'),
- re.compile(r'^dnp3.al.ana.int'),
- re.compile(r'^dnp3.al.cnt'),
- re.compile(r'^bthfp.chld.mode'),
- re.compile(r'^nat-pmp.pml'),
- re.compile(r'^isystemactivator.actproperties.ts.hdr'),
- re.compile(r'^rtpdump.txt_addr'),
- re.compile(r'^unistim.vocoder.id'),
- re.compile(r'^mac.ueid'),
- re.compile(r'cip.symbol.size'),
- re.compile(r'dnp3.al.range.start'),
- re.compile(r'dnp3.al.range.stop'),
- re.compile(r'gtpv2.mp'),
- re.compile(r'gvcp.cmd.resend.firstpacketid'),
- re.compile(r'gvcp.cmd.resend.lastpacketid'),
- re.compile(r'wlan.bf.reserved'),
- re.compile(r'opa.sa.reserved'),
- re.compile(r'rmt-lct.ext_tol_transfer_len'),
- re.compile(r'pn_io.error_code2'),
- re.compile(r'gryphon.ldf.schedsize'),
- re.compile(r'wimaxmacphy.burst_opt_mimo_matrix_indicator'),
re.compile(r'alcap.*bwt.*.[b|f]w'),
- re.compile(r'ccsds.packet_type'),
- re.compile(r'iso15765.flow_control.stmin'),
- re.compile(r'msdo.PieceSize'),
- re.compile(r'opa.clasportinfo.redirect.reserved'),
- re.compile(r'p_mul.unused'),
re.compile(r'btle.control.phys.le_[1|2]m_phy'),
- re.compile(r'opa.pm.dataportcounters.reserved'),
- re.compile(r'opa.switchinfo.switchcapabilitymask.reserved'),
- re.compile(r'nvme-rdma.read_from_host_resp'),
- re.compile(r'nvme-rdma.write_to_host_req'),
- re.compile(r'netlink-route.ifla_linkstats.rx_errors.fifo_errs'),
- re.compile(r'mtp3mg.japan_spare'),
- re.compile(r'ixveriwave.errors.ip_checksum_error'),
- re.compile(r'ansi_a_bsmap.cm2.scm.bc_entry.opmode[0|1]')
+ re.compile(r'ansi_a_bsmap.cm2.scm.bc_entry.opmode[0|1]'),
+ re.compile(r'cemi.[n|x]')
]
-
for patt in ignore_patterns:
if patt.match(filter):
return True
+
return False
@@ -549,7 +578,7 @@ class ValueString:
value,label = m.group(1), m.group(2)
if value in macros:
value = macros[value]
- elif any(not c in '0123456789abcdefABCDEFxX' for c in value):
+ elif any(c not in '0123456789abcdefABCDEFxX' for c in value):
self.valid = False
return
@@ -563,12 +592,16 @@ class ValueString:
value = int(value, 8)
else:
value = int(value, 10)
- except:
+ except Exception:
return
global warnings_found
# Check for value conflict before inserting
+ if do_extra_checks and value in self.parsed_vals and label == self.parsed_vals[value]:
+ print('Warning:', self.file, ': value_string', self.name, '- value ', value, 'repeated with same string - ', label)
+ warnings_found += 1
+
if value in self.parsed_vals and label != self.parsed_vals[value]:
print('Warning:', self.file, ': value_string', self.name, '- value ', value, 'repeated with different values - was',
self.parsed_vals[value], 'now', label)
@@ -583,14 +616,16 @@ class ValueString:
'other', 'for further study', 'future', 'vendor specific', 'obsolete', 'none',
'shall not be used', 'national use', 'unassigned', 'oem', 'user defined',
'manufacturer specific', 'not specified', 'proprietary', 'operator-defined',
- 'dynamically allocated', 'user specified', 'xxx', 'default', 'planned', 'not req' ]
+ 'dynamically allocated', 'user specified', 'xxx', 'default', 'planned', 'not req',
+ 'deprecated', 'not measured', 'unspecified', 'nationally defined', 'nondisplay', 'general',
+ 'tbd' ]
excepted = False
for ex in exceptions:
if label.lower().find(ex) != -1:
excepted = True
break
- if not excepted:
+ if not excepted and len(label)>2:
print('Warning:', self.file, ': value_string', self.name, '- label ', label, 'repeated')
warnings_found += 1
else:
@@ -609,7 +644,7 @@ class ValueString:
span = self.max_value - self.min_value + 1
if num_items > 4 and span > num_items and (span-num_items <=1):
for val in range(self.min_value, self.max_value):
- if not val in self.parsed_vals:
+ if val not in self.parsed_vals:
print('Warning:', self.file, ': value_string', self.name, '- value', val, 'missing?', '(', num_items, 'entries)')
global warnings_found
warnings_found += 1
@@ -627,7 +662,7 @@ class ValueString:
# Be forgiving about first or last entry
first_val = list(self.parsed_vals)[0]
last_val = list(self.parsed_vals)[-1]
- if not first_val in matching_label_entries or not last_val in matching_label_entries:
+ if first_val not in matching_label_entries or last_val not in matching_label_entries:
return
print('Warning:', self.file, ': value_string', self.name, 'Labels match value except for 1!', matching_label_entries, num_items, self)
@@ -680,21 +715,20 @@ class RangeString:
self.max_value = -99999
# Now parse out each entry in the value_string
- matches = re.finditer(r'\{\s*([0-9_A-Za-z]*)\s*,\s*([0-9_A-Za-z]*)\s*,\s*(".*?")\s*}\s*,', self.raw_vals)
+ matches = re.finditer(r'\{\s*([0-9_A-Za-z]*)\s*,\s*([0-9_A-Za-z]*)\s*,\s*(".*?")\s*\}\s*,', self.raw_vals)
for m in matches:
min,max,label = m.group(1), m.group(2), m.group(3)
if min in macros:
min = macros[min]
- elif any(not c in '0123456789abcdefABCDEFxX' for c in min):
+ elif any(c not in '0123456789abcdefABCDEFxX' for c in min):
self.valid = False
return
if max in macros:
max = macros[max]
- elif any(not c in '0123456789abcdefABCDEFxX' for c in max):
+ elif any(c not in '0123456789abcdefABCDEFxX' for c in max):
self.valid = False
return
-
try:
# Read according to the appropriate base.
if min.lower().startswith('0x'):
@@ -714,7 +748,7 @@ class RangeString:
max = int(max, 8)
else:
max = int(max, 10)
- except:
+ except Exception:
return
# Now check what we've found.
@@ -724,7 +758,7 @@ class RangeString:
self.min_value = min
# For overall max value, still use min of each entry.
# It is common for entries to extend to e.g. 0xff, but at least we can check for items
- # that can never match if we only chec the min.
+ # that can never match if we only check the min.
if min > self.max_value:
self.max_value = min
@@ -734,7 +768,7 @@ class RangeString:
print('Warning:', self.file, ': range_string label', label, 'hidden by', prev)
warnings_found += 1
- # Max should not be > min
+ # Min should not be > max
if min > max:
print('Warning:', self.file, ': range_string', self.name, 'entry', label, 'min', min, '>', max)
warnings_found += 1
@@ -747,12 +781,59 @@ class RangeString:
# OK, add this entry
self.parsed_vals.append(RangeStringEntry(min, max, label))
+ # TODO: mark as not valid if not all pairs were successfully parsed?
+
def extraChecks(self):
- pass
- # TODO: some checks over all entries. e.g.,
- # - can multiple values be coalesced into 1?
- # - if in all cases min==max, suggest value_string instead?
+ global warnings_found
+
+ # if in all cases min==max, suggest value_string instead?
+ could_use_value_string = True
+ for val in self.parsed_vals:
+ if val.min != val.max:
+ could_use_value_string = False
+ break
+ if could_use_value_string:
+ print('Warning:', self.file, ': range_string', self.name, 'could be value_string instead!')
+ warnings_found += 1
+
+ # TODO: can multiple values be coalesced into fewer?
+ # TODO: Partial overlapping?
+
+
+class StringString:
+ def __init__(self, file, name, vals, macros, do_extra_checks=False):
+ self.file = file
+ self.name = name
+ self.raw_vals = vals
+ self.parsed_vals = {}
+
+ terminated = False
+ global errors_found
+
+ # Now parse out each entry in the string_string
+ matches = re.finditer(r'\{\s*(["0-9_A-Za-z\s\-]*?)\s*,\s*(["0-9_A-Za-z\s\-]*)\s*', self.raw_vals)
+ for m in matches:
+ key = m.group(1).strip()
+ value = m.group(2).strip()
+ if key in self.parsed_vals:
+ print('Error:', self.file, ': string_string', self.name, 'entry', key, 'has been added twice (values',
+ self.parsed_vals[key], 'and now', value, ')')
+ errors_found += 1
+
+ else:
+ self.parsed_vals[key] = value
+ # TODO: Also allow key to be "0" ?
+ if (key in { "NULL" }) and value == "NULL":
+ terminated = True
+
+ if not terminated:
+ print('Error:', self.file, ': string_string', self.name, "is not terminated with { NULL, NULL }")
+ errors_found += 1
+
+ def extraChecks(self):
+ pass
+ # TODO: ?
@@ -781,7 +862,7 @@ def findValueStrings(filename, macros, do_extra_checks=False):
return vals_found
-# Look for value_string entries in a dissector file. Return a dict name -> ValueString
+# Look for range_string entries in a dissector file. Return a dict name -> RangeString
def findRangeStrings(filename, macros, do_extra_checks=False):
vals_found = {}
@@ -805,6 +886,29 @@ def findRangeStrings(filename, macros, do_extra_checks=False):
return vals_found
+# Look for string_string entries in a dissector file. Return a dict name -> StringString
+def findStringStrings(filename, macros, do_extra_checks=False):
+ vals_found = {}
+
+ #static const string_string ice_candidate_types[] = {
+ # { "host", "Host candidate" },
+ # { "srflx", "Server reflexive candidate" },
+ # { 0, NULL }
+ #};
+
+ with open(filename, 'r', encoding="utf8") as f:
+ contents = f.read()
+
+ # Remove comments so as not to trip up RE.
+ contents = removeComments(contents)
+
+ matches = re.finditer(r'.*const string_string\s*([a-zA-Z0-9_]*)\s*\[\s*\]\s*\=\s*\{([\{\}\d\,a-zA-Z0-9_\-\*\#\.:\/\(\)\'\s\"]*)\};', contents)
+ for m in matches:
+ name = m.group(1)
+ vals = m.group(2)
+ vals_found[name] = StringString(filename, name, vals, macros, do_extra_checks)
+
+ return vals_found
# The relevant parts of an hf item. Used as value in dict where hf variable name is key.
@@ -821,11 +925,17 @@ class Item:
self.hf = hf
self.filter = filter
self.label = label
+ self.blurb = blurb
self.mask = mask
self.strings = strings
self.mask_exact_width = mask_exact_width
- global warnings_found
+ global warnings_found, errors_found
+
+ if blurb == '0':
+ print('Error:', filename, hf, ': - filter "' + filter +
+ '" has blurb of 0 - if no string, please set NULL instead')
+ errors_found += 1
self.set_mask_value(macros)
@@ -846,16 +956,19 @@ class Item:
Item.previousItems.pop()
self.item_type = item_type
+
self.display = display
+ self.set_display_value(macros)
# Optionally check label (short and long).
if check_label:
self.check_label(label, 'label')
#self.check_label(blurb, 'blurb')
+ self.check_blurb_vs_label()
# Optionally check that mask bits are contiguous
if check_mask:
- if self.mask_read and not mask in { 'NULL', '0x0', '0', '0x00' }:
+ if self.mask_read and mask not in { 'NULL', '0x0', '0', '0x00' }:
self.check_contiguous_bits(mask)
self.check_num_digits(self.mask)
# N.B., if last entry in set is removed, see around 18,000 warnings
@@ -889,9 +1002,16 @@ class Item:
rs = range_strings[self.rs_name]
self.check_range_string_range(rs.min_value, rs.max_value)
+ # Could/should this item be FT_FRAMENUM ?
+ #if ((self.label.lower().find(' frame') != -1 or self.label.lower().find('frame ') != -1) and self.label.lower().find('frames') == -1 and
+ # (self.label.lower().find('in') != -1 or self.label.lower().find('for') != -1) and
+ # self.item_type == 'FT_UINT32' and self.mask_value == 0x0):
+ # print('Warning: ' + self.filename, self.hf, 'filter "' + self.filter + '", label "' + label + '"', 'item type is', self.item_type, '- could be FT_FRAMENUM?')
+ # warnings_found += 1
+
def __str__(self):
- return 'Item ({0} "{1}" {2} type={3}:{4} {5} mask={6})'.format(self.filename, self.label, self.filter, self.item_type, self.display, self.strings, self.mask)
+ return 'Item ({0} {1} "{2}" {3} type={4}:{5} {6} mask={7})'.format(self.filename, self.hf, self.label, self.filter, self.item_type, self.display, self.strings, self.mask)
def check_label(self, label, label_name):
global warnings_found
@@ -915,20 +1035,50 @@ class Item:
print('Warning: ' + self.filename, self.hf, 'filter "' + self.filter + '"', label_name, '"' + label + '"', 'ends with an unnecessary colon')
warnings_found += 1
+ def check_blurb_vs_label(self):
+ global warnings_found
+ if self.blurb == "NULL":
+ return
+
+ # Is the label longer than the blurb?
+ # Generated dissectors tend to write the type into the blurb field...
+ #if len(self.label) > len(self.blurb):
+ # print('Warning:', self.filename, self.hf, 'label="' + self.label + '" blurb="' + self.blurb + '"', "- label longer than blurb!!!")
+
+ # Is the blurb just the label in a different order?
+ label_words = self.label.lower().split(' ')
+ label_words.sort()
+ blurb_words = self.blurb.lower().split(' ')
+ blurb_words.sort()
+
+ # Subset - often happens when part specific to that field is dropped
+ if set(label_words) > set(blurb_words):
+ print('Warning:', self.filename, self.hf, 'label="' + self.label + '" blurb="' + self.blurb + '"', "- words in blurb are subset of label words")
+ warnings_found += 1
+
+ # Just a re-ordering (but may also contain capitalization changes.)
+ if blurb_words == label_words:
+ print('Warning:', self.filename, self.hf, 'label="' + self.label + '" blurb="' + self.blurb + '"', "- blurb words are label words (re-ordered?)")
+ warnings_found += 1
+
+ # TODO: could have item know protocol name(s) from file this item was found in, and complain if blurb is just prot-name + label ?
+
def set_mask_value(self, macros):
try:
self.mask_read = True
+ # PIDL generator adds annoying parentheses and spaces around mask..
+ self.mask = self.mask.strip('() ')
# Substitute mask if found as a macro..
if self.mask in macros:
self.mask = macros[self.mask]
- elif any(not c in '0123456789abcdefABCDEFxX' for c in self.mask):
+ elif any(c not in '0123456789abcdefABCDEFxX' for c in self.mask):
self.mask_read = False
self.mask_value = 0
+ #print(self.filename, 'Could not read:', '"' + self.mask + '"')
return
-
# Read according to the appropriate base.
if self.mask.startswith('0x'):
self.mask_value = int(self.mask, 16)
@@ -936,10 +1086,39 @@ class Item:
self.mask_value = int(self.mask, 8)
else:
self.mask_value = int(self.mask, 10)
- except:
+ except Exception:
self.mask_read = False
self.mask_value = 0
+ #if not self.mask_read:
+ # print('Could not read:', self.mask)
+
+
+ def set_display_value(self, macros):
+ try:
+ self.display_read = True
+ display = self.display
+
+ # Substitute display if found as a macro..
+ if display in macros:
+ display = macros[display]
+ elif any(c not in '0123456789abcdefABCDEFxX' for c in display):
+ self.display_read = False
+ self.display_value = 0
+ return
+
+ # Read according to the appropriate base.
+ if self.display.startswith('0x'):
+ self.display_value = int(display, 16)
+ elif self.display.startswith('0'):
+ self.display_value = int(display, 8)
+ else:
+ self.display_value = int(display, 10)
+ except Exception:
+ self.display_read = False
+ self.display_value = 0
+
+
def check_value_string_range(self, vs_min, vs_max):
item_width = self.get_field_width_in_bits()
@@ -993,7 +1172,7 @@ class Item:
def check_bit(self, value, n):
return (value & (0x1 << n)) != 0
- # Output a warning if non-contigous bits are found in the mask (guint64).
+ # Output a warning if non-contiguous bits are found in the mask (uint64_t).
# Note that this legimately happens in several dissectors where multiple reserved/unassigned
# bits are conflated into one field.
# - there is probably a cool/efficient way to check this (+1 => 1-bit set?)
@@ -1026,7 +1205,7 @@ class Item:
# Look up the field width
field_width = 0
- if not self.item_type in field_widths:
+ if self.item_type not in field_widths:
print('unexpected item_type is ', self.item_type)
field_width = 64
else:
@@ -1058,15 +1237,13 @@ class Item:
if self.item_type == 'FT_BOOLEAN':
if self.display == 'NULL':
return 8 # i.e. 1 byte
- elif self.display == 'BASE_NONE':
- return 8
elif self.display == 'SEP_DOT': # from proto.h, only meant for FT_BYTES
return 64
else:
try:
# For FT_BOOLEAN, modifier is just numerical number of bits. Round up to next nibble.
return int((int(self.display) + 3)/4)*4
- except:
+ except Exception:
return None
else:
if self.item_type in field_widths:
@@ -1156,13 +1333,11 @@ class Item:
def check_mask_if_in_field_array(self, mask, field_arrays):
# Work out if this item appears in a field array
found = False
- array_name = None
for arr in field_arrays:
list = field_arrays[arr][0]
if self.hf in list:
# These need to have a mask - don't judge for being 0
found = True
- array_name = arr
break
if found:
@@ -1226,6 +1401,30 @@ class Item:
return True
+ def check_boolean_length(self):
+ global errors_found
+ # If mask is 0, display must be BASE_NONE.
+ if self.item_type == 'FT_BOOLEAN' and self.mask_read and self.mask_value == 0 and self.display.find('BASE_NONE') == -1:
+ print('Error:', self.filename, self.hf, 'type is FT_BOOLEAN, no mask set (', self.mask, ') - display should be BASE_NONE, is instead', self.display)
+ errors_found += 1
+ # TODO: check for length > 64?
+
+ def check_string_display(self):
+ global warnings_found
+ if self.item_type in { 'FT_STRING', 'FT_STRINGZ', 'FT_UINT_STRING'}:
+ if self.display.find('BASE_NONE')==-1:
+ print('Warning:', self.filename, self.hf, 'type is', self.item_type, 'display must be BASE_NONE, is instead', self.display)
+ warnings_found += 1
+
+
+
+
+ def check_ipv4_display(self):
+ global errors_found
+ if self.item_type == 'FT_IPv4' and self.display not in { 'BASE_NETMASK', 'BASE_NONE' }:
+ print('Error:', self.filename, self.hf, 'type is FT_IPv4, should be BASE_NETMASK or BASE_NONE, is instead', self.display)
+ errors_found += 1
+
class CombinedCallsCheck:
def __init__(self, file, apiChecks):
@@ -1256,8 +1455,8 @@ class CombinedCallsCheck:
# More compelling if close together..
if call.line_number>prev.line_number and call.line_number-prev.line_number <= 4:
scope_different = False
- for l in range(prev.line_number, call.line_number-1):
- if lines[l].find('{') != -1 or lines[l].find('}') != -1 or lines[l].find('else') != -1 or lines[l].find('break;') != -1 or lines[l].find('if ') != -1:
+ for no in range(prev.line_number, call.line_number-1):
+ if lines[no].find('{') != -1 or lines[no].find('}') != -1 or lines[no].find('else') != -1 or lines[no].find('break;') != -1 or lines[no].find('if ') != -1:
scope_different = True
break
# Also more compelling if check for and scope changes { } in lines in-between?
@@ -1315,7 +1514,6 @@ apiChecks.append(APICheck('proto_tree_add_item_ret_varint', { 'FT_INT8', 'FT_INT
'FT_CHAR', 'FT_UINT8', 'FT_UINT16', 'FT_UINT24', 'FT_UINT32', 'FT_FRAMENUM',
'FT_UINT40', 'FT_UINT48', 'FT_UINT56', 'FT_UINT64',}))
apiChecks.append(APICheck('proto_tree_add_boolean_bits_format_value', { 'FT_BOOLEAN'}))
-apiChecks.append(APICheck('proto_tree_add_boolean_bits_format_value64', { 'FT_BOOLEAN'}))
apiChecks.append(APICheck('proto_tree_add_ascii_7bits_item', { 'FT_STRING'}))
# TODO: positions are different, and takes 2 hf_fields..
#apiChecks.append(APICheck('proto_tree_add_checksum', { 'FT_UINT8', 'FT_UINT16', 'FT_UINT24', 'FT_UINT32'}))
@@ -1401,17 +1599,27 @@ def isGeneratedFile(filename):
return False
+# TODO: could also look for macros in related/included header file(s)?
def find_macros(filename):
- macros = {}
+ # Pre-populate with some useful values..
+ macros = { 'BASE_NONE' : 0, 'BASE_DEC' : 1 }
+
with open(filename, 'r', encoding="utf8") as f:
contents = f.read()
# Remove comments so as not to trip up RE.
contents = removeComments(contents)
- matches = re.finditer( r'#define\s*([A-Z0-9_]*)\s*([0-9xa-fA-F]*)\n', contents)
+ matches = re.finditer( r'#define\s*([A-Za-z0-9_]*)\s*([0-9xa-fA-F]*)\s*\n', contents)
+ for m in matches:
+ # Store this mapping.
+ macros[m.group(1)] = m.group(2)
+
+ # Also look for what could be enumeration assignments
+ matches = re.finditer( r'\s*([A-Za-z0-9_]*)\s*=\s*([0-9xa-fA-F]*)\s*,?\n', contents)
for m in matches:
# Store this mapping.
macros[m.group(1)] = m.group(2)
+
return macros
@@ -1468,7 +1676,7 @@ def find_field_arrays(filename, all_fields, all_hf):
for m in matches:
name = m.group(1)
# Ignore if not used in a call to an _add_bitmask_ API
- if not name in all_fields:
+ if name not in all_fields:
continue
fields_text = m.group(2)
@@ -1591,6 +1799,11 @@ def checkFile(filename, check_mask=False, mask_exact_width=False, check_label=Fa
for name in range_strings:
range_strings[name].extraChecks()
+ # Find (and sanity-check) string_strings
+ string_strings = findStringStrings(filename, macros, do_extra_checks=extra_value_string_checks)
+ if extra_value_string_checks:
+ for name in string_strings:
+ string_strings[name].extraChecks()
# Find important parts of items.
@@ -1638,10 +1851,15 @@ def checkFile(filename, check_mask=False, mask_exact_width=False, check_label=Fa
# Only checking if almost every field does match.
checking = len(items_defined) and matches<len(items_defined) and ((matches / len(items_defined)) > 0.93)
if checking:
- print(filename, ':', matches, 'label-vs-filter matches of out of', len(items_defined), 'so reporting mismatches')
+ print(filename, ':', matches, 'label-vs-filter matches out of', len(items_defined), 'so reporting mismatches')
for hf in items_defined:
items_defined[hf].check_label_vs_filter(reportError=True, reportNumericalMismatch=False)
+ for hf in items_defined:
+ items_defined[hf].check_boolean_length()
+ items_defined[hf].check_string_display()
+ items_defined[hf].check_ipv4_display()
+
#################################################################
@@ -1686,7 +1904,7 @@ if args.all_checks:
args.mask_exact_width = True
args.consecutive = True
args.check_bitmask_fields = True
- #args.label = True
+ args.label = True
args.label_vs_filter = True
args.extra_value_string_checks
@@ -1734,7 +1952,7 @@ elif args.open:
# Only interested in dissector files.
files_staged = list(filter(lambda f : is_dissector_file(f), files_staged))
for f in files_staged:
- if not f in files:
+ if f not in files:
files.append(f)
else:
# Find all dissector files.
diff --git a/tools/check_val_to_str.py b/tools/check_val_to_str.py
index 417655c3..4ce2ca8c 100755
--- a/tools/check_val_to_str.py
+++ b/tools/check_val_to_str.py
@@ -73,7 +73,7 @@ def removeComments(code_string):
def is_dissector_file(filename):
- p = re.compile(r'.*packet-.*\.c')
+ p = re.compile(r'.*(packet|file)-.*\.c')
return p.match(filename)
def findDissectorFilesInFolder(folder, recursive=False):
@@ -101,7 +101,7 @@ warnings_found = 0
errors_found = 0
# Check the given dissector file.
-def checkFile(filename):
+def checkFile(filename, generated):
global warnings_found
global errors_found
@@ -130,18 +130,28 @@ def checkFile(filename):
# TODO: I suppose it could be escaped, but haven't seen this...
if format_string.find('%') != -1:
# This is an error as format specifier would show in app
- print('Error:', filename, " ", m.group(0), ' - should not have specifiers in unknown string')
+ print('Error:', filename, " ", m.group(0),
+ ' - should not have specifiers in unknown string',
+ '(GENERATED)' if generated else '')
errors_found += 1
else:
# These ones need to have a specifier, and it should be suitable for an int
- specifier_id = format_string.find('%')
- if specifier_id == -1:
- print('Warning:', filename, " ", m.group(0), ' - should have suitable format specifier in unknown string (or use _const()?)')
+ count = format_string.count('%')
+ if count == 0:
+ print('Warning:', filename, " ", m.group(0),
+ ' - should have suitable format specifier in unknown string (or use _const()?)',
+ '(GENERATED)' if generated else '')
warnings_found += 1
+ elif count > 1:
+ print('Warning:', filename, " ", m.group(0),
+ ' - has more than one specifier?',
+ '(GENERATED)' if generated else '')
# TODO: check allowed specifiers (d, u, x, ?) and modifiers (0-9*) in re ?
if format_string.find('%s') != -1:
# This is an error as this likely causes a crash
- print('Error:', filename, " ", m.group(0), ' - inappropriate format specifier in unknown string')
+ print('Error:', filename, " ", m.group(0),
+ ' - inappropriate format specifier in unknown string',
+ '(GENERATED)' if generated else '')
errors_found += 1
@@ -158,6 +168,8 @@ parser.add_argument('--commits', action='store',
help='last N commits to check')
parser.add_argument('--open', action='store_true',
help='check open files')
+parser.add_argument('--generated', action='store_true',
+ help='check generated files')
args = parser.parse_args()
@@ -167,7 +179,7 @@ files = []
if args.file:
# Add specified file(s)
for f in args.file:
- if not f.startswith('epan'):
+ if not os.path.isfile(f) and not f.startswith('epan'):
f = os.path.join('epan', 'dissectors', f)
if not os.path.isfile(f):
print('Chosen file', f, 'does not exist.')
@@ -195,7 +207,7 @@ elif args.open:
# Only interested in dissector files.
files_staged = list(filter(lambda f : is_dissector_file(f), files_staged))
for f in files_staged:
- if not f in files:
+ if f not in files:
files.append(f)
else:
# Find all dissector files from folder.
@@ -219,8 +231,9 @@ else:
for f in files:
if should_exit:
exit(1)
- if not isGeneratedFile(f):
- checkFile(f)
+ generated = isGeneratedFile(f)
+ if args.generated or not generated:
+ checkFile(f, generated)
# Show summary.
diff --git a/tools/checkfiltername.pl b/tools/checkfiltername.pl
index ea286b26..d7b1c0f6 100755
--- a/tools/checkfiltername.pl
+++ b/tools/checkfiltername.pl
@@ -357,6 +357,7 @@ sub is_proto_dup_allowed {
if (($_[0] eq "tn3270") && (index($_[1], "tn3270e") >= 0)) {return 1;}
if (($_[0] eq "usb") && (index($_[1], "usb") >= 0)) {return 1;}
if (($_[0] eq "xml") && (index($_[1], "xml") >= 0)) {return 1;}
+ if (($_[0] eq "dns") && (index($_[1], "dnscrypt") >= 0)) {return 1;}
return 0;
}
diff --git a/tools/checkhf.pl b/tools/checkhf.pl
index 7e01c7e5..df075e62 100755
--- a/tools/checkhf.pl
+++ b/tools/checkhf.pl
@@ -291,7 +291,7 @@ sub remove_quoted_strings {
sub remove_if0_code {
my ($codeRef, $fileName) = @_;
- # Preprocess outputput (ensure trailing LF and no leading WS before '#')
+ # Preprocess output (ensure trailing LF and no leading WS before '#')
$$codeRef =~ s/^\s*#/#/m;
if ($$codeRef !~ /\n$/) { $$codeRef .= "\n"; }
diff --git a/tools/checklicenses.py b/tools/checklicenses.py
index 192fecbe..b0a0ef02 100755
--- a/tools/checklicenses.py
+++ b/tools/checklicenses.py
@@ -37,7 +37,6 @@ ALLOWED_LICENSES = [
'BSD (2 clause) GPL (v2 or later)',
'BSD (3 clause)',
'GPL (v2 or later)',
- 'GPL (v3 or later) (with Bison parser exception)',
'ISC',
'ISC GPL (v2 or later)',
'LGPL (v2 or later)',
@@ -73,15 +72,12 @@ PATH_SPECIFIC_ALLOWED_LICENSES = {
'doc/': [
'UNKNOWN',
],
- 'docbook/custom_layer_chm.xsl': [
+ 'doc/custom_layer_chm.xsl': [
'UNKNOWN',
],
- 'docbook/custom_layer_single_html.xsl': [
+ 'doc/custom_layer_single_html.xsl': [
'UNKNOWN',
],
- 'docbook/ws.css' : [
- 'UNKNOWN'
- ],
'fix': [
'UNKNOWN',
],
@@ -122,7 +118,7 @@ PATH_SPECIFIC_ALLOWED_LICENSES = {
],
# Special IDL license that appears to be compatible as far as I (not a
# lawyer) can tell. See
- # https://www.wireshark.org/lists/wireshark-dev/201310/msg00234.html
+ # https://lists.wireshark.org/archives/wireshark-dev/201310/msg00234.html
'epan/dissectors/pidl/idl_types.h': [
'UNKNOWN',
],
@@ -172,7 +168,7 @@ def check_licenses(options, args):
'licensecheck.pl'))
licensecheck = subprocess.Popen([licensecheck_path,
- '-l', '150',
+ '-l', '160',
'-r', start_dir],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
diff --git a/tools/convert-glib-types.py b/tools/convert-glib-types.py
index aa714d7d..83c8978a 100755
--- a/tools/convert-glib-types.py
+++ b/tools/convert-glib-types.py
@@ -5,7 +5,7 @@
#
# SPDX-License-Identifier: GPL-2.0-or-later
'''\
-convert-glib-types.py - Convert glib types to their C and C99 eqivalents.
+convert-glib-types.py - Convert glib types to their C and C99 equivalents.
'''
# Imports
@@ -22,9 +22,13 @@ type_map = {
'gboolean': 'bool',
'gchar': 'char',
'guchar': 'unsigned char',
+ 'gshort': 'int16_t',
+ 'gushort': 'uint16_t',
'gint': 'int',
'guint': 'unsigned', # Matches README.developer
- 'glong': 'long',
+ # Our remaining glong instances probably shouldn't be converted, e.g.
+ # sequence_analysis.c:350
+ # 'glong': 'long',
'gulong': 'unsigned long',
'gint8': 'int8_t',
'gint16': 'int16_t',
@@ -38,6 +42,10 @@ type_map = {
'gdouble': 'double',
'gpointer ': 'void *', # 'void *foo' instead of 'void * foo'
'gpointer': 'void *',
+ 'gconstpointer ': 'const void *', # 'const void *foo' instead of 'const void * foo'
+ 'gconstpointer': 'const void *',
+ 'gintptr': 'intptr_t',
+ 'guintptr': 'uintptr_t',
# Is gsize the same as size_t on the platforms we support?
# https://gitlab.gnome.org/GNOME/glib/-/issues/2493
'gsize': 'size_t',
@@ -45,8 +53,6 @@ type_map = {
}
definition_map = {
- 'TRUE': 'true',
- 'FALSE': 'false',
'G_MAXINT8': 'INT8_MAX',
'G_MAXINT16': 'INT16_MAX',
'G_MAXINT32': 'INT32_MAX',
@@ -62,6 +68,17 @@ definition_map = {
'G_MININT32': 'INT32_MIN',
'G_MININT64': 'INT64_MIN',
'G_MININT': 'INT_MIN',
+ 'G_MINFLOAT': 'FLT_MIN',
+ 'G_MAXFLOAT': 'FLT_MAX',
+ 'G_MINDOUBLE': 'DBL_MIN',
+ 'G_MAXDOUBLE': 'DBL_MAX',
+ 'G_GINT64_CONSTANT': 'INT64_C',
+ 'G_GUINT64_CONSTANT': 'UINT64_C',
+}
+
+tf_definition_map = {
+ 'TRUE': 'true',
+ 'FALSE': 'false',
}
format_spec_map = {
@@ -69,6 +86,33 @@ format_spec_map = {
'G_GUINT64_FORMAT': 'PRIu64',
}
+api_map = {
+ 'tvb_get_guint8': 'tvb_get_uint8',
+ 'tvb_get_gint8': 'tvb_get_int8',
+ 'tvb_get_guint16': 'tvb_get_uint16',
+ 'tvb_get_gint16': 'tvb_get_int16',
+ 'tvb_get_guint24': 'tvb_get_uint24',
+ 'tvb_get_gint24': 'tvb_get_int24',
+ 'tvb_get_guint32': 'tvb_get_uint32',
+ 'tvb_get_gint32': 'tvb_get_int32',
+ 'tvb_get_guint40': 'tvb_get_uint40',
+ 'tvb_get_gint40': 'tvb_get_int40',
+ 'tvb_get_guint48': 'tvb_get_uint48',
+ 'tvb_get_gint48': 'tvb_get_int48',
+ 'tvb_get_guint56': 'tvb_get_uint56',
+ 'tvb_get_gint56': 'tvb_get_int56',
+ 'tvb_get_guint64': 'tvb_get_uint64',
+ 'tvb_get_gint64': 'tvb_get_int64',
+ 'tvb_find_guint8': 'tvb_find_uint8',
+ 'tvb_find_guint16': 'tvb_find_uint16',
+ 'tvb_ws_mempbrk_pattern_guint8': 'tvb_ws_mempbrk_pattern_uint8',
+ 'guint32_to_str_buf': 'uint32_to_str_buf',
+ 'guint64_to_str_buf': 'uint64_to_str_buf',
+ 'get_nonzero_guint32': 'get_nonzero_uint32',
+ 'get_guint32': 'get_uint32',
+ 'guint8_to_hex': 'uint8_to_hex',
+}
+
def convert_file(file):
lines = ''
try:
@@ -80,15 +124,19 @@ def convert_file(file):
lines = re.sub(rf'([^"])\b{glib_type}\b([^"])', rf'\1{c99_type}\2', lines, flags=re.MULTILINE)
for glib_define, c99_define in definition_map.items():
lines = re.sub(rf'\b{glib_define}\b', rf'{c99_define}', lines, flags=re.MULTILINE)
+ for glib_tf_define, c99_define in tf_definition_map.items():
+ lines = re.sub(rf'\b{glib_tf_define}\b([^\'"])', rf'{c99_define}\1', lines, flags=re.MULTILINE)
for glib_fmt_spec, c99_fmt_spec in format_spec_map.items():
lines = re.sub(rf'\b{glib_fmt_spec}\b', rf'{c99_fmt_spec}', lines, flags=re.MULTILINE)
+ for glib_api, c99_api in api_map.items():
+ lines = re.sub(rf'\b{glib_api}\b', rf'{c99_api}', lines, flags=re.MULTILINE)
except IsADirectoryError:
sys.stderr.write(f'{file} is a directory.\n')
return
except UnicodeDecodeError:
sys.stderr.write(f"{file} isn't valid UTF-8.\n")
return
- except:
+ except Exception:
sys.stderr.write(f'Unable to open {file}.\n')
return
@@ -97,11 +145,11 @@ def convert_file(file):
print(f'Converted {file}')
def main():
- parser = argparse.ArgumentParser(description='Convert glib types to their C and C99 eqivalents.')
+ parser = argparse.ArgumentParser(description='Convert glib types to their C and C99 equivalents.')
parser.add_argument('files', metavar='FILE', nargs='*')
args = parser.parse_args()
- # Build a padded version of type_map which attempts to preseve alignment
+ # Build a padded version of type_map which attempts to preserve alignment
for glib_type, c99_type in type_map.items():
pg_type = glib_type + ' '
pc_type = c99_type + ' '
diff --git a/tools/convert-proto-init.py b/tools/convert-proto-init.py
new file mode 100755
index 00000000..f0ce652f
--- /dev/null
+++ b/tools/convert-proto-init.py
@@ -0,0 +1,73 @@
+#!/usr/bin/env python3
+#
+# Wireshark - Network traffic analyzer
+# By Gerald Combs <gerald@wireshark.org>
+# Copyright 1998 Gerald Combs
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+'''\
+convert-proto-init.py - Remove explicit init of proto variables.
+'''
+
+# Imports
+
+import argparse
+import glob
+import platform
+import re
+import sys
+
+def convert_file(file):
+ lines = ''
+ try:
+ with open(file, 'r') as f:
+ lines = f.read()
+ # Match the following proto, header field, expert info and subtree variables:
+ #
+ # static int proto_a = -1;
+ # int proto_b=-1;
+ #
+ # static int hf_proto_a_value_1 = -1;
+ # int hf_proto_a_value_2 = - 1;
+ # int hf_proto_a_value_3=-1;
+ # /* static int hf_proto_a_unused_1 = -1; */
+ #
+ # static gint ett_proto_a_tree_1=-1;
+ # gint ett_proto_a_tree_2 = -1; /* A comment. */
+ #
+ # static expert_field ei_proto_a_expert_1 = EI_INIT;
+ #
+ lines = re.sub(r'^((?://\s*|/[*]+\s*)?(?:static\s*| )?(?:g?int|expert_field)\s*(?:proto|hf|ett|ei)_[\w_]+)\s*=\s*(?:-\s*1|EI_INIT)\s*', r'\1', lines, flags=re.MULTILINE)
+ except IsADirectoryError:
+ sys.stderr.write(f'{file} is a directory.\n')
+ return
+ except UnicodeDecodeError:
+ sys.stderr.write(f"{file} isn't valid UTF-8.\n")
+ return
+ except Exception:
+ sys.stderr.write(f'Unable to open {file}.\n')
+ return
+
+ with open(file, 'w') as f:
+ f.write(lines)
+ print(f'Converted {file}')
+
+def main():
+ parser = argparse.ArgumentParser(description='Initialize static proto values to 0.')
+ parser.add_argument('files', metavar='FILE', nargs='*')
+ args = parser.parse_args()
+
+ files = []
+ if platform.system() == 'Windows':
+ for arg in args.files:
+ files += glob.glob(arg)
+ else:
+ files = args.files
+
+ for file in files:
+ convert_file(file)
+
+# On with the show
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/tools/convert_expert_add_info_format.pl b/tools/convert_expert_add_info_format.pl
index 57289364..0b0ddd32 100755
--- a/tools/convert_expert_add_info_format.pl
+++ b/tools/convert_expert_add_info_format.pl
@@ -59,7 +59,10 @@ my %EXPERT_GROUPS = ('PI_CHECKSUM' => "PI_CHECKSUM",
'PI_COMMENTS_GROUP' => "PI_COMMENTS_GROUP",
'PI_DECRYPTION' => "PI_DECRYPTION",
'PI_ASSUMPTION' => "PI_ASSUMPTION",
- 'PI_DEPRECATED' => "PI_DEPRECATED");
+ 'PI_DEPRECATED' => "PI_DEPRECATED",
+ 'PI_RECEIVE' => "PI_RECEIVE",
+ 'PI_INTERFACE' => "PI_INTERFACE",
+ 'PI_DISSECTOR_BUG' => "PI_DISSECTOR_BUG");
my @expert_list;
my $protabbrev = "";
diff --git a/tools/convert_proto_tree_add_text.pl b/tools/convert_proto_tree_add_text.pl
index 35764558..cffc875c 100755
--- a/tools/convert_proto_tree_add_text.pl
+++ b/tools/convert_proto_tree_add_text.pl
@@ -3,12 +3,12 @@
# Copyright 2013 Michael Mann (see AUTHORS file)
#
# A program to help convert proto_tree_add_text calls into filterable "items" that
-# use proto_tree_add_item. The program requires 2 passes. "Pass 1" (generate) collects
+# use proto_tree_add_item. The program requires 2 passes. "Pass 1" (generate) collects
# the eligible proto_tree_add_text calls and outputs the necessary data into a delimited
# file. "Pass 2" (fix-all) takes the data from the delimited file and replaces the
-# proto_tree_add_text calls with proto_tree_add_item or "expert info" calls as well as
+# proto_tree_add_text calls with proto_tree_add_item or "expert info" calls as well as
# generating separate files for the hf and/or ei variable declarations and hf and/or ei array data.
-# The hf "files" can be copy/pasted into the dissector where appropriate (until such time as
+# The hf "files" can be copy/pasted into the dissector where appropriate (until such time as
# its done automatically)
#
# Note that the output from "Pass 1" won't always be a perfect conversion for "Pass 2", so
@@ -96,8 +96,11 @@ my %EXPERT_GROUPS = ('PI_CHECKSUM' => "PI_CHECKSUM",
'PI_SECURITY' => "PI_SECURITY",
'PI_COMMENTS_GROUP' => "PI_COMMENTS_GROUP",
'PI_DECRYPTION' => "PI_DECRYPTION",
- 'PI_ASSUMPTION' => "PI_ASSUMPTION",
- 'PI_DEPRECATED' => "PI_DEPRECATED");
+ 'PI_ASSUMPTION' => "PI_ASSUMPTION",
+ 'PI_DEPRECATED' => "PI_DEPRECATED",
+ 'PI_RECEIVE' => "PI_RECEIVE",
+ 'PI_INTERFACE' => "PI_INTERFACE",
+ 'PI_DISSECTOR_BUG' => "PI_DISSECTOR_BUG");
my @proto_tree_list;
my @expert_list;
@@ -379,7 +382,7 @@ sub generate_hfs {
#encoding
if (scalar @args > 5) {
if (($proto_tree_item[6] eq "1") ||
- ($args[5] =~ /tvb_get_guint8/) ||
+ ($args[5] =~ /tvb_get_g?uint8/) ||
($args[5] =~ /tvb_bytes_to_str/) ||
($args[5] =~ /tvb_ether_to_str/)) {
$proto_tree_item[7] = "ENC_NA";
@@ -387,7 +390,7 @@ sub generate_hfs {
$proto_tree_item[7] = "ENC_BIG_ENDIAN";
} elsif ($args[5] =~ /tvb_get_letoh/) {
$proto_tree_item[7] = "ENC_LITTLE_ENDIAN";
- } elsif (($args[5] =~ /tvb_get_ephemeral_string/) ||
+ } elsif (($args[5] =~ /tvb_get_ephemeral_string/) ||
($args[5] =~ /tvb_format_text/)){
$proto_tree_item[7] = "ENC_NA|ENC_ASCII";
} elsif ($encoding ne "") {
@@ -434,7 +437,7 @@ sub generate_hfs {
#field type
if (scalar @args > 5) {
- if ($args[5] =~ /tvb_get_guint8/) {
+ if ($args[5] =~ /tvb_get_g?uint8/) {
if ($args[4] =~ /%[0-9]*[i]/) {
$proto_tree_item[9] = "FT_INT8";
} else {
@@ -479,7 +482,7 @@ sub generate_hfs {
$proto_tree_item[9] = "FT_GUID";
} elsif ($args[5] =~ /tvb_get_ephemeral_stringz/) {
$proto_tree_item[9] = "FT_STRINGZ";
- } elsif (($args[5] =~ /tvb_get_ephemeral_string/) ||
+ } elsif (($args[5] =~ /tvb_get_ephemeral_string/) ||
($args[5] =~ /tvb_format_text/)){
$proto_tree_item[9] = "FT_STRING";
} elsif (($args[5] =~ /tvb_bytes_to_str/)) {
diff --git a/tools/debian-setup.sh b/tools/debian-setup.sh
index 9b688794..8fade5ac 100755
--- a/tools/debian-setup.sh
+++ b/tools/debian-setup.sh
@@ -26,6 +26,26 @@ function print_usage() {
printf "\\t[other]: other options are passed as-is to apt\\n"
}
+# Adds package $2 to list variable $1 if the package is found.
+# If $3 is given, then this version requirement must be satisfied.
+function add_package() {
+ local list="$1" pkgname="$2" versionreq="${3:-}" version
+
+ version=$(apt-cache show "$pkgname" 2>/dev/null |
+ awk '/^Version:/{ print $2; exit}')
+ # fail if the package is not known
+ if [ -z "$version" ]; then
+ return 1
+ elif [ -n "$versionreq" ]; then
+ # Require minimum version or fail.
+ # shellcheck disable=SC2086
+ dpkg --compare-versions $version $versionreq || return 1
+ fi
+
+ # package is found, append it to list
+ eval "${list}=\"\${${list}} \${pkgname}\""
+}
+
ADDITIONAL=0
DEBDEPS=0
TESTDEPS=0
@@ -75,36 +95,48 @@ then
exit 1
fi
-BASIC_LIST="gcc \
- g++\
- libglib2.0-dev \
- libc-ares-dev \
- libpcap-dev \
- libpcre2-dev \
- flex \
- make \
- python3 \
- libgcrypt-dev \
- libspeexdsp-dev"
-
-QT5_LIST="qttools5-dev \
- qttools5-dev-tools \
- libqt5svg5-dev \
- qtmultimedia5-dev \
- qtbase5-dev \
- qtchooser \
- qt5-qmake \
- qtbase5-dev-tools"
-
-QT6_LIST="qt6-base-dev \
- qt6-multimedia-dev \
- qt6-tools-dev \
- qt6-tools-dev-tools \
- qt6-l10n-tools \
- libqt6core5compat6-dev \
- freeglut3-dev \
- libvulkan-dev \
- libxkbcommon-dev"
+BASIC_LIST="
+ cmake
+ flex
+ g++
+ gcc
+ libc-ares-dev
+ libgcrypt-dev
+ libglib2.0-dev
+ libpcap-dev
+ libpcre2-dev
+ libspeexdsp-dev
+ make
+ python3
+ "
+
+QT5_LIST="
+ libqt5svg5-dev
+ qt5-qmake
+ qtbase5-dev
+ qtbase5-dev-tools
+ qtchooser
+ qtmultimedia5-dev
+ qttools5-dev
+ qttools5-dev-tools
+ "
+
+QT6_LIST="
+ freeglut3-dev
+ libqt6svg6-dev
+ libvulkan-dev
+ libxkbcommon-dev
+ qt6-base-dev
+ qt6-l10n-tools
+ qt6-multimedia-dev
+ qt6-tools-dev
+ qt6-tools-dev-tools
+ "
+
+# qt6-5compat-dev: Debian >= bookworm, Ubuntu >= 23.04
+# libqt6core5compat6-dev: Ubuntu 22.04
+add_package QT6_LIST qt6-5compat-dev ||
+QT6_LIST="$QT6_LIST libqt6core5compat6-dev"
if [ $ADD_QT5 -ne 0 ]
then
@@ -125,144 +157,101 @@ then
# shellcheck disable=SC1090
. "${os_release}"
- # Ubuntu 22.04 (jammy) or later
+ # Ubuntu 22.04 (jammy) / Debian 12 (bookworm) or later
MAJOR=$(echo "$VERSION_ID" | cut -f1 -d.)
if [ "${ID:-linux}" = "ubuntu" ] && [ "${MAJOR:-0}" -ge "22" ]; then
echo "Installing Qt6."
BASIC_LIST="$BASIC_LIST $QT6_LIST"
+ elif [ "${ID:-linux}" = "debian" ] && [ "${MAJOR:-0}" -ge "12" ]; then
+ echo "Installing Qt6."
+ BASIC_LIST="$BASIC_LIST $QT6_LIST"
else
echo "Installing Qt5."
BASIC_LIST="$BASIC_LIST $QT5_LIST"
fi
fi
-ADDITIONAL_LIST="libnl-3-dev \
- libkrb5-dev \
- libsmi2-dev \
- libsbc-dev \
- liblua5.2-dev \
- libnl-cli-3-dev \
- libparse-yapp-perl \
- libcap-dev \
- liblz4-dev \
- libsnappy-dev \
- libzstd-dev \
- libspandsp-dev \
- libxml2-dev \
- libminizip-dev \
- git \
- ninja-build \
- perl \
- xsltproc \
- ccache \
- doxygen"
+ADDITIONAL_LIST="
+ ccache
+ doxygen
+ git
+ libbrotli-dev
+ libcap-dev
+ libgnutls28-dev
+ libkrb5-dev
+ liblz4-dev
+ libmaxminddb-dev
+ libminizip-dev
+ libnghttp2-dev
+ libnl-3-dev
+ libnl-cli-3-dev
+ libopencore-amrnb-dev
+ libopus-dev
+ libparse-yapp-perl
+ libsbc-dev
+ libssh-gcrypt-dev
+ libsmi2-dev
+ libsnappy-dev
+ libspandsp-dev
+ libsystemd-dev
+ libxml2-dev
+ libzstd-dev
+ ninja-build
+ perl
+ xsltproc
+ "
# Uncomment to add PNG compression utilities used by compress-pngs:
-# ADDITIONAL_LIST="$ADDITIONAL_LIST \
-# advancecomp \
-# optipng \
-# pngcrush"
-
-DEBDEPS_LIST="debhelper \
- dh-python \
- asciidoctor \
- docbook-xml \
- docbook-xsl \
- libxml2-utils \
- lintian \
- lsb-release \
- po-debconf \
- python3-ply \
- quilt"
-
-TESTDEPS_LIST="python3-pytest \
- python3-pytest-xdist"
-
-# Adds package $2 to list variable $1 if the package is found.
-# If $3 is given, then this version requirement must be satisfied.
-add_package() {
- local list="$1" pkgname="$2" versionreq="${3:-}" version
-
- version=$(apt-cache show "$pkgname" 2>/dev/null |
- awk '/^Version:/{ print $2; exit}')
- # fail if the package is not known
- if [ -z "$version" ]; then
- return 1
- elif [ -n "$versionreq" ]; then
- # Require minimum version or fail.
- # shellcheck disable=SC2086
- dpkg --compare-versions $version $versionreq || return 1
- fi
-
- # package is found, append it to list
- eval "${list}=\"\${${list}} \${pkgname}\""
-}
+# ADDITIONAL_LIST="
+# $ADDITIONAL_LIST
+# advancecomp
+# optipng
+# pngcrush
+# "
+
+DEBDEPS_LIST="
+ asciidoctor
+ debhelper
+ dh-python
+ docbook-xml
+ docbook-xsl
+ libxml2-utils
+ lintian
+ lsb-release
+ po-debconf
+ python3-ply
+ quilt
+ "
+
+TESTDEPS_LIST="
+ gdb
+ python3-pytest
+ python3-pytest-xdist
+ softhsm2
+ "
# apt-get update must be called before calling add_package
# otherwise available packages appear as unavailable
apt-get update || exit 2
-# cmake3 3.5.1: Ubuntu 14.04
-# cmake >= 3.5: Debian >= jessie-backports, Ubuntu >= 16.04
-add_package BASIC_LIST cmake3 ||
-BASIC_LIST="$BASIC_LIST cmake"
-
-# Debian >= wheezy-backports, Ubuntu >= 16.04
-add_package ADDITIONAL_LIST libnghttp2-dev ||
-echo "libnghttp2-dev is unavailable" >&2
+# Lua 5.4: Debian >= bullseye, Ubuntu >= 22.04 (jammy)
+# Lua 5.3: Debian >= buster, Ubuntu >= 20.04 (focal)
+add_package ADDITIONAL_LIST liblua5.4-dev ||
+ADDITIONAL_LIST="$ADDITIONAL_LIST liblua5.3-dev"
# Debian >= bookworm, Ubuntu >= 22.04
add_package ADDITIONAL_LIST libnghttp3-dev ||
echo "libnghttp3-dev is unavailable" >&2
-# libssh-gcrypt-dev: Debian >= jessie, Ubuntu >= 16.04
-# libssh-dev (>= 0.6): Debian >= jessie, Ubuntu >= 14.04
-add_package ADDITIONAL_LIST libssh-gcrypt-dev ||
-add_package ADDITIONAL_LIST libssh-dev ||
-echo "libssh-gcrypt-dev and libssh-dev are unavailable" >&2
-
-# libgnutls28-dev: Debian >= wheezy-backports, Ubuntu >= 12.04
-add_package ADDITIONAL_LIST libgnutls28-dev ||
-echo "libgnutls28-dev is unavailable" >&2
-
-# Debian >= jessie-backports, Ubuntu >= 16.04
-add_package ADDITIONAL_LIST libmaxminddb-dev ||
-echo "libmaxminddb-dev is unavailable" >&2
-
-# Debian >= stretch-backports, Ubuntu >= 16.04
-add_package ADDITIONAL_LIST libbrotli-dev ||
-echo "libbrotli-dev is unavailable" >&2
-
-# libsystemd-journal-dev: Ubuntu 14.04
-# libsystemd-dev: Ubuntu >= 16.04
-add_package ADDITIONAL_LIST libsystemd-dev ||
-add_package ADDITIONAL_LIST libsystemd-journal-dev ||
-echo "libsystemd-dev is unavailable"
-
# ilbc library from http://www.deb-multimedia.org
add_package ADDITIONAL_LIST libilbc-dev ||
echo "libilbc-dev is unavailable"
-# opus library libopus-dev
-add_package ADDITIONAL_LIST libopus-dev ||
- echo "libopus-dev is unavailable"
-
+# Debian >= bullseye, Ubuntu >= 22.04 (jammy)
# bcg729 library libbcg729-dev
add_package ADDITIONAL_LIST libbcg729-dev ||
echo "libbcg729-dev is unavailable"
-# softhsm2 2.0.0: Ubuntu 16.04
-# softhsm2 2.2.0: Debian >= jessie-backports, Ubuntu 18.04
-# softhsm2 >= 2.4.0: Debian >= buster, Ubuntu >= 18.10
-if ! add_package TESTDEPS_LIST softhsm2 '>= 2.3.0'; then
- if add_package TESTDEPS_LIST softhsm2; then
- # If SoftHSM 2.3.0 is unavailble, install p11tool.
- TESTDEPS_LIST="$TESTDEPS_LIST gnutls-bin"
- else
- echo "softhsm2 is unavailable" >&2
- fi
-fi
-
ACTUAL_LIST=$BASIC_LIST
# Now arrange for optional support libraries
diff --git a/tools/delete_includes.py b/tools/delete_includes.py
index cc804e0b..8ea5e806 100755
--- a/tools/delete_includes.py
+++ b/tools/delete_includes.py
@@ -18,7 +18,6 @@ import sys
import shutil
import argparse
import signal
-import re
from pathlib import Path
@@ -190,8 +189,6 @@ def test_file(filename):
# Don't want to delete 'self-includes', so prepare filename.
module_name = Path(filename).stem
- extension = Path(filename).suffix
-
module_header = module_name + '.h'
# Loop around, finding all possible include lines to comment out
diff --git a/tools/detect_bad_alloc_patterns.py b/tools/detect_bad_alloc_patterns.py
index a89ceb6f..b2459c6e 100644
--- a/tools/detect_bad_alloc_patterns.py
+++ b/tools/detect_bad_alloc_patterns.py
@@ -69,8 +69,8 @@ def test_replacements():
test_string = """\
(if_info_t*) g_malloc0(sizeof(if_info_t))
(oui_info_t *)g_malloc(sizeof (oui_info_t))
-(guint8 *)g_malloc(16 * sizeof(guint8))
-(guint32 *)g_malloc(sizeof(guint32)*2)
+(uint8_t *)g_malloc(16 * sizeof(uint8_t))
+(uint32_t *)g_malloc(sizeof(uint32_t)*2)
(struct imf_field *)g_malloc (sizeof (struct imf_field))
(rtspstat_t *)g_malloc( sizeof(rtspstat_t) )
(proto_data_t *)wmem_alloc(scope, sizeof(proto_data_t))
@@ -85,8 +85,8 @@ def test_replacements():
expected_output = """\
g_new0(if_info_t, 1)
g_new(oui_info_t, 1)
-g_new(guint8, 16)
-g_new(guint32, 2)
+g_new(uint8_t, 16)
+g_new(uint32_t, 2)
g_new(struct imf_field, 1)
g_new(rtspstat_t, 1)
wmem_new(scope, proto_data_t)
diff --git a/tools/eti2wireshark.py b/tools/eti2wireshark.py
index 98fb291a..fe11d644 100755
--- a/tools/eti2wireshark.py
+++ b/tools/eti2wireshark.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
-# Generate Wireshark Dissectors for eletronic trading/market data
+# Generate Wireshark Dissectors for electronic trading/market data
# protocols such as ETI/EOBI.
#
# Targets Wireshark 3.5 or later.
@@ -10,7 +10,6 @@
import argparse
-import itertools
import re
import sys
import xml.etree.ElementTree as ET
@@ -130,6 +129,10 @@ def gen_header(proto, desc, o=sys.stdout):
/* (Required to prevent [-Wmissing-prototypes] warnings */
void proto_reg_handoff_{proto}(void);
void proto_register_{proto}(void);
+
+static dissector_handle_t {proto}_handle;
+
+static int proto_{proto};
''', file=o)
@@ -232,21 +235,20 @@ def get_fields(st, dt):
return vs
def gen_field_handles(st, dt, proto, o=sys.stdout):
- print(f'''static expert_field ei_{proto}_counter_overflow = EI_INIT;
-static expert_field ei_{proto}_invalid_template = EI_INIT;
-static expert_field ei_{proto}_invalid_length = EI_INIT;''', file=o)
+ print(f'''static expert_field ei_{proto}_counter_overflow;
+static expert_field ei_{proto}_invalid_template;
+static expert_field ei_{proto}_invalid_length;''', file=o)
if not proto.startswith('eobi'):
- print(f'static expert_field ei_{proto}_unaligned = EI_INIT;', file=o)
- print(f'''static expert_field ei_{proto}_missing = EI_INIT;
-static expert_field ei_{proto}_overused = EI_INIT;
+ print(f'static expert_field ei_{proto}_unaligned;', file=o)
+ print(f'''static expert_field ei_{proto}_missing;
+static expert_field ei_{proto}_overused;
''', file=o)
vs = get_fields(st, dt)
- s = ', '.join('-1' for i in range(len(vs)))
- print(f'static int hf_{proto}[] = {{ {s} }};', file=o)
- print(f'''static int hf_{proto}_dscp_exec_summary = -1;
-static int hf_{proto}_dscp_improved = -1;
-static int hf_{proto}_dscp_widened = -1;''', file=o)
+ print(f'static int hf_{proto}[{len(vs)}];', file=o)
+ print(f'''static int hf_{proto}_dscp_exec_summary;
+static int hf_{proto}_dscp_improved;
+static int hf_{proto}_dscp_widened;''', file=o)
print('enum Field_Handle_Index {', file=o)
for i, (name, _) in enumerate(vs):
c = ' ' if i == 0 else ','
@@ -334,10 +336,9 @@ def gen_field_info(st, dt, n2enum, proto='eti', o=sys.stdout):
def gen_subtree_handles(st, proto='eti', o=sys.stdout):
ns = [ name for name, e in st.items() if e.get('type') != 'Message' ]
ns.sort()
- s = ', '.join('-1' for i in range(len(ns) + 1))
h = dict( (n, i) for i, n in enumerate(ns, 1) )
- print(f'static gint ett_{proto}[] = {{ {s} }};', file=o)
- print(f'static gint ett_{proto}_dscp = -1;', file=o)
+ print(f'static int ett_{proto}[{len(ns) + 1}];', file=o)
+ print(f'static int ett_{proto}_dscp;', file=o)
return h
@@ -345,7 +346,7 @@ def gen_subtree_array(st, proto='eti', o=sys.stdout):
n = sum(1 for name, e in st.items() if e.get('type') != 'Message')
n += 1
s = ', '.join(f'&ett_{proto}[{i}]' for i in range(n))
- print(f' static gint * const ett[] = {{ {s}, &ett_{proto}_dscp }};', file=o)
+ print(f' static int * const ett[] = {{ {s}, &ett_{proto}_dscp }};', file=o)
def gen_fields_table(st, dt, sh, o=sys.stdout):
@@ -380,7 +381,6 @@ def gen_fields_table(st, dt, sh, o=sys.stdout):
size = int(t.get('size')) if t is not None else 0
rep = ''
fh = f'{m.get("name").upper()}_FH_IDX'
- sub = ''
if is_padding(t):
print(f' {c} {{ ETI_PADDING, 0, {size}, 0, 0 }}', file=o)
elif is_fixed_point(t):
@@ -517,7 +517,7 @@ def gen_usage_table(min_templateid, n, ts, ams, o=sys.stdout):
# (cf. the uidx DISSECTOR_ASSER_CMPUINIT() before the switch statement)
# when the ETI_EOF of the message whose usage information comes last
# is reached
- print(f' , 0 // filler', file=o)
+ print(' , 0 // filler', file=o)
print(' };', file=o)
xs = [ '-1' ] * n
t2n = dict(ts)
@@ -622,13 +622,13 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi
{{
col_set_str(pinfo->cinfo, COL_PROTOCOL, "{proto.upper()}");
col_clear(pinfo->cinfo, COL_INFO);
- guint16 templateid = tvb_get_letohs(tvb, {template_off});
+ uint16_t templateid = tvb_get_letohs(tvb, {template_off});
const char *template_str = val_to_str_ext(templateid, &template_id_vals_ext, "Unknown {proto.upper()} template: 0x%04x");
- col_add_fstr(pinfo->cinfo, COL_INFO, "%s", template_str);
+ col_add_str(pinfo->cinfo, COL_INFO, template_str);
/* create display subtree for the protocol */
proto_item *ti = proto_tree_add_item(tree, proto_{proto}, tvb, 0, -1, ENC_NA);
- guint32 bodylen= {bl_fn}(tvb, 0);
+ uint32_t bodylen= {bl_fn}(tvb, 0);
proto_item_append_text(ti, ", %s (%" PRIu16 "), BodyLen: %u", template_str, templateid, bodylen);
proto_tree *root = proto_item_add_subtree(ti, ett_{proto}[0]);
''', file=o)
@@ -676,7 +676,7 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi
print(f''' int uidx = tid2uidx[templateid - {min_templateid}];
DISSECTOR_ASSERT_CMPINT(uidx, >=, 0);
- DISSECTOR_ASSERT_CMPUINT(((size_t)uidx), <, (sizeof usages / sizeof usages[0]));
+ DISSECTOR_ASSERT_CMPUINT(((size_t)uidx), <, array_length(usages));
''', file=o)
print(f''' int old_fidx = 0;
@@ -689,9 +689,9 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi
proto_tree *t = root;
while (top) {{
DISSECTOR_ASSERT_CMPINT(fidx, >=, 0);
- DISSECTOR_ASSERT_CMPUINT(((size_t)fidx), <, (sizeof fields / sizeof fields[0]));
+ DISSECTOR_ASSERT_CMPUINT(((size_t)fidx), <, array_length(fields));
DISSECTOR_ASSERT_CMPINT(uidx, >=, 0);
- DISSECTOR_ASSERT_CMPUINT(((size_t)uidx), <, (sizeof usages / sizeof usages[0]));
+ DISSECTOR_ASSERT_CMPUINT(((size_t)uidx), <, array_length(usages));
switch (fields[fidx].type) {{
case ETI_EOF:
@@ -713,7 +713,7 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi
break;
case ETI_VAR_STRUCT:
case ETI_STRUCT:
- DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, sizeof counter / sizeof counter[0]);
+ DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, array_length(counter));
repeats = fields[fidx].type == ETI_VAR_STRUCT ? counter[fields[fidx].counter_off] : 1;
if (repeats) {{
--repeats;
@@ -740,7 +740,7 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi
break;
case ETI_STRING:
{{
- guint8 c = tvb_get_guint8(tvb, off);
+ uint8_t c = tvb_get_uint8(tvb, off);
if (c)
proto_tree_add_item(t, hf_{proto}[fields[fidx].field_handle_idx], tvb, off, fields[fidx].size, ENC_ASCII);
else {{
@@ -754,20 +754,20 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi
++uidx;
break;
case ETI_VAR_STRING:
- DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, sizeof counter / sizeof counter[0]);
+ DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, array_length(counter));
proto_tree_add_item(t, hf_{proto}[fields[fidx].field_handle_idx], tvb, off, counter[fields[fidx].counter_off], ENC_ASCII);
off += counter[fields[fidx].counter_off];
++fidx;
++uidx;
break;
case ETI_COUNTER:
- DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, sizeof counter / sizeof counter[0]);
+ DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <, array_length(counter));
DISSECTOR_ASSERT_CMPUINT(fields[fidx].size, <=, 2);
{{
switch (fields[fidx].size) {{
case 1:
{{
- guint8 x = tvb_get_guint8(tvb, off);
+ uint8_t x = tvb_get_uint8(tvb, off);
if (x == UINT8_MAX) {{
proto_tree_add_uint_format_value(t, hf_{proto}[fields[fidx].field_handle_idx], tvb, off, fields[fidx].size, x, "NO_VALUE (0xff)");
counter[fields[fidx].counter_off] = 0;
@@ -784,7 +784,7 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi
break;
case 2:
{{
- guint16 x = tvb_get_letohs(tvb, off);
+ uint16_t x = tvb_get_letohs(tvb, off);
if (x == UINT16_MAX) {{
proto_tree_add_uint_format_value(t, hf_{proto}[fields[fidx].field_handle_idx], tvb, off, fields[fidx].size, x, "NO_VALUE (0xffff)");
counter[fields[fidx].counter_off] = 0;
@@ -839,7 +839,7 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi
DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, >, 0);
DISSECTOR_ASSERT_CMPUINT(fields[fidx].counter_off, <=, 16);
{{
- gint64 x = tvb_get_letohi64(tvb, off);
+ int64_t x = tvb_get_letohi64(tvb, off);
if (x == INT64_MIN) {{
proto_item *e = proto_tree_add_int64_format_value(t, hf_{proto}[fields[fidx].field_handle_idx], tvb, off, fields[fidx].size, x, "NO_VALUE (0x8000000000000000)");
if (!usages[uidx])
@@ -882,10 +882,10 @@ dissect_{proto}_message(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree, voi
''', file=o)
print(f'''/* determine PDU length of protocol {proto.upper()} */
-static guint
+static unsigned
get_{proto}_message_len(packet_info *pinfo _U_, tvbuff_t *tvb, int offset, void *data _U_)
{{
- return (guint){bl_fn}(tvb, offset);
+ return (unsigned){bl_fn}(tvb, offset);
}}
''', file=o)
@@ -903,7 +903,7 @@ dissect_{proto}(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree,
dissect_{proto}(tvbuff_t *tvb, packet_info *pinfo, proto_tree *tree,
void *data)
{{
- tcp_dissect_pdus(tvb, pinfo, tree, TRUE, 4 /* bytes to read for bodylen */,
+ tcp_dissect_pdus(tvb, pinfo, tree, true, 4 /* bytes to read for bodylen */,
get_{proto}_message_len, dissect_{proto}_message, data);
return tvb_captured_length(tvb);
}}
@@ -955,6 +955,8 @@ proto_register_{proto}(void)
print(' proto_register_subtree_array(ett, array_length(ett));', file=o)
if proto.startswith('eobi'):
print(f' proto_disable_by_default(proto_{proto});', file=o)
+
+ print(f'\n {proto}_handle = register_dissector("{proto}", dissect_{proto}, proto_{proto});', file=o)
print('}\n', file=o)
@@ -962,9 +964,6 @@ def gen_handoff_fn(proto, o=sys.stdout):
print(f'''void
proto_reg_handoff_{proto}(void)
{{
- dissector_handle_t {proto}_handle = create_dissector_handle(dissect_{proto},
- proto_{proto});
-
// cf. N7 Network Access Guide, e.g.
// https://www.xetra.com/xetra-en/technology/t7/system-documentation/release10-0/Release-10.0-2692700?frag=2692724
// https://www.xetra.com/resource/blob/2762078/388b727972b5122945eedf0e63c36920/data/N7-Network-Access-Guide-v2.0.59.pdf
@@ -1014,7 +1013,7 @@ proto_reg_handoff_{proto}(void)
56500, // Snapshot Boerse Frankfurt SIMU
56501 // Incremental Boerse Frankfurt SIMU
}};
- for (unsigned i = 0; i < sizeof ports / sizeof ports[0]; ++i)
+ for (unsigned i = 0; i < array_length(ports); ++i)
dissector_add_uint("udp.port", ports[i], {proto}_handle);''', file=o)
print('}', file=o)
@@ -1120,7 +1119,7 @@ def group_members(e, dt):
def parse_args():
- p = argparse.ArgumentParser(description='Generate Wireshark Dissector for ETI/EOBI style protocol specifictions')
+ p = argparse.ArgumentParser(description='Generate Wireshark Dissector for ETI/EOBI style protocol specifications')
p.add_argument('filename', help='protocol description XML file')
p.add_argument('--proto', default='eti',
help='short protocol name (default: %(default)s)')
@@ -1152,7 +1151,6 @@ def main():
ams = d.getroot().find('ApplicationMessages')
gen_header(proto, desc, o)
- print(f'static int proto_{proto} = -1;', file=o)
gen_field_handles(st, dt, proto, o)
n2enum = gen_enums(dt, ts, o)
gen_dissect_structs(o)
diff --git a/tools/fuzz-test.sh b/tools/fuzz-test.sh
index 7e8d1abc..110a2820 100755
--- a/tools/fuzz-test.sh
+++ b/tools/fuzz-test.sh
@@ -64,7 +64,7 @@ while getopts "2b:C:d:e:agp:P:o:t:U" OPTCHAR ; do
C) CONFIG_PROFILE="-C $OPTARG " ;;
d) TMP_DIR=$OPTARG ;;
e) ERR_PROB=$OPTARG ;;
- g) VALGRIND=1 ;;
+ g) VALGRIND=1 ; CHECK_UTF_8= ;;
p) MAX_PASSES=$OPTARG ;;
P) MIN_PLUGINS=$OPTARG ;;
o) CHANGE_OFFSET=$OPTARG ;;
diff --git a/tools/generate-bacnet-vendors.py b/tools/generate-bacnet-vendors.py
index 14fc5303..06d21e88 100755
--- a/tools/generate-bacnet-vendors.py
+++ b/tools/generate-bacnet-vendors.py
@@ -1,16 +1,29 @@
#!/usr/bin/env python3
+#
+# Wireshark - Network traffic analyzer
+# By Gerald Combs <gerald@wireshark.org>
+# Copyright 1998 Gerald Combs
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+'''Update the BACNET vendors list.
-'''
- Copyright 2023 Jaap Keuter <jaap.keuter@xs4all.nl>
- based on work by Anish Bhatt <anish@chelsio.com>
+generate-bacnet-vendors generates output containing BACNET vendor Identifiers.
+
+Copyright 2023 Jaap Keuter <jaap.keuter@xs4all.nl>
+based on work by Anish Bhatt <anish@chelsio.com>
-SPDX-License-Identifier: GPL-2.0-or-later
'''
import sys
import urllib.request, urllib.error, urllib.parse
from bs4 import BeautifulSoup
+def exit_msg(msg=None, status=1):
+ if msg is not None:
+ sys.stderr.write(msg + '\n\n')
+ sys.stderr.write(__doc__ + '\n')
+ sys.exit(status)
+
req_headers = { 'User-Agent': 'Wireshark generate-bacnet-vendors' }
try:
req = urllib.request.Request("https://bacnet.org/assigned-vendor-ids/", headers=req_headers)
diff --git a/tools/generate-dissector.py b/tools/generate-dissector.py
index 4d8ab37d..aee1d615 100755
--- a/tools/generate-dissector.py
+++ b/tools/generate-dissector.py
@@ -144,7 +144,7 @@ def print_header():
def print_trailer(args):
print("")
print("The skeleton for the dissector of the " + args.protoshortname + " protocol has been generated.")
- print("Please review/extend it to match your specific criterias.")
+ print("Please review/extend it to match your specific criteria.")
print("")
diff --git a/tools/generate-nl80211-fields.py b/tools/generate-nl80211-fields.py
index dfa8faaf..ddd42575 100755
--- a/tools/generate-nl80211-fields.py
+++ b/tools/generate-nl80211-fields.py
@@ -34,7 +34,10 @@ EXPORT_ENUMS = {
'nl80211_sta_flags': (None, None, None),
'nl80211_sta_p2p_ps_status': ('Attribute Value', 'FT_UINT8', None),
'nl80211_he_gi': (None, None, None),
+ 'nl80211_he_ltf': (None, None, None),
'nl80211_he_ru_alloc': (None, None, None),
+ 'nl80211_eht_gi': (None, None, None),
+ 'nl80211_eht_ru_alloc': (None, None, None),
'nl80211_rate_info': (None, None, None),
'nl80211_sta_bss_param': (None, None, None),
'nl80211_sta_info': (None, None, None),
@@ -65,6 +68,8 @@ EXPORT_ENUMS = {
'nl80211_key_mode': (None, None, None),
'nl80211_chan_width': ('Attribute Value', 'FT_UINT32', None),
'nl80211_bss_scan_width': ('Attribute Value', 'FT_UINT32', None),
+ 'nl80211_bss_use_for': (None, None, None),
+ 'nl80211_bss_cannot_use_reasons': (None, None, None),
'nl80211_bss': (None, None, None),
'nl80211_bss_status': ('Attribute Value', 'FT_UINT32', None),
'nl80211_auth_type': ('Attribute Value', 'FT_UINT32', None),
@@ -80,6 +85,9 @@ EXPORT_ENUMS = {
'nl80211_attr_cqm': (None, None, None),
'nl80211_cqm_rssi_threshold_event': (None, None, None),
'nl80211_tx_power_setting': ('Attribute Value', 'FT_UINT32', None),
+ 'nl80211_tid_config': (None, None, None),
+ 'nl80211_tx_rate_setting': (None, None, None),
+ 'nl80211_tid_config_attr': (None, None, None),
'nl80211_packet_pattern_attr': (None, None, None),
'nl80211_wowlan_triggers': (None, None, None),
'nl80211_wowlan_tcp_attrs': (None, None, None),
@@ -88,13 +96,13 @@ EXPORT_ENUMS = {
'nl80211_iface_limit_attrs': (None, None, None),
'nl80211_if_combination_attrs': (None, None, None),
'nl80211_plink_state': ('Attribute Value', 'FT_UINT8', None),
- 'plink_actions': ('Attribute Value', 'FT_UINT8', None),
+ 'nl80211_plink_action': ('Attribute Value', 'FT_UINT8', None),
'nl80211_rekey_data': (None, None, None),
'nl80211_hidden_ssid': (None, None, None),
'nl80211_sta_wme_attr': (None, None, None),
'nl80211_pmksa_candidate_attr': (None, None, None),
'nl80211_tdls_operation': ('Attribute Value', 'FT_UINT8', None),
- #Reserved for future use 'nl80211_ap_sme_features': (None, None, None),
+ 'nl80211_ap_sme_features': (None, None, None),
'nl80211_feature_flags': (None, None, None),
'nl80211_ext_feature_index': (None, None, None),
'nl80211_probe_resp_offload_support_attr': (None, None, None),
@@ -132,6 +140,18 @@ EXPORT_ENUMS = {
'nl80211_peer_measurement_ftm_failure_reasons': (None, None, None),
'nl80211_peer_measurement_ftm_resp': (None, None, None),
'nl80211_obss_pd_attributes': (None, None, None),
+ 'nl80211_bss_color_attributes': (None, None, None),
+ 'nl80211_iftype_akm_attributes': (None, None, None),
+ 'nl80211_fils_discovery_attributes': (None, None, None),
+ 'nl80211_unsol_bcast_probe_resp_attributes': (None, None, None),
+ 'nl80211_sae_pwe_mechanism': (None, None, None),
+ 'nl80211_sar_type': (None, None, None),
+ 'nl80211_sar_attrs': (None, None, None),
+ 'nl80211_sar_specs_attrs': (None, None, None),
+ 'nl80211_mbssid_config_attributes': (None, None, None),
+ 'nl80211_ap_settings_flags': (None, None, None),
+ 'nl80211_wiphy_radio_attrs': (None, None, None),
+ 'nl80211_wiphy_radio_freq_range': (None, None, None),
}
# File to be patched
SOURCE_FILE = "epan/dissectors/packet-netlink-nl80211.c"
@@ -169,7 +189,7 @@ def remove_prefix(prefix, text):
return text
def make_hf_defs(name, indent):
- code = 'static gint hf_%s = -1;' % name
+ code = 'static int hf_%s;' % name
return code
def make_hf(name, indent):
@@ -202,7 +222,7 @@ def make_hf(name, indent):
return code
def make_ett_defs(name, indent):
- code = 'static gint ett_%s = -1;' % name
+ code = 'static int ett_%s;' % name
return code
def make_ett(name, indent):
diff --git a/tools/generate-sysdig-event.py b/tools/generate-sysdig-event.py
index 67419c8e..0cec2d3a 100755
--- a/tools/generate-sysdig-event.py
+++ b/tools/generate-sysdig-event.py
@@ -46,7 +46,7 @@ def get_url_lines(url):
except urllib.error.URLError as err:
exit_msg("URL error fetching {0}: {1}".format(url, err.reason))
except OSError as err:
- exit_msg("OS error fetching {0}".format(url, err.strerror))
+ exit_msg("OS error fetching {0}: {1}".format(url, err.strerror))
except Exception:
exit_msg("Unexpected error:", sys.exc_info()[0])
@@ -274,7 +274,7 @@ def main():
strip_re_l.append(re.compile('^\s*{\s*&hf_param_.*},')) # Must all be on one line
for strip_re in strip_re_l:
- dissector_lines = [l for l in dissector_lines if not strip_re.search(l)]
+ dissector_lines = [line for line in dissector_lines if not strip_re.search(line)]
# Find our value strings
value_string_re = re.compile('static\s+const\s+value_string\s+([A-Za-z0-9_]+_vals)')
@@ -290,7 +290,7 @@ def main():
header_fields_re = re.compile('/\*\s+' + header_fields_c, flags = re.IGNORECASE)
header_fields_l = []
for hf_name in sorted(hf_d.keys()):
- header_fields_l.append('static int {} = -1;'.format(hf_name))
+ header_fields_l.append('static int {};'.format(hf_name))
event_names_c = 'Event names'
event_names_re = re.compile('/\*\s+' + event_names_c, flags = re.IGNORECASE)
diff --git a/tools/generate_authors.py b/tools/generate_authors.py
index a74ef1c4..a536b850 100755
--- a/tools/generate_authors.py
+++ b/tools/generate_authors.py
@@ -16,10 +16,8 @@
# SPDX-License-Identifier: GPL-2.0-or-later
import argparse
-import io
import re
import subprocess
-import sys
def get_git_authors():
@@ -29,7 +27,7 @@ def get_git_authors():
'''
GIT_LINE_REGEX = r"^\s*\d+\s+([^<]*)\s*<([^>]*)>"
cmd = "git --no-pager shortlog --email --summary HEAD".split(' ')
- # check_output is used for Python 3.4 compatability
+ # check_output is used for Python 3.4 compatibility
git_cmd_output = subprocess.check_output(cmd, universal_newlines=True, encoding='utf-8')
git_authors = []
@@ -107,7 +105,7 @@ def generate_git_contributors_text(contributors_emails, git_authors_emails):
return "\n".join(output_lines)
-# Read authos file until we find gitlog entries, then stop
+# Read authors file until we find gitlog entries, then stop
def read_authors(parsed_args):
lines = []
with open(parsed_args.authors[0], 'r', encoding='utf-8') as fh:
diff --git a/tools/indexcap.py b/tools/indexcap.py
index d18e76f6..31f4e397 100755
--- a/tools/indexcap.py
+++ b/tools/indexcap.py
@@ -207,7 +207,7 @@ def main():
if options.dissect_files and not options.list_all_files and not options.list_all_proto_files:
parser.error("--list-all-files or --list-all-proto-files must be specified")
- if options.dissect_files and not options.compare_dir is None:
+ if options.dissect_files and options.compare_dir is not None:
parser.error("--dissect-files and --compare-dir cannot be specified at the same time")
index_file_name = args.pop(0)
@@ -236,15 +236,15 @@ def main():
print(indexed_files)
tshark_bin = find_tshark_executable(options.bin_dir)
- if not tshark_bin is None:
+ if tshark_bin is not None:
print("tshark: %s [FOUND]" % tshark_bin)
else:
print("tshark: %s [MISSING]" % tshark_bin)
exit(1)
- if not options.compare_dir is None:
+ if options.compare_dir is not None:
tshark_cmp = find_tshark_executable(options.compare_dir)
- if not tshark_cmp is None:
+ if tshark_cmp is not None:
print("tshark: %s [FOUND]" % tshark_cmp)
else:
print("tshark: %s [MISSING]" % tshark_cmp)
diff --git a/tools/json2pcap/json2pcap.py b/tools/json2pcap/json2pcap.py
index 2a059ad0..baa64b64 100755
--- a/tools/json2pcap/json2pcap.py
+++ b/tools/json2pcap/json2pcap.py
@@ -28,7 +28,7 @@ from scapy import all as scapy
class AnonymizedField:
'''
The Anonymization field object specifying anonymization
- :filed arg: field name
+ :field arg: field name
:type arg: anonymization type [0 masking 0xff, 1 anonymization shake_256]
:start arg: If specified, the anonymization starts at given byte number
:end arg: If specified, the anonymization ends at given byte number
diff --git a/tools/lemon/CMakeLists.txt b/tools/lemon/CMakeLists.txt
index 529eeae1..b7ba7543 100644
--- a/tools/lemon/CMakeLists.txt
+++ b/tools/lemon/CMakeLists.txt
@@ -12,6 +12,8 @@ add_executable(lemon lemon.c)
if(DEFINED LEMON_C_COMPILER)
set(CMAKE_C_COMPILER "${LEMON_C_COMPILER}")
set(CMAKE_C_FLAGS "")
+ set(CMAKE_EXE_LINKER_FLAGS "")
+ set(CMAKE_SYSROOT "")
endif()
# To keep lemon.c as close to upstream as possible disable all warnings
diff --git a/tools/lemon/lemon.c b/tools/lemon/lemon.c
index 869ac580..c4b48811 100644
--- a/tools/lemon/lemon.c
+++ b/tools/lemon/lemon.c
@@ -2106,6 +2106,7 @@ int OptInit(char **a, struct s_options *o, FILE *err)
if( g_argv && *g_argv && op ){
int i;
for(i=1; g_argv[i]; i++){
+ if( strcmp(g_argv[i],"--")==0 ) break;
if( g_argv[i][0]=='+' || g_argv[i][0]=='-' ){
errcnt += handleflags(i,err);
}else if( strchr(g_argv[i],'=') ){
@@ -5847,7 +5848,7 @@ int Configtable_insert(struct config *data)
newnp->from = &(array.ht[h]);
array.ht[h] = newnp;
}
- /* free(x4a->tbl); // This code was originall written for 16-bit machines.
+ /* free(x4a->tbl); // This code was originally written for 16-bit machines.
** on modern machines, don't worry about freeing this trival amount of
** memory. */
*x4a = array;
diff --git a/tools/lemon/patches/01-lemon-dashdash.patch b/tools/lemon/patches/01-lemon-dashdash.patch
new file mode 100644
index 00000000..1c204574
--- /dev/null
+++ b/tools/lemon/patches/01-lemon-dashdash.patch
@@ -0,0 +1,14 @@
+Don't try to parse flags and options that are after "--". This makes it
+possible to prevent a filename path with an '=' in it from being processed
+as an option.
+SPDX-License-Identifier: CC0-1.0
+--- a/lemon.c
++++ b/lemon.c
+@@ -2106,6 +2106,7 @@ int OptInit(char **a, struct s_options *o, FILE *err)
+ if( g_argv && *g_argv && op ){
+ int i;
+ for(i=1; g_argv[i]; i++){
++ if( strcmp(g_argv[i],"--")==0 ) break;
+ if( g_argv[i][0]=='+' || g_argv[i][0]=='-' ){
+ errcnt += handleflags(i,err);
+ }else if( strchr(g_argv[i],'=') ){
diff --git a/tools/macos-setup-brew.sh b/tools/macos-setup-brew.sh
index 910f7e86..da1a0aa9 100755
--- a/tools/macos-setup-brew.sh
+++ b/tools/macos-setup-brew.sh
@@ -23,6 +23,7 @@ function print_usage() {
printf "\\t--install-dmg-deps: install packages required to build the .dmg file\\n"
printf "\\t--install-sparkle-deps: install the Sparkle automatic updater\\n"
printf "\\t--install-all: install everything\\n"
+ printf "\\t--install-logray: install everything to compile Logray and falco bridge\\n"
printf "\\t[other]: other options are passed as-is to apt\\n"
}
@@ -46,6 +47,7 @@ INSTALL_DOC_DEPS=0
INSTALL_DMG_DEPS=0
INSTALL_SPARKLE_DEPS=0
INSTALL_TEST_DEPS=0
+INSTALL_LOGRAY=0
OPTIONS=()
for arg; do
case $arg in
@@ -68,6 +70,9 @@ for arg; do
--install-test-deps)
INSTALL_TEST_DEPS=1
;;
+ --install-logray)
+ INSTALL_LOGRAY=1
+ ;;
--install-all)
INSTALL_OPTIONAL=1
INSTALL_DOC_DEPS=1
@@ -108,11 +113,15 @@ ADDITIONAL_LIST=(
libsmi
libssh
libxml2
+ lua
lz4
minizip
+ minizip-ng
+ opencore-amr
opus
snappy
spandsp
+ zlib-ng
zstd
)
@@ -122,6 +131,12 @@ DOC_DEPS_LIST=(
docbook-xsl
)
+LOGRAY_LIST=(
+ jsoncpp
+ onetbb
+ re2
+)
+
ACTUAL_LIST=( "${BUILD_LIST[@]}" "${REQUIRED_LIST[@]}" )
# Now arrange for optional support libraries
@@ -133,16 +148,16 @@ if [ $INSTALL_DOC_DEPS -ne 0 ] ; then
ACTUAL_LIST+=( "${DOC_DEPS_LIST[@]}" )
fi
+if [ $INSTALL_LOGRAY -ne 0 ] ; then
+ ACTUAL_LIST+=( "${LOGRAY_LIST[@]}" )
+fi
+
if (( ${#OPTIONS[@]} != 0 )); then
ACTUAL_LIST+=( "${OPTIONS[@]}" )
fi
install_formulae "${ACTUAL_LIST[@]}"
-if [ $INSTALL_OPTIONAL -ne 0 ] ; then
- brew install lua@5.1 || printf "Lua 5.1 installation failed.\\n"
-fi
-
if [ $INSTALL_DMG_DEPS -ne 0 ] ; then
printf "Sorry, you'll have to install dmgbuild yourself for the time being.\\n"
# pip3 install dmgbuild
@@ -157,6 +172,28 @@ if [ $INSTALL_TEST_DEPS -ne 0 ] ; then
# pip3 install pytest pytest-xdist
fi
+if [ $INSTALL_LOGRAY -ne 0 ] ; then
+ FALCO_LIBS_VERSION=0.17.1
+ if [ "$FALCO_LIBS_VERSION" ] && [ ! -f "falco-libs-$FALCO_LIBS_VERSION-done" ] ; then
+ echo "Downloading, building, and installing libsinsp and libscap:"
+ [ -f "falco-libs-$FALCO_LIBS_VERSION.tar.gz" ] || curl -L -O --remote-header-name "https://github.com/falcosecurity/libs/archive/refs/tags/$FALCO_LIBS_VERSION.tar.gz"
+ mv "libs-$FALCO_LIBS_VERSION.tar.gz" "falco-libs-$FALCO_LIBS_VERSION.tar.gz"
+ tar -xf "falco-libs-$FALCO_LIBS_VERSION.tar.gz"
+ mv "libs-$FALCO_LIBS_VERSION" "falco-libs-$FALCO_LIBS_VERSION"
+ cd "falco-libs-$FALCO_LIBS_VERSION"
+ patch -p1 < "../tools/macos-setup-patches/falco-uthash_h-install.patch"
+ mkdir build_dir
+ cd build_dir
+ cmake -DBUILD_SHARED_LIBS=ON -DMINIMAL_BUILD=ON -DCREATE_TEST_TARGETS=OFF \
+ -DUSE_BUNDLED_DEPS=ON -DUSE_BUNDLED_CARES=OFF -DUSE_BUNDLED_ZLIB=OFF \
+ -DUSE_BUNDLED_JSONCPP=OFF -DUSE_BUNDLED_TBB=OFF -DUSE_BUNDLED_RE2=OFF \
+ ..
+ make
+ sudo make install
+ cd ../..
+ fi
+fi
+
# Uncomment to add PNG compression utilities used by compress-pngs:
# brew install advancecomp optipng oxipng pngcrush
diff --git a/tools/macos-setup-patches/falco-include-dirs.patch b/tools/macos-setup-patches/falco-include-dirs.patch
new file mode 100644
index 00000000..68f5f228
--- /dev/null
+++ b/tools/macos-setup-patches/falco-include-dirs.patch
@@ -0,0 +1,15 @@
+758865ee6 update(cmake): Don't add build directories to our pc files
+diff --git a/userspace/libsinsp/CMakeLists.txt b/userspace/libsinsp/CMakeLists.txt
+index 6104603e8..1989ea3fb 100644
+--- a/userspace/libsinsp/CMakeLists.txt
++++ b/userspace/libsinsp/CMakeLists.txt
+@@ -335,6 +335,9 @@ endforeach()
+ # Build our pkg-config "Cflags:" flags.
+ set(SINSP_PKG_CONFIG_INCLUDES "")
+ foreach(sinsp_include_directory ${LIBSINSP_INCLUDE_DIRS})
++ if (${sinsp_include_directory} MATCHES "^${CMAKE_SOURCE_DIR}" OR ${sinsp_include_directory} MATCHES "^${CMAKE_BINARY_DIR}")
++ continue()
++ endif()
+ list(APPEND SINSP_PKG_CONFIG_INCLUDES -I${sinsp_include_directory})
+ endforeach()
+
diff --git a/tools/macos-setup-patches/falco-uthash_h-install.patch b/tools/macos-setup-patches/falco-uthash_h-install.patch
new file mode 100644
index 00000000..7e1f7855
--- /dev/null
+++ b/tools/macos-setup-patches/falco-uthash_h-install.patch
@@ -0,0 +1,9 @@
+--- falco-libs-0.14.1/cmake/modules/libscap.cmake.orig 2024-03-25 22:46:40
++++ falco-libs-0.14.1/cmake/modules/libscap.cmake 2024-03-25 22:46:10
+@@ -139,5 +139,6 @@
+ FILES_MATCHING PATTERN "*.h")
+ install(FILES ${PROJECT_BINARY_DIR}/libscap/scap_config.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${LIBS_PACKAGE_NAME}/libscap)
+ install(FILES ${PROJECT_BINARY_DIR}/libscap/scap_strl_config.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${LIBS_PACKAGE_NAME}/libscap)
++install(FILES ${PROJECT_BINARY_DIR}/uthash-prefix/src/uthash/src/uthash.h DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/${LIBS_PACKAGE_NAME}/libscap)
+ install(FILES ${PROJECT_BINARY_DIR}/libscap/libscap.pc DESTINATION ${CMAKE_INSTALL_LIBDIR}/pkgconfig)
+ endif()
diff --git a/tools/macos-setup-patches/glib-pkgconfig.patch b/tools/macos-setup-patches/glib-pkgconfig.patch
new file mode 100644
index 00000000..07761460
--- /dev/null
+++ b/tools/macos-setup-patches/glib-pkgconfig.patch
@@ -0,0 +1,10 @@
+--- gobject-2.0.pc.in.orig 2011-12-30 22:08:27.000000000 +0100
++++ gobject-2.0.pc.in 2011-12-30 22:09:06.000000000 +0100
+@@ -7,6 +7,6 @@
+ Description: GLib Type, Object, Parameter and Signal Library
+ Requires: glib-2.0
+ Version: @VERSION@
+-Libs: -L${libdir} -lgobject-2.0
++Libs: -L${libdir} -lgobject-2.0 -lffi
+ Libs.private: @LIBFFI_LIBS@
+ Cflags:
diff --git a/tools/macos-setup-patches/gnutls-pkgconfig.patch b/tools/macos-setup-patches/gnutls-pkgconfig.patch
new file mode 100644
index 00000000..f0ad93ec
--- /dev/null
+++ b/tools/macos-setup-patches/gnutls-pkgconfig.patch
@@ -0,0 +1,8 @@
+--- gnutls.pc.in.orig 2012-05-27 02:08:48.000000000 +0200
++++ gnutls.pc.in 2012-05-27 02:11:39.000000000 +0200
+@@ -21,5 +21,4 @@
+ Version: @VERSION@
+ Libs: -L${libdir} -lgnutls
+ Libs.private: @LTLIBGCRYPT@ @LTLIBNETTLE@ @NETTLE_LIBS@ @GNUTLS_ZLIB_LIBS_PRIVATE@
+-@GNUTLS_REQUIRES_PRIVATE@
+ Cflags: -I${includedir}
diff --git a/tools/macos-setup-patches/qt-fix-pc-file b/tools/macos-setup-patches/qt-fix-pc-file
new file mode 100755
index 00000000..fa9ba54b
--- /dev/null
+++ b/tools/macos-setup-patches/qt-fix-pc-file
@@ -0,0 +1,24 @@
+#! /bin/sh
+#
+# Fix a Qt .pc file's flags.
+#
+# Wireshark - Network traffic analyzer
+# By Gerald Combs <gerald@wireshark.org>
+# Copyright 2014 Gerald Combs
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# Fix a single Qt .pc file to provide the right flags; a -F flag is
+# needed to point to the directory under which the frameworks are
+# placed, and a -I flag needs to point to the directory where
+# the include files for the component in question are placed in
+# the framework's directory hierarchy, rather than to where the
+# include files *would* be if the component *weren't* distributed
+# as a framework.
+#
+ed - "$1" <<EOF
+H
+/^Cflags: /s;-I\${includedir}/\([A-Za-z0-9]*\);-I\${libdir}/\1.framework/Versions/5/Headers;
+/^Cflags: /s;;Cflags: -F\${libdir} ;
+w
+q
diff --git a/tools/macos-setup-patches/qt-fix-pc-files b/tools/macos-setup-patches/qt-fix-pc-files
new file mode 100755
index 00000000..d30edb07
--- /dev/null
+++ b/tools/macos-setup-patches/qt-fix-pc-files
@@ -0,0 +1,21 @@
+#! /bin/sh
+#
+# Fix the .pc files for versions of Qt installed from binary packages.
+#
+# Wireshark - Network traffic analyzer
+# By Gerald Combs <gerald@wireshark.org>
+# Copyright 2014 Gerald Combs
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+# See bug QTBUG-35256 for the full painful story. Shorter version:
+# the macOS Qt packages provide the Qt components as frameworks, but
+# the .pc files don't generate the right CFLAGS/CXXFLAGS to make
+# that work, so autoconf doesn't work correctly.
+#
+if [ "$#" != 1 ]
+then
+    echo "Usage: qt-fix-pc-files <top-level Qt directory>" 1>&2
+ exit 1
+fi
+find "$1" -name "*.pc" -exec tools/macos-setup-patches/qt-fix-pc-file {} ";"
diff --git a/tools/macos-setup-patches/snappy-signed.patch b/tools/macos-setup-patches/snappy-signed.patch
new file mode 100644
index 00000000..0018fcc4
--- /dev/null
+++ b/tools/macos-setup-patches/snappy-signed.patch
@@ -0,0 +1,11 @@
+--- snappy.cc.orig 2023-09-14 01:04:05
++++ snappy.cc 2023-09-14 01:04:28
+@@ -1290,7 +1290,7 @@
+ DeferMemCopy(&deferred_src, &deferred_length, from, len);
+ }
+ } while (ip < ip_limit_min_slop &&
+- (op + deferred_length) < op_limit_min_slop);
++ static_cast<ptrdiff_t>(op + deferred_length) < op_limit_min_slop);
+ exit:
+ ip--;
+ assert(ip <= ip_limit);
diff --git a/tools/macos-setup-patches/spandsp-configure-patch b/tools/macos-setup-patches/spandsp-configure-patch
new file mode 100644
index 00000000..5a898f99
--- /dev/null
+++ b/tools/macos-setup-patches/spandsp-configure-patch
@@ -0,0 +1,53 @@
+*** configure.dist 2014-06-04 07:28:14.000000000 -0700
+--- configure 2017-08-07 00:16:39.000000000 -0700
+***************
+*** 19658,19664 ****
+
+ case "${ax_cv_c_compiler_vendor}" in
+ gnu)
+! COMP_VENDOR_CFLAGS="-std=gnu99 -ffast-math -Wall -Wunused-variable -Wunused-but-set-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS"
+ if test "$enable_avx" = "yes" ; then
+ COMP_VENDOR_CFLAGS="-mavx $COMP_VENDOR_CFLAGS"
+ fi
+--- 19658,19664 ----
+
+ case "${ax_cv_c_compiler_vendor}" in
+ gnu)
+! COMP_VENDOR_CFLAGS="-std=gnu99 -ffast-math -Wall -Wunused-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS"
+ if test "$enable_avx" = "yes" ; then
+ COMP_VENDOR_CFLAGS="-mavx $COMP_VENDOR_CFLAGS"
+ fi
+***************
+*** 19733,19739 ****
+
+ ;;
+ intel)
+! COMP_VENDOR_CFLAGS="-std=c99 -D_POSIX_C_SOURCE=2 -D_GNU_SOURCE=1 -Wall -Wunused-variable -Wunused-but-set-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS"
+ if test "$enable_avx" = "yes" ; then
+ COMP_VENDOR_CFLAGS="-mavx $COMP_VENDOR_CFLAGS"
+ fi
+--- 19733,19739 ----
+
+ ;;
+ intel)
+! COMP_VENDOR_CFLAGS="-std=c99 -D_POSIX_C_SOURCE=2 -D_GNU_SOURCE=1 -Wall -Wunused-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS"
+ if test "$enable_avx" = "yes" ; then
+ COMP_VENDOR_CFLAGS="-mavx $COMP_VENDOR_CFLAGS"
+ fi
+***************
+*** 19767,19773 ****
+ COMP_VENDOR_LDFLAGS=
+ ;;
+ *)
+! COMP_VENDOR_CFLAGS="-std=c99 -Wall -Wunused-variable -Wunused-but-set-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS"
+ COMP_VENDOR_LDFLAGS=
+ ;;
+ esac
+--- 19767,19773 ----
+ COMP_VENDOR_LDFLAGS=
+ ;;
+ *)
+! COMP_VENDOR_CFLAGS="-std=c99 -Wall -Wunused-variable -Wwrite-strings -Wstrict-prototypes -Wmissing-prototypes $COMP_VENDOR_CFLAGS"
+ COMP_VENDOR_LDFLAGS=
+ ;;
+ esac
diff --git a/tools/macos-setup.sh b/tools/macos-setup.sh
index 0017ffcf..13cb4547 100755
--- a/tools/macos-setup.sh
+++ b/tools/macos-setup.sh
@@ -10,29 +10,31 @@
#
# SPDX-License-Identifier: GPL-2.0-or-later
+set -e
+
shopt -s extglob
#
# Get the major version of Darwin, so we can check the major macOS
# version.
#
-DARWIN_MAJOR_VERSION=`uname -r | sed 's/\([0-9]*\).*/\1/'`
+DARWIN_MAJOR_VERSION=$(uname -r | sed 's/\([0-9]*\).*/\1/')
#
-# The minimum supported version of Qt is 5.9, so the minimum supported version
-# of macOS is OS X 10.10 (Yosemite), aka Darwin 14.0.
+# The minimum supported version of Qt is 5.11, so the minimum supported version
+# of macOS is OS X 10.11 (El Capitan), aka Darwin 15.0.
#
-if [[ $DARWIN_MAJOR_VERSION -lt 14 ]]; then
- echo "This script does not support any versions of macOS before Yosemite" 1>&2
+if [[ $DARWIN_MAJOR_VERSION -lt 15 ]]; then
+ echo "This script does not support any versions of macOS before El Capitan" 1>&2
exit 1
fi
#
# Get the processor architecture of Darwin. Currently supported: arm, i386
#
-DARWIN_PROCESSOR_ARCH=`uname -p`
+DARWIN_PROCESSOR_ARCH=$(uname -m)
-if [ "$DARWIN_PROCESSOR_ARCH" != "arm" -a "$DARWIN_PROCESSOR_ARCH" != "i386" ]; then
+if [ "$DARWIN_PROCESSOR_ARCH" != "arm64" ] && [ "$DARWIN_PROCESSOR_ARCH" != "x86_64" ]; then
echo "This script does not support this processor architecture" 1>&2
exit 1
fi
@@ -43,18 +45,19 @@ fi
#
# We use curl, but older versions of curl in older macOS releases can't
-# handle some sites - including the xz site.
+# handle some sites - including the xz site. We also use the --fail-with-body
+# flag, which was added in curl 7.76.0.
#
-# If the version of curl in the system is older than 7.54.0, download
+# If the version of curl in the system is older than 7.76.0, download
# curl and install it.
#
-current_curl_version=`curl --version | sed -n 's/curl \([0-9.]*\) .*/\1/p'`
-current_curl_major_version="`expr $current_curl_version : '\([0-9][0-9]*\).*'`"
-current_curl_minor_version="`expr $current_curl_version : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
+current_curl_version=$( curl --version | sed -n 's/curl \([0-9.]*\) .*/\1/p' )
+current_curl_major_version="$( expr "$current_curl_version" : '\([0-9][0-9]*\).*' )"
+current_curl_minor_version="$(expr "$current_curl_version" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
if [[ $current_curl_major_version -lt 7 ||
($current_curl_major_version -eq 7 &&
$current_curl_minor_version -lt 54) ]]; then
- CURL_VERSION=${CURL_VERSION-7.60.0}
+ CURL_VERSION=${CURL_VERSION-7.88.1}
fi
#
@@ -73,17 +76,6 @@ fi
XZ_VERSION=5.2.5
#
-# Some packages need lzip to unpack their current source.
-#
-LZIP_VERSION=1.21
-
-#
-# The version of libPCRE on Catalina is insufficient to build glib due to
-# missing UTF-8 support.
-#
-PCRE_VERSION=8.45
-
-#
# CMake is required to do the build - and to build some of the
# dependencies.
#
@@ -94,29 +86,31 @@ CMAKE_VERSION=${CMAKE_VERSION-3.21.4}
# claimed to build faster than make.
# Comment it out if you don't want it.
#
-NINJA_VERSION=${NINJA_VERSION-1.10.2}
+NINJA_VERSION=${NINJA_VERSION-1.12.1}
+NINJA_SHA256=89a287444b5b3e98f88a945afa50ce937b8ffd1dcc59c555ad9b1baf855298c9
#
# The following libraries and tools are required even to build only TShark.
#
-GETTEXT_VERSION=0.21
-GLIB_VERSION=2.76.6
+GETTEXT_VERSION=0.22.5
+GLIB_VERSION=2.80.3
if [ "$GLIB_VERSION" ]; then
- GLIB_MAJOR_VERSION="`expr $GLIB_VERSION : '\([0-9][0-9]*\).*'`"
- GLIB_MINOR_VERSION="`expr $GLIB_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
- GLIB_DOTDOT_VERSION="`expr $GLIB_VERSION : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
- GLIB_MAJOR_MINOR_VERSION=$GLIB_MAJOR_VERSION.$GLIB_MINOR_VERSION
- GLIB_MAJOR_MINOR_DOTDOT_VERSION=$GLIB_MAJOR_VERSION.$GLIB_MINOR_VERSION.$GLIB_DOTDOT_VERSION
+ GLIB_MAJOR_VERSION="$( expr $GLIB_VERSION : '\([0-9][0-9]*\).*' )"
+ GLIB_MINOR_VERSION="$( expr $GLIB_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
+# Unused?
+# GLIB_DOTDOT_VERSION="$( expr $GLIB_VERSION : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
+# GLIB_MAJOR_MINOR_VERSION=$GLIB_MAJOR_VERSION.$GLIB_MINOR_VERSION
+# GLIB_MAJOR_MINOR_DOTDOT_VERSION=$GLIB_MAJOR_VERSION.$GLIB_MINOR_VERSION.$GLIB_DOTDOT_VERSION
fi
PKG_CONFIG_VERSION=0.29.2
#
# libgpg-error is required for libgcrypt.
#
-LIBGPG_ERROR_VERSION=1.39
+LIBGPG_ERROR_VERSION=1.47
#
# libgcrypt is required.
#
-LIBGCRYPT_VERSION=1.8.7
+LIBGCRYPT_VERSION=1.10.2
#
# libpcre2 is required.
#
@@ -135,9 +129,9 @@ PCRE2_VERSION=10.39
QT_VERSION=${QT_VERSION-6.2.4}
if [ "$QT_VERSION" ]; then
- QT_MAJOR_VERSION="`expr $QT_VERSION : '\([0-9][0-9]*\).*'`"
- QT_MINOR_VERSION="`expr $QT_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
- QT_DOTDOT_VERSION="`expr $QT_VERSION : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
+ QT_MAJOR_VERSION="$( expr "$QT_VERSION" : '\([0-9][0-9]*\).*' )"
+ QT_MINOR_VERSION="$( expr "$QT_VERSION" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
+ QT_DOTDOT_VERSION="$( expr "$QT_VERSION" : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
QT_MAJOR_MINOR_VERSION=$QT_MAJOR_VERSION.$QT_MINOR_VERSION
QT_MAJOR_MINOR_DOTDOT_VERSION=$QT_MAJOR_VERSION.$QT_MINOR_VERSION.$QT_DOTDOT_VERSION
fi
@@ -148,15 +142,16 @@ fi
# the optional libraries are required by other optional libraries.
#
LIBSMI_VERSION=0.4.8
-GNUTLS_VERSION=3.7.8
+GNUTLS_VERSION=3.8.4
+GNUTLS_SHA256=2bea4e154794f3f00180fa2a5c51fe8b005ac7a31cd58bd44cdfa7f36ebc3a9b
if [ "$GNUTLS_VERSION" ]; then
#
# We'll be building GnuTLS, so we may need some additional libraries.
# We assume GnuTLS can work with Nettle; newer versions *only* use
# Nettle, not libgcrypt.
#
- GNUTLS_MAJOR_VERSION="`expr $GNUTLS_VERSION : '\([0-9][0-9]*\).*'`"
- GNUTLS_MINOR_VERSION="`expr $GNUTLS_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
+ GNUTLS_MAJOR_VERSION="$( expr $GNUTLS_VERSION : '\([0-9][0-9]*\).*' )"
+ GNUTLS_MINOR_VERSION="$( expr $GNUTLS_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
NETTLE_VERSION=3.9.1
#
@@ -166,27 +161,28 @@ if [ "$GNUTLS_VERSION" ]; then
#
# And p11-kit
- P11KIT_VERSION=0.25.0
+ P11KIT_VERSION=0.25.3
# Which requires libtasn1
LIBTASN1_VERSION=4.19.0
fi
-# Use 5.2.4, not 5.3, for now; lua_bitop.c hasn't been ported to 5.3
-# yet, and we need to check for compatibility issues (we'd want Lua
-# scripts to work with 5.1, 5.2, and 5.3, as long as they only use Lua
-# features present in all three versions)
-LUA_VERSION=5.2.4
-SNAPPY_VERSION=1.1.10
-ZSTD_VERSION=1.5.5
+# lua_bitop.c has been ported to 5.3 and 5.4 so use the latest release.
+# We may still need to check for compatibility issues (we'd want Lua
+# scripts to work with 5.1 through 5.4, as long as they only use Lua
+# features present in all versions)
+LUA_VERSION=5.4.6
+SNAPPY_VERSION=1.2.1
+ZSTD_VERSION=1.5.6
+ZLIBNG_VERSION=2.1.6
LIBXML2_VERSION=2.11.5
LZ4_VERSION=1.9.4
SBC_VERSION=2.0
-CARES_VERSION=1.19.1
+CARES_VERSION=1.31.0
LIBSSH_VERSION=0.10.5
# mmdbresolve
-MAXMINDDB_VERSION=1.4.3
-NGHTTP2_VERSION=1.56.0
-NGHTTP3_VERSION=0.15.0
+MAXMINDDB_VERSION=1.9.1
+NGHTTP2_VERSION=1.62.1
+NGHTTP3_VERSION=1.1.0
SPANDSP_VERSION=0.0.6
SPEEXDSP_VERSION=1.2.1
if [ "$SPANDSP_VERSION" ]; then
@@ -198,8 +194,19 @@ fi
BCG729_VERSION=1.1.1
# libilbc 3.0.0 & later link with absiel, which is released under Apache 2.0
ILBC_VERSION=2.0.2
+OPENCORE_AMR_VERSION=0.1.6
+OPENCORE_AMR_SHA256=483eb4061088e2b34b358e47540b5d495a96cd468e361050fae615b1809dc4a1
OPUS_VERSION=1.4
+# Falco libs (libsinsp and libscap) and their dependencies. Unset for now.
+#FALCO_LIBS_VERSION=0.17.1
+if [ "$FALCO_LIBS_VERSION" ] ; then
+ JSONCPP_VERSION=1.9.5
+ ONETBB_VERSION=2021.11.0
+ # 2023-06-01 and later require Abseil.
+ RE2_VERSION=2022-06-01
+fi
+
#
# Is /usr/bin/python3 a working version of Python? It may be, as it
# might be a wrapper that runs the Python 3 that's part of Xcode.
@@ -214,20 +221,27 @@ else
#
# No - install a Python package.
#
- PYTHON3_VERSION=3.9.5
+ PYTHON3_VERSION=3.12.1
fi
BROTLI_VERSION=1.0.9
# minizip
+MINIZIPNG_VERSION=4.0.7
ZLIB_VERSION=1.3
# Uncomment to enable automatic updates using Sparkle
-#SPARKLE_VERSION=2.1.0
+#SPARKLE_VERSION=2.2.2
#
# Asciidoctor is required to build the documentation.
#
+# As the Asciidoctor Homebrew formula shows, managing all of the various
+# dependencies can become quite hairy:
+# https://github.com/Homebrew/homebrew-core/blob/master/Formula/a/asciidoctor.rb
+# Maybe we should install a JRE and use AsciidoctorJ instead?
ASCIIDOCTOR_VERSION=${ASCIIDOCTOR_VERSION-2.0.16}
ASCIIDOCTORPDF_VERSION=${ASCIIDOCTORPDF_VERSION-1.6.1}
+# css_parser 1.13 and later require Ruby 2.7
+CSS_PARSER_VERSION=${CSS_PARSER_VERSION-1.12.0}
#
# GNU autotools. They're not supplied with the macOS versions we
# support, and we currently use them for minizip.
@@ -237,35 +251,35 @@ AUTOMAKE_VERSION=1.16.5
LIBTOOL_VERSION=2.4.6
install_curl() {
- if [ "$CURL_VERSION" -a ! -f curl-$CURL_VERSION-done ] ; then
+ if [ "$CURL_VERSION" ] && [ ! -f "curl-$CURL_VERSION-done" ] ; then
echo "Downloading, building, and installing curl:"
- [ -f curl-$CURL_VERSION.tar.bz2 ] || curl -L -O https://curl.haxx.se/download/curl-$CURL_VERSION.tar.bz2 || exit 1
+ [ -f "curl-$CURL_VERSION.tar.bz2" ] || curl --fail --location --remote-name "https://curl.haxx.se/download/curl-$CURL_VERSION.tar.bz2"
$no_build && echo "Skipping installation" && return
- bzcat curl-$CURL_VERSION.tar.bz2 | tar xf - || exit 1
- cd curl-$CURL_VERSION
- ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ bzcat "curl-$CURL_VERSION.tar.bz2" | tar xf -
+ cd "curl-$CURL_VERSION"
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
- touch curl-$CURL_VERSION-done
+ touch "curl-$CURL_VERSION-done"
fi
}
uninstall_curl() {
- if [ ! -z "$installed_curl_version" ] ; then
+ if [ -n "$installed_curl_version" ] ; then
echo "Uninstalling curl:"
- cd curl-$installed_curl_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ cd "curl-$installed_curl_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
- rm curl-$installed_curl_version-done
+ rm "curl-$installed_curl_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
- rm -rf curl-$installed_curl_version
- rm -rf curl-$installed_curl_version.tar.bz2
+ rm -rf "curl-$installed_curl_version"
+ rm -rf "curl-$installed_curl_version.tar.bz2"
fi
installed_curl_version=""
@@ -273,112 +287,83 @@ uninstall_curl() {
}
install_xz() {
- if [ "$XZ_VERSION" -a ! -f xz-$XZ_VERSION-done ] ; then
+ if [ "$XZ_VERSION" ] && [ ! -f xz-$XZ_VERSION-done ] ; then
echo "Downloading, building, and installing xz:"
- [ -f xz-$XZ_VERSION.tar.bz2 ] || curl -L -O https://tukaani.org/xz/xz-$XZ_VERSION.tar.bz2 || exit 1
+ [ -f xz-$XZ_VERSION.tar.bz2 ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://tukaani.org/xz/xz-$XZ_VERSION.tar.bz2
$no_build && echo "Skipping installation" && return
- bzcat xz-$XZ_VERSION.tar.bz2 | tar xf - || exit 1
+ bzcat xz-$XZ_VERSION.tar.bz2 | tar xf -
cd xz-$XZ_VERSION
#
# This builds and installs liblzma, which libxml2 uses, and
- # Wireshark uses liblzma, so we need to build this with
+ # Wireshark uses libxml2, so we need to build this with
# all the minimum-deployment-version and SDK stuff.
#
- CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch xz-$XZ_VERSION-done
fi
}
uninstall_xz() {
- if [ ! -z "$installed_xz_version" ] ; then
+ if [ -n "$installed_xz_version" ] ; then
echo "Uninstalling xz:"
- cd xz-$installed_xz_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ cd "xz-$installed_xz_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
- rm xz-$installed_xz_version-done
+ rm "xz-$installed_xz_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
- rm -rf xz-$installed_xz_version
- rm -rf xz-$installed_xz_version.tar.bz2
+ rm -rf "xz-$installed_xz_version"
+ rm -rf "xz-$installed_xz_version.tar.bz2"
fi
installed_xz_version=""
fi
}
-install_lzip() {
- if [ "$LZIP_VERSION" -a ! -f lzip-$LZIP_VERSION-done ] ; then
- echo "Downloading, building, and installing lzip:"
- [ -f lzip-$LZIP_VERSION.tar.gz ] || curl -L -O https://download.savannah.gnu.org/releases/lzip/lzip-$LZIP_VERSION.tar.gz || exit 1
- $no_build && echo "Skipping installation" && return
- gzcat lzip-$LZIP_VERSION.tar.gz | tar xf - || exit 1
- cd lzip-$LZIP_VERSION
- ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
- cd ..
- touch lzip-$LZIP_VERSION-done
- fi
-}
-
uninstall_lzip() {
- if [ ! -z "$installed_lzip_version" ] ; then
+ if [ -n "$installed_lzip_version" ] ; then
echo "Uninstalling lzip:"
- cd lzip-$installed_lzip_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ cd "lzip-$installed_lzip_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
- rm lzip-$installed_lzip_version-done
+ rm "lzip-$installed_lzip_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
- rm -rf lzip-$installed_lzip_version
- rm -rf lzip-$installed_lzip_version.tar.gz
+ rm -rf "lzip-$installed_lzip_version"
+ rm -rf "lzip-$installed_lzip_version.tar.gz"
fi
installed_lzip_version=""
fi
}
-install_pcre() {
- if [ "$PCRE_VERSION" -a ! -f pcre-$PCRE_VERSION-done ] ; then
- echo "Downloading, building, and installing pcre:"
- [ -f pcre-$PCRE_VERSION.tar.bz2 ] || curl -L -O https://sourceforge.net/projects/pcre/files/pcre/$PCRE_VERSION/pcre-$PCRE_VERSION.tar.bz2 || exit 1
- $no_build && echo "Skipping installation" && return
- bzcat pcre-$PCRE_VERSION.tar.bz2 | tar xf - || exit 1
- cd pcre-$PCRE_VERSION
- ./configure --enable-unicode-properties || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
- cd ..
- touch pcre-$PCRE_VERSION-done
- fi
-}
-
uninstall_pcre() {
- if [ ! -z "$installed_pcre_version" ] ; then
- echo "Uninstalling pcre:"
- cd pcre-$installed_pcre_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ if [ -n "$installed_pcre_version" ] ; then
+ echo "Uninstalling leftover pcre:"
+ cd "pcre-$installed_pcre_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
- rm pcre-$installed_pcre_version-done
+ rm "pcre-$installed_pcre_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
- rm -rf pcre-$installed_pcre_version
- rm -rf pcre-$installed_pcre_version.tar.bz2
+ rm -rf "pcre-$installed_pcre_version"
+ rm -rf "pcre-$installed_pcre_version.tar.bz2"
fi
installed_pcre_version=""
@@ -386,20 +371,19 @@ uninstall_pcre() {
}
install_pcre2() {
- if [ "$PCRE2_VERSION" -a ! -f "pcre2-$PCRE2_VERSION-done" ] ; then
+ if [ "$PCRE2_VERSION" ] && [ ! -f "pcre2-$PCRE2_VERSION-done" ] ; then
echo "Downloading, building, and installing pcre2:"
- [ -f "pcre2-$PCRE2_VERSION.tar.bz2" ] || curl -L -O "https://github.com/PhilipHazel/pcre2/releases/download/pcre2-$PCRE2_VERSION/pcre2-10.39.tar.bz2" || exit 1
+        [ -f "pcre2-$PCRE2_VERSION.tar.bz2" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://github.com/PhilipHazel/pcre2/releases/download/pcre2-$PCRE2_VERSION/pcre2-$PCRE2_VERSION.tar.bz2"
$no_build && echo "Skipping installation" && return
- bzcat "pcre2-$PCRE2_VERSION.tar.bz2" | tar xf - || exit 1
+ bzcat "pcre2-$PCRE2_VERSION.tar.bz2" | tar xf -
cd "pcre2-$PCRE2_VERSION"
mkdir build_dir
cd build_dir
# https://github.com/Homebrew/homebrew-core/blob/master/Formula/pcre2.rb
# https://github.com/microsoft/vcpkg/blob/master/ports/pcre2/portfile.cmake
- MACOSX_DEPLOYMENT_TARGET=$min_osx_target SDKROOT="$SDKPATH" \
- $DO_CMAKE -DBUILD_STATIC_LIBS=OFF -DBUILD_SHARED_LIBS=ON -DPCRE2_SUPPORT_JIT=ON -DPCRE2_SUPPORT_UNICODE=ON .. || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ "${DO_CMAKE[@]}" -DBUILD_STATIC_LIBS=OFF -DBUILD_SHARED_LIBS=ON -DPCRE2_SUPPORT_JIT=ON -DPCRE2_SUPPORT_UNICODE=ON ..
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ../..
touch "pcre2-$PCRE2_VERSION-done"
fi
@@ -412,7 +396,7 @@ uninstall_pcre2() {
while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat "pcre2-$installed_pcre2_version/build_dir/install_manifest.txt"; echo)
rm "pcre2-$installed_pcre2_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
@@ -427,20 +411,20 @@ uninstall_pcre2() {
install_autoconf() {
if [ "$AUTOCONF_VERSION" -a ! -f autoconf-$AUTOCONF_VERSION-done ] ; then
echo "Downloading, building and installing GNU autoconf..."
- [ -f autoconf-$AUTOCONF_VERSION.tar.xz ] || curl -L -O ftp://ftp.gnu.org/gnu/autoconf/autoconf-$AUTOCONF_VERSION.tar.xz || exit 1
+ [ -f autoconf-$AUTOCONF_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://ftp.gnu.org/gnu/autoconf/autoconf-$AUTOCONF_VERSION.tar.xz
$no_build && echo "Skipping installation" && return
- xzcat autoconf-$AUTOCONF_VERSION.tar.xz | tar xf - || exit 1
+ xzcat autoconf-$AUTOCONF_VERSION.tar.xz | tar xf -
cd autoconf-$AUTOCONF_VERSION
- ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch autoconf-$AUTOCONF_VERSION-done
fi
}
uninstall_autoconf() {
- if [ ! -z "$installed_autoconf_version" ] ; then
+ if [ -n "$installed_autoconf_version" ] ; then
#
# automake and libtool depend on this, so uninstall them.
#
@@ -449,8 +433,8 @@ uninstall_autoconf() {
echo "Uninstalling GNU autoconf:"
cd autoconf-$installed_autoconf_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm autoconf-$installed_autoconf_version-done
@@ -469,20 +453,20 @@ uninstall_autoconf() {
install_automake() {
if [ "$AUTOMAKE_VERSION" -a ! -f automake-$AUTOMAKE_VERSION-done ] ; then
echo "Downloading, building and installing GNU automake..."
- [ -f automake-$AUTOMAKE_VERSION.tar.xz ] || curl -L -O ftp://ftp.gnu.org/gnu/automake/automake-$AUTOMAKE_VERSION.tar.xz || exit 1
+ [ -f automake-$AUTOMAKE_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://ftp.gnu.org/gnu/automake/automake-$AUTOMAKE_VERSION.tar.xz
$no_build && echo "Skipping installation" && return
- xzcat automake-$AUTOMAKE_VERSION.tar.xz | tar xf - || exit 1
+ xzcat automake-$AUTOMAKE_VERSION.tar.xz | tar xf -
cd automake-$AUTOMAKE_VERSION
- ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch automake-$AUTOMAKE_VERSION-done
fi
}
uninstall_automake() {
- if [ ! -z "$installed_automake_version" ] ; then
+ if [ -n "$installed_automake_version" ] ; then
#
# libtool depends on this(?), so uninstall it.
#
@@ -490,8 +474,8 @@ uninstall_automake() {
echo "Uninstalling GNU automake:"
cd automake-$installed_automake_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm automake-$installed_automake_version-done
@@ -510,26 +494,26 @@ uninstall_automake() {
install_libtool() {
if [ "$LIBTOOL_VERSION" -a ! -f libtool-$LIBTOOL_VERSION-done ] ; then
echo "Downloading, building and installing GNU libtool..."
- [ -f libtool-$LIBTOOL_VERSION.tar.xz ] || curl -L -O ftp://ftp.gnu.org/gnu/libtool/libtool-$LIBTOOL_VERSION.tar.xz || exit 1
+ [ -f libtool-$LIBTOOL_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://ftp.gnu.org/gnu/libtool/libtool-$LIBTOOL_VERSION.tar.xz
$no_build && echo "Skipping installation" && return
- xzcat libtool-$LIBTOOL_VERSION.tar.xz | tar xf - || exit 1
+ xzcat libtool-$LIBTOOL_VERSION.tar.xz | tar xf -
cd libtool-$LIBTOOL_VERSION
- ./configure --program-prefix=g || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ ./configure "${CONFIGURE_OPTS[@]}" --program-prefix=g
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch libtool-$LIBTOOL_VERSION-done
fi
}
uninstall_libtool() {
- if [ ! -z "$installed_libtool_version" ] ; then
+ if [ -n "$installed_libtool_version" ] ; then
echo "Uninstalling GNU libtool:"
cd libtool-$installed_libtool_version
- $DO_MV /usr/local/bin/glibtool /usr/local/bin/libtool
- $DO_MV /usr/local/bin/glibtoolize /usr/local/bin/libtoolize
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MV "$installation_prefix/bin/glibtool" "$installation_prefix/bin/libtool"
+ $DO_MV "$installation_prefix/bin/glibtoolize" "$installation_prefix/bin/libtoolize"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm libtool-$installed_libtool_version-done
@@ -546,27 +530,28 @@ uninstall_libtool() {
}
install_ninja() {
- if [ "$NINJA_VERSION" -a ! -f ninja-$NINJA_VERSION-done ] ; then
+ if [ "$NINJA_VERSION" ] && [ ! -f "ninja-$NINJA_VERSION-done" ] ; then
echo "Downloading and installing Ninja:"
#
# Download the zipball, unpack it, and move the binary to
- # /usr/local/bin.
+ # $installation_prefix/bin.
#
- [ -f ninja-mac-v$NINJA_VERSION.zip ] || curl -L -o ninja-mac-v$NINJA_VERSION.zip https://github.com/ninja-build/ninja/releases/download/v$NINJA_VERSION/ninja-mac.zip || exit 1
+ [ -f "ninja-mac-v$NINJA_VERSION.zip" ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" "ninja-mac-v$NINJA_VERSION.zip" https://github.com/ninja-build/ninja/releases/download/v$NINJA_VERSION/ninja-mac.zip
+ echo "$NINJA_SHA256 ninja-mac-v$NINJA_VERSION.zip" | shasum --algorithm 256 --check
$no_build && echo "Skipping installation" && return
- unzip ninja-mac-v$NINJA_VERSION.zip
- sudo mv ninja /usr/local/bin
- touch ninja-$NINJA_VERSION-done
+ unzip "ninja-mac-v$NINJA_VERSION.zip"
+ sudo mv ninja "$installation_prefix/bin"
+ touch "ninja-$NINJA_VERSION-done"
fi
}
uninstall_ninja() {
- if [ ! -z "$installed_ninja_version" ]; then
+ if [ -n "$installed_ninja_version" ]; then
echo "Uninstalling Ninja:"
- sudo rm /usr/local/bin/ninja
- rm ninja-$installed_ninja_version-done
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
- rm -f ninja-mac-v$installed_ninja_version.zip
+ $DO_RM "$installation_prefix/bin/ninja"
+ rm "ninja-$installed_ninja_version-done"
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
+ rm -f "ninja-mac-v$installed_ninja_version.zip"
fi
installed_ninja_version=""
@@ -576,13 +561,14 @@ uninstall_ninja() {
install_asciidoctor() {
if [ ! -f asciidoctor-${ASCIIDOCTOR_VERSION}-done ]; then
echo "Downloading and installing Asciidoctor:"
+ $no_build && echo "Skipping installation" && return
sudo gem install -V asciidoctor --version "=${ASCIIDOCTOR_VERSION}"
touch asciidoctor-${ASCIIDOCTOR_VERSION}-done
fi
}
uninstall_asciidoctor() {
- if [ ! -z "$installed_asciidoctor_version" ]; then
+ if [ -n "$installed_asciidoctor_version" ]; then
echo "Uninstalling Asciidoctor:"
sudo gem uninstall -V asciidoctor --version "=${installed_asciidoctor_version}"
rm asciidoctor-$installed_asciidoctor_version-done
@@ -604,13 +590,15 @@ install_asciidoctorpdf() {
## record them for uninstallation
## ttfunk, pdf-core, prawn, prawn-table, Ascii85, ruby-rc4, hashery, afm, pdf-reader, prawn-templates, public_suffix, addressable, css_parser, prawn-svg, prawn-icon, safe_yaml, thread_safe, polyglot, treetop, asciidoctor-pdf
echo "Downloading and installing Asciidoctor-pdf:"
+ $no_build && echo "Skipping installation" && return
+ sudo gem install -V css_parser --version "=${CSS_PARSER_VERSION}"
sudo gem install -V asciidoctor-pdf --version "=${ASCIIDOCTORPDF_VERSION}"
touch asciidoctorpdf-${ASCIIDOCTORPDF_VERSION}-done
fi
}
uninstall_asciidoctorpdf() {
- if [ ! -z "$installed_asciidoctorpdf_version" ]; then
+ if [ -n "$installed_asciidoctorpdf_version" ]; then
echo "Uninstalling Asciidoctor:"
sudo gem uninstall -V asciidoctor-pdf --version "=${installed_asciidoctorpdf_version}"
## XXX uninstall dependencies
@@ -630,8 +618,9 @@ uninstall_asciidoctorpdf() {
install_cmake() {
if [ ! -f cmake-$CMAKE_VERSION-done ]; then
echo "Downloading and installing CMake:"
- CMAKE_MAJOR_VERSION="`expr $CMAKE_VERSION : '\([0-9][0-9]*\).*'`"
- CMAKE_MINOR_VERSION="`expr $CMAKE_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
+ $no_build && echo "Skipping installation" && return
+ CMAKE_MAJOR_VERSION="$( expr "$CMAKE_VERSION" : '\([0-9][0-9]*\).*' )"
+ CMAKE_MINOR_VERSION="$( expr "$CMAKE_VERSION" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
CMAKE_MAJOR_MINOR_VERSION=$CMAKE_MAJOR_VERSION.$CMAKE_MINOR_VERSION
#
@@ -654,7 +643,7 @@ install_cmake() {
# 3.19.3 and later have a macos-universal DMG for 10.13 and later,
# and a macos10.10-universal DMG for 10.10 and later.
#
- if [ "$CMAKE_MINOR_VERSION" -lt 5 ]; then
+ if [ "$CMAKE_MINOR_VERSION" -lt 10 ]; then
echo "CMake $CMAKE_VERSION" is too old 1>&2
elif [ "$CMAKE_MINOR_VERSION" -lt 19 -o \
"$CMAKE_VERSION" = 3.19.0 -o \
@@ -666,17 +655,17 @@ install_cmake() {
else
type="macos10.0-universal"
fi
- [ -f cmake-$CMAKE_VERSION-$type.dmg ] || curl -L -O https://cmake.org/files/v$CMAKE_MAJOR_MINOR_VERSION/cmake-$CMAKE_VERSION-$type.dmg || exit 1
+ [ -f cmake-$CMAKE_VERSION-$type.dmg ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://cmake.org/files/v$CMAKE_MAJOR_MINOR_VERSION/cmake-$CMAKE_VERSION-$type.dmg
$no_build && echo "Skipping installation" && return
- sudo hdiutil attach cmake-$CMAKE_VERSION-$type.dmg || exit 1
- sudo ditto /Volumes/cmake-$CMAKE_VERSION-$type/CMake.app /Applications/CMake.app || exit 1
+ sudo hdiutil attach cmake-$CMAKE_VERSION-$type.dmg
+ sudo ditto /Volumes/cmake-$CMAKE_VERSION-$type/CMake.app /Applications/CMake.app
#
- # Plant the appropriate symbolic links in /usr/local/bin.
+ # Plant the appropriate symbolic links in $installation_prefix/bin.
# It's a drag-install, so there's no installer to make them,
# and the CMake code to put them in place is lame, as
#
- # 1) it defaults to /usr/bin, not /usr/local/bin;
+ # 1) it defaults to /usr/bin, not $installation_prefix/bin;
# 2) it doesn't request the necessary root privileges;
# 3) it can't be run from the command line;
#
@@ -684,7 +673,7 @@ install_cmake() {
#
for i in ccmake cmake cmake-gui cmakexbuild cpack ctest
do
- sudo ln -s /Applications/CMake.app/Contents/bin/$i /usr/local/bin/$i
+ sudo ln -s /Applications/CMake.app/Contents/bin/$i "$installation_prefix/bin/$i"
done
sudo hdiutil detach /Volumes/cmake-$CMAKE_VERSION-$type
;;
@@ -697,9 +686,9 @@ install_cmake() {
}
uninstall_cmake() {
- if [ ! -z "$installed_cmake_version" ]; then
+ if [ -n "$installed_cmake_version" ]; then
echo "Uninstalling CMake:"
- installed_cmake_major_version="`expr $installed_cmake_version : '\([0-9][0-9]*\).*'`"
+ installed_cmake_major_version="$( expr "$installed_cmake_version" : '\([0-9][0-9]*\).*' )"
case "$installed_cmake_major_version" in
0|1|2)
@@ -710,7 +699,7 @@ uninstall_cmake() {
sudo rm -rf /Applications/CMake.app
for i in ccmake cmake cmake-gui cmakexbuild cpack ctest
do
- sudo rm -f /usr/local/bin/$i
+ sudo rm -f "$installation_prefix/bin/$i"
done
rm cmake-$installed_cmake_version-done
;;
@@ -739,6 +728,7 @@ install_meson() {
# We have it.
:
else
+ $no_build && echo "Skipping installation" && return
sudo pip3 install meson
touch meson-done
fi
@@ -758,7 +748,7 @@ install_pytest() {
#
# Install pytest with pip3 if we don't have it already.
#
- if python3 -m pytest --version >/dev/null 2>&1
+ if python3 -m pytest --version &> /dev/null || pytest --version &> /dev/null
then
# We have it.
:
@@ -781,122 +771,21 @@ uninstall_pytest() {
install_gettext() {
if [ ! -f gettext-$GETTEXT_VERSION-done ] ; then
echo "Downloading, building, and installing GNU gettext:"
- [ -f gettext-$GETTEXT_VERSION.tar.gz ] || curl -L -O https://ftp.gnu.org/pub/gnu/gettext/gettext-$GETTEXT_VERSION.tar.gz || exit 1
+ [ -f gettext-$GETTEXT_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://ftp.gnu.org/pub/gnu/gettext/gettext-$GETTEXT_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat gettext-$GETTEXT_VERSION.tar.gz | tar xf - || exit 1
+ gzcat gettext-$GETTEXT_VERSION.tar.gz | tar xf -
cd gettext-$GETTEXT_VERSION
-
- #
- # This is annoying.
- #
- # GNU gettext's configuration script checks for the presence of an
- # implementation of iconv(). Not only does it check whether iconv()
- # is available, *but* it checks for certain behavior *not* specified
- # by POSIX that the GNU implementation provides, namely that an
- # attempt to convert the UTF-8 for the EURO SYMBOL chaaracter to
- # ISO 8859-1 results in an error.
- #
- # macOS, prior to Sierra, provided the GNU iconv library (as it's
- # a POSIX API).
- #
- # Sierra appears to have picked up an implementation from FreeBSD
- # (that implementation originated with the CITRUS project:
- #
- # http://citrus.bsdclub.org
- #
- # with additional work done to integrate it into NetBSD, and then
- # adopted by FreeBSD with further work done).
- #
- # That implementation does *NOT* return an error in that case; instead,
- # it transliterates the EURO SYMBOL to "EUR".
- #
- # Both behaviors conform to POSIX.
- #
- # This causes GNU gettext's configure script to conclude that it
- # should not say iconv() is available. That, unfortunately, causes
- # the build to fail with a linking error when trying to build
- # libtextstyle (a library for which we have no use, that is offered
- # as a separate library by the GNU project:
- #
- # https://www.gnu.org/software/gettext/libtextstyle/manual/libtextstyle.html
- #
- # and that is presumably bundled in GNU gettext because some gettext
- # tool depends on it). The failure appears to be due to:
- #
- # libtextstyle's exported symbols file is generated from a
- # template and a script that passes through only symbols
- # that appear in a header file that declares the symbol
- # as extern;
- #
- # one such header file declares iconv_ostream_create, but only
- # if HAVE_ICONV is defined.
- #
- # the source file that defines iconv_ostream_create does so
- # only if HAVE_ICONV is defined;
- #
- # the aforementioned script pays *NO ATTENTION* to #ifdefs,
- # so it will include iconv_ostream_create in the list of
- # symbols to export regardless of whether a working iconv()
- # was found;
- #
- # the linker failing because it was told to export a symbol
- # that doesn't exist.
- #
- # This is a collection of multiple messes:
- #
- # 1) not all versions of iconv() defaulting to "return an error
- # if the target character set doesn't have a character that
- # corresponds to the source character" and not offering a way
- # to force that behavior;
- #
- # 2) either some parts of GNU gettext - and libraries bundled
- # with it, for some mysterious reason - depending on the GNU
- # behavior rather than assuming only what POSIX specifies, and
- # the configure script checking for the GNU behavior and not
- # setting HAVE_ICONV if it's not found;
- #
- # 3) the process for building the exported symbols file not
- # removing symbols that won't exist in the build due to
- # a "working" iconv() not being found;
- #
- # 4) the file that would define iconv_ostream_create() not
- # defining as an always-failing stub if HAVE_ICONV isn't
- # defined;
- #
- # 5) macOS's linker failing if a symbol is specified in an
- # exported symbols file but not found, while other linkers
- # just ignore it? (I add this because I'm a bit surprised
- # that this has not been fixed, as I suspect it would fail
- # on FreeBSD and possibly NetBSD as well, as I think their
- # iconv()s also default to transliterating rather than failing
- # if an input character has no corresponding character in
- # the output encoding.)
- #
- # The Homebrew folks are aware of this and have reported it to
- # Apple as a "feedback", for what that's worth:
- #
- # https://github.com/Homebrew/homebrew-core/commit/af3b4da5a096db3d9ee885e99ed29b33dec1f1c4
- #
- # We adopt their fix, which is to run the configure script with
- # "am_cv_func_iconv_works=y" as one of the arguments if it's
- # running on Sonoma; in at least one test, doing so on Ventura
- # caused the build to fail.
- #
- if [[ $DARWIN_MAJOR_VERSION -ge 23 ]]; then
- workaround_arg="am_cv_func_iconv_works=y"
- else
- workaround_arg=
- fi
- CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure $workaround_arg || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch gettext-$GETTEXT_VERSION-done
fi
}
uninstall_gettext() {
- if [ ! -z "$installed_gettext_version" ] ; then
+ if [ -n "$installed_gettext_version" ] ; then
#
# GLib depends on this, so uninstall it.
#
@@ -904,8 +793,8 @@ uninstall_gettext() {
echo "Uninstalling GNU gettext:"
cd gettext-$installed_gettext_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm gettext-$installed_gettext_version-done
@@ -924,24 +813,24 @@ uninstall_gettext() {
install_pkg_config() {
if [ ! -f pkg-config-$PKG_CONFIG_VERSION-done ] ; then
echo "Downloading, building, and installing pkg-config:"
- [ -f pkg-config-$PKG_CONFIG_VERSION.tar.gz ] || curl -L -O https://pkgconfig.freedesktop.org/releases/pkg-config-$PKG_CONFIG_VERSION.tar.gz || exit 1
+ [ -f pkg-config-$PKG_CONFIG_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://pkgconfig.freedesktop.org/releases/pkg-config-$PKG_CONFIG_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat pkg-config-$PKG_CONFIG_VERSION.tar.gz | tar xf - || exit 1
+ gzcat pkg-config-$PKG_CONFIG_VERSION.tar.gz | tar xf -
cd pkg-config-$PKG_CONFIG_VERSION
- ./configure --with-internal-glib || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS -Wno-int-conversion" ./configure "${CONFIGURE_OPTS[@]}" --with-internal-glib
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch pkg-config-$PKG_CONFIG_VERSION-done
fi
}
uninstall_pkg_config() {
- if [ ! -z "$installed_pkg_config_version" ] ; then
+ if [ -n "$installed_pkg_config_version" ] ; then
echo "Uninstalling pkg-config:"
cd pkg-config-$installed_pkg_config_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm pkg-config-$installed_pkg_config_version-done
@@ -960,13 +849,13 @@ uninstall_pkg_config() {
install_glib() {
if [ ! -f glib-$GLIB_VERSION-done ] ; then
echo "Downloading, building, and installing GLib:"
- glib_dir=`expr $GLIB_VERSION : '\([0-9][0-9]*\.[0-9][0-9]*\).*'`
+ glib_dir=$( expr "$GLIB_VERSION" : '\([0-9][0-9]*\.[0-9][0-9]*\).*' )
#
# Starting with GLib 2.28.8, xz-compressed tarballs are available.
#
- [ -f glib-$GLIB_VERSION.tar.xz ] || curl -L -O https://download.gnome.org/sources/glib/$glib_dir/glib-$GLIB_VERSION.tar.xz || exit 1
+ [ -f glib-$GLIB_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://download.gnome.org/sources/glib/$glib_dir/glib-$GLIB_VERSION.tar.xz"
$no_build && echo "Skipping installation" && return
- xzcat glib-$GLIB_VERSION.tar.xz | tar xf - || exit 1
+ xzcat glib-$GLIB_VERSION.tar.xz | tar xf -
cd glib-$GLIB_VERSION
#
# First, determine where the system include files are.
@@ -979,7 +868,7 @@ install_glib() {
#
# We need this for several things we do later.
#
- includedir=`SDKROOT="$SDKPATH" xcrun --show-sdk-path 2>/dev/null`/usr/include
+ includedir=$( SDKROOT="$SDKPATH" xcrun --show-sdk-path 2>/dev/null )/usr/include
#
# GLib's configuration procedure, whether autotools-based or
# Meson-based, really likes to use pkg-config to find libraries,
@@ -993,7 +882,7 @@ install_glib() {
#
# So, if we have a system-provided libffi, but pkg-config
# doesn't find libffi, we construct a .pc file for that libffi,
- # and install it in /usr/local/lib/pkgconfig.
+ # and install it in $installation_prefix/lib/pkgconfig.
#
# First, check whether pkg-config finds libffi but thinks its
# header files are in a non-existent directory. That probaby
@@ -1008,17 +897,17 @@ install_glib() {
if pkg-config libffi ; then
# We have a .pc file for libffi; what does it say the
# include directory is?
- incldir=`pkg-config --variable=includedir libffi`
- if [ ! -z "$incldir" -a ! -d "$incldir" ] ; then
+ incldir=$( pkg-config --variable=includedir libffi )
+ if [ -n "$incldir" -a ! -d "$incldir" ] ; then
# Bogus - remove it, assuming
- $DO_RM /usr/local/lib/pkgconfig/libffi.pc
+ $DO_RM "$installation_prefix/lib/pkgconfig/libffi.pc"
fi
fi
if pkg-config libffi ; then
# It found libffi; no need to install a .pc file, and we
# don't want to overwrite what's there already.
:
- elif [ ! -e $includedir/ffi/ffi.h ] ; then
+ elif [ ! -e "$includedir"/ffi/ffi.h ] ; then
# We don't appear to have libffi as part of the system, so
# let the configuration process figure out what to do.
#
@@ -1054,7 +943,7 @@ install_glib() {
# to the standard output, but running the last process in
# the pipeline as root won't allow the shell that's
# *running* it to open the .pc file if we don't have write
- # permission on /usr/local/lib/pkgconfig, so we need a
+ # permission on $installation_prefix/lib/pkgconfig, so we need a
# program that creates a file and then reads from the
# standard input and writes to that file. UN*Xes have a
# program that does that; it's called "tee". :-)
@@ -1062,7 +951,7 @@ install_glib() {
# However, it *also* writes the file to the standard output,
# so we redirect that to /dev/null when we run it.
#
- cat <<"EOF" | sed "s;@INCLUDEDIR@;$includedir;" | $DO_TEE_TO_PC_FILE /usr/local/lib/pkgconfig/libffi.pc >/dev/null
+ cat <<"EOF" | sed "s;@INCLUDEDIR@;$includedir;" | $DO_TEE_TO_PC_FILE "$installation_prefix/lib/pkgconfig/libffi.pc" >/dev/null
prefix=/usr
libdir=${prefix}/lib
includedir=@INCLUDEDIR@
@@ -1087,11 +976,11 @@ EOF
*)
case $GLIB_MINOR_VERSION in
- [0-9]|1[0-9]|2[0-9]|3[0-7])
+ [0-9]|1[0-9]|2[0-9]|3[0-9]|4[0-9])
echo "GLib $GLIB_VERSION" is too old 1>&2
;;
- 3[8-9]|4[0-9]|5[0-8])
+ 5[0-8])
if [ ! -f ./configure ]; then
LIBTOOLIZE=glibtoolize ./autogen.sh
fi
@@ -1111,15 +1000,17 @@ EOF
#
# https://bugzilla.gnome.org/show_bug.cgi?id=691608#c25
#
- if grep -qs '#define.*MACOSX' $includedir/ffi/fficonfig.h
+ if grep -qs '#define.*MACOSX' "$includedir/ffi/fficonfig.h"
then
# It's defined, nothing to do
- CFLAGS="$CFLAGS -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
+ CFLAGS="$CFLAGS -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
else
- CFLAGS="$CFLAGS -DMACOSX -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -DMACOSX -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
+ CFLAGS="$CFLAGS -DMACOSX -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -DMACOSX -Wno-format-nonliteral $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
fi
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
;;
59|[6-9][0-9]|[1-9][0-9][0-9])
@@ -1132,9 +1023,10 @@ EOF
#
# https://gitlab.gnome.org/GNOME/glib/-/issues/2902
#
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" $MESON -Dtests=false _build || exit 1
- ninja $MAKE_BUILD_OPTS -C _build || exit 1
- $DO_NINJA_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ $MESON -Dprefix="$installation_prefix" -Dtests=false _build
+ ninja -C _build
+ $DO_NINJA_INSTALL
;;
*)
echo "Glib's put out 1000 2.x releases?" 1>&2
@@ -1148,28 +1040,28 @@ EOF
}
uninstall_glib() {
- if [ ! -z "$installed_glib_version" ] ; then
+ if [ -n "$installed_glib_version" ] ; then
echo "Uninstalling GLib:"
- cd glib-$installed_glib_version
- installed_glib_major_version="`expr $installed_glib_version : '\([0-9][0-9]*\).*'`"
- installed_glib_minor_version="`expr $installed_glib_version : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
- installed_glib_dotdot_version="`expr $installed_glib_version : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
- installed_glib_major_minor_version=$installed_glib_major_version.$installed_glib_minor_version
- installed_glib_major_minor_dotdot_version=$installed_glib_major_version.$installed_glib_minor_version.$installed_glib_dotdot_version
+ cd "glib-$installed_glib_version"
+ installed_glib_major_version="$( expr "$installed_glib_version" : '\([0-9][0-9]*\).*' )"
+ installed_glib_minor_version="$( expr "$installed_glib_version" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
+ # installed_glib_dotdot_version="$( expr $installed_glib_version : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
+ # installed_glib_major_minor_version=$installed_glib_major_version.$installed_glib_minor_version
+ # installed_glib_major_minor_dotdot_version=$installed_glib_major_version.$installed_glib_minor_version.$installed_glib_dotdot_version
#
# GLib 2.59.1 and later use Meson+Ninja as the build system.
#
case $installed_glib_major_version in
1)
- $DO_MAKE_UNINSTALL || exit 1
+ $DO_MAKE_UNINSTALL
#
# This appears to delete dependencies out from under other
# Makefiles in the tree, causing it to fail. At least until
# that gets fixed, if it ever gets fixed, we just ignore the
# exit status of "make distclean"
#
- # make distclean || exit 1
+ # make distclean
make distclean || echo "Ignoring make distclean failure" 1>&2
;;
@@ -1177,14 +1069,14 @@ uninstall_glib() {
case $installed_glib_minor_version in
[0-9]|1[0-9]|2[0-9]|3[0-9]|4[0-9]|5[0-8])
- $DO_MAKE_UNINSTALL || exit 1
+ $DO_MAKE_UNINSTALL
#
# This appears to delete dependencies out from under other
# Makefiles in the tree, causing it to fail. At least until
# that gets fixed, if it ever gets fixed, we just ignore the
# exit status of "make distclean"
#
- # make distclean || exit 1
+ # make distclean
make distclean || echo "Ignoring make distclean failure" 1>&2
;;
@@ -1194,7 +1086,7 @@ uninstall_glib() {
# supports it, and I'm too lazy to add a dot-dot
# version check.
#
- $DO_NINJA_UNINSTALL || exit 1
+ $DO_NINJA_UNINSTALL
#
# For Meson+Ninja, we do the build in an _build
# subdirectory, so the equivalent of "make distclean"
@@ -1250,11 +1142,11 @@ install_qt() {
5)
case $QT_MINOR_VERSION in
- 0|1|2|3|4|5|6|7|8)
+ 0|1|2|3|4|5|6|7|8|9|10)
echo "Qt $QT_VERSION" is too old 1>&2
;;
- 9|10|11|12|13|14)
+ 11|12|13|14)
QT_VOLUME=qt-opensource-mac-x64-$QT_VERSION
;;
*)
@@ -1262,9 +1154,9 @@ install_qt() {
;;
esac
- [ -f $QT_VOLUME.dmg ] || curl -L -O https://download.qt.io/archive/qt/$QT_MAJOR_MINOR_VERSION/$QT_MAJOR_MINOR_DOTDOT_VERSION/$QT_VOLUME.dmg || exit 1
+ [ -f $QT_VOLUME.dmg ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://download.qt.io/archive/qt/$QT_MAJOR_MINOR_VERSION/$QT_MAJOR_MINOR_DOTDOT_VERSION/$QT_VOLUME.dmg
$no_build && echo "Skipping installation" && return
- sudo hdiutil attach $QT_VOLUME.dmg || exit 1
+ sudo hdiutil attach $QT_VOLUME.dmg
#
# Run the installer executable directly, so that we wait for
@@ -1282,7 +1174,7 @@ install_qt() {
}
uninstall_qt() {
- if [ ! -z "$installed_qt_version" ] ; then
+ if [ -n "$installed_qt_version" ] ; then
echo "Uninstalling Qt:"
rm -rf $HOME/Qt$installed_qt_version
rm qt-$installed_qt_version-done
@@ -1297,9 +1189,9 @@ uninstall_qt() {
# 5.3 - 5.8: qt-opensource-mac-x64-clang-{version}.dmg
# 5.9 - 5.14: qt-opensource-mac-x64-{version}.dmg
#
- installed_qt_major_version="`expr $installed_qt_version : '\([0-9][0-9]*\).*'`"
- installed_qt_minor_version="`expr $installed_qt_version : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
- installed_qt_dotdot_version="`expr $installed_qt_version : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
+ installed_qt_major_version="$( expr "$installed_qt_version" : '\([0-9][0-9]*\).*' )"
+ installed_qt_minor_version="$( expr "$installed_qt_version" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
+ # installed_qt_dotdot_version="$( expr "$installed_qt_version" : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
case $installed_qt_major_version in
1|2|3|4)
@@ -1309,14 +1201,10 @@ uninstall_qt() {
5*)
case $installed_qt_minor_version in
- 0|1|2|3|4|5)
+ 0|1|2|3|4|5|6|7|8)
echo "Qt $installed_qt_version" is too old 1>&2
;;
- 6|7|8)
- installed_qt_volume=qt-opensource-mac-x64-clang-$installed_qt_version.dmg
- ;;
-
9|10|11|12|13|14)
installed_qt_volume=qt-opensource-mac-x64-$installed_qt_version.dmg
;;
@@ -1332,24 +1220,24 @@ uninstall_qt() {
install_libsmi() {
if [ "$LIBSMI_VERSION" -a ! -f libsmi-$LIBSMI_VERSION-done ] ; then
echo "Downloading, building, and installing libsmi:"
- [ -f libsmi-$LIBSMI_VERSION.tar.gz ] || curl -L -O https://www.ibr.cs.tu-bs.de/projects/libsmi/download/libsmi-$LIBSMI_VERSION.tar.gz || exit 1
+ [ -f libsmi-$LIBSMI_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.ibr.cs.tu-bs.de/projects/libsmi/download/libsmi-$LIBSMI_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat libsmi-$LIBSMI_VERSION.tar.gz | tar xf - || exit 1
+ gzcat libsmi-$LIBSMI_VERSION.tar.gz | tar xf -
cd libsmi-$LIBSMI_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch libsmi-$LIBSMI_VERSION-done
fi
}
uninstall_libsmi() {
- if [ ! -z "$installed_libsmi_version" ] ; then
+ if [ -n "$installed_libsmi_version" ] ; then
echo "Uninstalling libsmi:"
cd libsmi-$installed_libsmi_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm libsmi-$installed_libsmi_version-done
@@ -1368,20 +1256,20 @@ uninstall_libsmi() {
install_libgpg_error() {
if [ "$LIBGPG_ERROR_VERSION" -a ! -f libgpg-error-$LIBGPG_ERROR_VERSION-done ] ; then
echo "Downloading, building, and installing libgpg-error:"
- [ -f libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 ] || curl -L -O https://www.gnupg.org/ftp/gcrypt/libgpg-error/libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 || exit 1
+ [ -f libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.gnupg.org/ftp/gcrypt/libgpg-error/libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2
$no_build && echo "Skipping installation" && return
- bzcat libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 | tar xf - || exit 1
+ bzcat libgpg-error-$LIBGPG_ERROR_VERSION.tar.bz2 | tar xf -
cd libgpg-error-$LIBGPG_ERROR_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch libgpg-error-$LIBGPG_ERROR_VERSION-done
fi
}
uninstall_libgpg_error() {
- if [ ! -z "$installed_libgpg_error_version" ] ; then
+ if [ -n "$installed_libgpg_error_version" ] ; then
#
# libgcrypt depends on this, so uninstall it.
#
@@ -1389,8 +1277,8 @@ uninstall_libgpg_error() {
echo "Uninstalling libgpg-error:"
cd libgpg-error-$installed_libgpg_error_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm libgpg-error-$installed_libgpg_error_version-done
@@ -1418,9 +1306,9 @@ install_libgcrypt() {
fi
echo "Downloading, building, and installing libgcrypt:"
- [ -f libgcrypt-$LIBGCRYPT_VERSION.tar.gz ] || curl -L -O https://www.gnupg.org/ftp/gcrypt/libgcrypt/libgcrypt-$LIBGCRYPT_VERSION.tar.gz || exit 1
+ [ -f libgcrypt-$LIBGCRYPT_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.gnupg.org/ftp/gcrypt/libgcrypt/libgcrypt-$LIBGCRYPT_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat libgcrypt-$LIBGCRYPT_VERSION.tar.gz | tar xf - || exit 1
+ gzcat libgcrypt-$LIBGCRYPT_VERSION.tar.gz | tar xf -
cd libgcrypt-$LIBGCRYPT_VERSION
#
# The assembler language code is not compatible with the macOS
@@ -1431,20 +1319,26 @@ install_libgcrypt() {
#
# https://lists.freebsd.org/pipermail/freebsd-ports-bugs/2010-October/198809.html
#
- CFLAGS="$CFLAGS -std=gnu89 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --disable-asm || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ # We specify "unix" as the random number generator so that we
+ # don't try to use getentropy, because random/rndgetentropy.c
+ # *REQUIRES* Linux getrandom(), which we don't have. (This should
+ # not matter, as we only use this for decryption, as far as I know.)
+ #
+ CFLAGS="$CFLAGS -std=gnu89 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}" --disable-asm --enable-random=unix
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch libgcrypt-$LIBGCRYPT_VERSION-done
fi
}
uninstall_libgcrypt() {
- if [ ! -z "$installed_libgcrypt_version" ] ; then
+ if [ -n "$installed_libgcrypt_version" ] ; then
echo "Uninstalling libgcrypt:"
cd libgcrypt-$installed_libgcrypt_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm libgcrypt-$installed_libgcrypt_version-done
@@ -1461,63 +1355,64 @@ uninstall_libgcrypt() {
}
install_gmp() {
- if [ "$GMP_VERSION" -a ! -f gmp-$GMP_VERSION-done ] ; then
+ if [ "$GMP_VERSION" ] && [ ! -f "gmp-$GMP_VERSION-done" ] ; then
echo "Downloading, building, and installing GMP:"
- [ -f gmp-$GMP_VERSION.tar.lz ] || curl -L -O https://gmplib.org/download/gmp/gmp-$GMP_VERSION.tar.lz || exit 1
+ [ -f "gmp-$GMP_VERSION.tar.xz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://gmplib.org/download/gmp/gmp-$GMP_VERSION.tar.xz
$no_build && echo "Skipping installation" && return
- lzip -c -d gmp-$GMP_VERSION.tar.lz | tar xf - || exit 1
- cd gmp-$GMP_VERSION
+ xzcat "gmp-$GMP_VERSION.tar.xz" | tar xf -
+ cd "gmp-$GMP_VERSION"
#
# Create a fat binary: https://gmplib.org/manual/Notes-for-Package-Builds.html
#
# According to
#
# https://www.mail-archive.com/gmp-bugs@gmplib.org/msg01492.html
- #
+ #
# and other pages, the Shiny New Linker in Xcode 15 causes this
# build to fail with "ld: branch8 out of range 384833 in
# ___gmpn_add_nc_x86_64"; linking with -ld64 is a workaround.
#
# For now, link with -ld64 on Xcode 15 and later.
#
- XCODE_VERSION=`xcodebuild -version | sed -n 's;Xcode \(.*\);\1;p'`
- XCODE_MAJOR_VERSION="`expr $XCODE_VERSION : '\([0-9][0-9]*\).*'`"
- XCODE_MINOR_VERSION="`expr $XCODE_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
- XCODE_DOTDOT_VERSION="`expr $XCODE_VERSION : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
+ XCODE_VERSION=$( xcodebuild -version | sed -n 's;Xcode \(.*\);\1;p' )
+ XCODE_MAJOR_VERSION="$( expr "$XCODE_VERSION" : '\([0-9][0-9]*\).*' )"
+ # XCODE_MINOR_VERSION="$( expr $XCODE_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
+ # XCODE_DOTDOT_VERSION="$( expr $XCODE_VERSION : '[0-9][0-9]*\.[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
if [ "$XCODE_MAJOR_VERSION" -ge 15 ]
then
LD64_FLAG="-ld64"
else
LD64_FLAG=""
fi
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS $LD64_FLAG" ./configure --enable-fat || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS $LD64_FLAG" \
+ ./configure "${CONFIGURE_OPTS[@]}" --enable-fat
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
- touch gmp-$GMP_VERSION-done
+ touch "gmp-$GMP_VERSION-done"
fi
}
uninstall_gmp() {
- if [ ! -z "$installed_gmp_version" ] ; then
+ if [ -n "$installed_gmp_version" ] ; then
#
# Nettle depends on this, so uninstall it.
#
uninstall_nettle "$@"
echo "Uninstalling GMP:"
- cd gmp-$installed_gmp_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ cd "gmp-$installed_gmp_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
- rm gmp-$installed_gmp_version-done
+ rm "gmp-$installed_gmp_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
- rm -rf gmp-$installed_gmp_version
- rm -rf gmp-$installed_gmp_version.tar.lz
+ rm -rf "gmp-$installed_gmp_version"
+ rm -rf "gmp-$installed_gmp_version.tar.xz"
fi
installed_gmp_version=""
@@ -1525,40 +1420,41 @@ uninstall_gmp() {
}
install_libtasn1() {
- if [ "$LIBTASN1_VERSION" -a ! -f libtasn1-$LIBTASN1_VERSION-done ] ; then
+ if [ "$LIBTASN1_VERSION" ] && [ ! -f "libtasn1-$LIBTASN1_VERSION-done" ] ; then
echo "Downloading, building, and installing libtasn1:"
- [ -f libtasn1-$LIBTASN1_VERSION.tar.gz ] || curl -L -O https://ftpmirror.gnu.org/libtasn1/libtasn1-$LIBTASN1_VERSION.tar.gz || exit 1
+ [ -f "libtasn1-$LIBTASN1_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://ftp.gnu.org/gnu/libtasn1/libtasn1-$LIBTASN1_VERSION.tar.gz"
$no_build && echo "Skipping installation" && return
- gzcat libtasn1-$LIBTASN1_VERSION.tar.gz | tar xf - || exit 1
- cd libtasn1-$LIBTASN1_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ gzcat "libtasn1-$LIBTASN1_VERSION.tar.gz" | tar xf -
+ cd "libtasn1-$LIBTASN1_VERSION"
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
- touch libtasn1-$LIBTASN1_VERSION-done
+ touch "libtasn1-$LIBTASN1_VERSION-done"
fi
}
uninstall_libtasn1() {
- if [ ! -z "$installed_libtasn1_version" ] ; then
+ if [ -n "$installed_libtasn1_version" ] ; then
#
# p11-kit depends on this, so uninstall it.
#
uninstall_p11_kit "$@"
echo "Uninstalling libtasn1:"
- cd libtasn1-$installed_libtasn1_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ cd "libtasn1-$installed_libtasn1_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
- rm libtasn1-$installed_libtasn1_version-done
+ rm "libtasn1-$installed_libtasn1_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
- rm -rf libtasn1-$installed_libtasn1_version
- rm -rf libtasn1-$installed_libtasn1_version.tar.gz
+ rm -rf "libtasn1-$installed_libtasn1_version"
+ rm -rf "libtasn1-$installed_libtasn1_version.tar.gz"
fi
installed_libtasn1_version=""
@@ -1566,12 +1462,12 @@ uninstall_libtasn1() {
}
install_p11_kit() {
- if [ "$P11KIT_VERSION" -a ! -f p11-kit-$P11KIT_VERSION-done ] ; then
+ if [ "$P11KIT_VERSION" ] && [ ! -f "p11-kit-$P11KIT_VERSION-done" ] ; then
echo "Downloading, building, and installing p11-kit:"
- [ -f p11-kit-$P11KIT_VERSION.tar.xz ] || curl -L -O https://github.com/p11-glue/p11-kit/releases/download/$P11KIT_VERSION/p11-kit-$P11KIT_VERSION.tar.xz || exit 1
+ [ -f "p11-kit-$P11KIT_VERSION.tar.xz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://github.com/p11-glue/p11-kit/releases/download/$P11KIT_VERSION/p11-kit-$P11KIT_VERSION.tar.xz"
$no_build && echo "Skipping installation" && return
- xzcat p11-kit-$P11KIT_VERSION.tar.xz | tar xf - || exit 1
- cd p11-kit-$P11KIT_VERSION
+ xzcat "p11-kit-$P11KIT_VERSION.tar.xz" | tar xf -
+ cd "p11-kit-$P11KIT_VERSION"
#
# Prior to Catalina, the libffi that's supplied with macOS
# doesn't support ffi_closure_alloc() or ffi_prep_closure_loc(),
@@ -1587,34 +1483,35 @@ install_p11_kit() {
# but it's not clear that this matters to us, so we just
# configure p11-kit not to use libffi.
#
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -L/usr/local/lib" LIBS=-lintl ./configure --without-libffi --without-trust-paths || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LIBS=-lintl \
+ ./configure "${CONFIGURE_OPTS[@]}" --without-libffi --without-trust-paths
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
- touch p11-kit-$P11KIT_VERSION-done
+ touch "p11-kit-$P11KIT_VERSION-done"
fi
}
uninstall_p11_kit() {
- if [ ! -z "$installed_p11_kit_version" ] ; then
+ if [ -n "$installed_p11_kit_version" ] ; then
#
# Nettle depends on this, so uninstall it.
#
uninstall_nettle "$@"
echo "Uninstalling p11-kit:"
- cd p11-kit-$installed_p11_kit_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ cd "p11-kit-$installed_p11_kit_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
- rm p11-kit-$installed_p11_kit_version-done
+ rm "p11-kit-$installed_p11_kit_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
- rm -rf p11-kit-$installed_p11_kit_version
- rm -rf p11-kit-$installed_p11_kit_version.tar.xz
+ rm -rf "p11-kit-$installed_p11_kit_version"
+ rm -rf "p11-kit-$installed_p11_kit_version.tar.xz"
fi
installed_p11_kit_version=""
@@ -1622,40 +1519,41 @@ uninstall_p11_kit() {
}
install_nettle() {
- if [ "$NETTLE_VERSION" -a ! -f nettle-$NETTLE_VERSION-done ] ; then
+ if [ "$NETTLE_VERSION" ] && [ ! -f "nettle-$NETTLE_VERSION-done" ] ; then
echo "Downloading, building, and installing Nettle:"
- [ -f nettle-$NETTLE_VERSION.tar.gz ] || curl -L -O https://ftp.gnu.org/gnu/nettle/nettle-$NETTLE_VERSION.tar.gz || exit 1
+ [ -f "nettle-$NETTLE_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://ftp.gnu.org/gnu/nettle/nettle-$NETTLE_VERSION.tar.gz"
$no_build && echo "Skipping installation" && return
- gzcat nettle-$NETTLE_VERSION.tar.gz | tar xf - || exit 1
- cd nettle-$NETTLE_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -I/usr/local/include" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -L/usr/local/lib" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ gzcat "nettle-$NETTLE_VERSION.tar.gz" | tar xf -
+ cd "nettle-$NETTLE_VERSION"
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
- touch nettle-$NETTLE_VERSION-done
+ touch "nettle-$NETTLE_VERSION-done"
fi
}
uninstall_nettle() {
- if [ ! -z "$installed_nettle_version" ] ; then
+ if [ -n "$installed_nettle_version" ] ; then
#
# GnuTLS depends on this, so uninstall it.
#
uninstall_gnutls "$@"
echo "Uninstalling Nettle:"
- cd nettle-$installed_nettle_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ cd "nettle-$installed_nettle_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
- rm nettle-$installed_nettle_version-done
+ rm "nettle-$installed_nettle_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
- rm -rf nettle-$installed_nettle_version
- rm -rf nettle-$installed_nettle_version.tar.gz
+ rm -rf "nettle-$installed_nettle_version"
+ rm -rf "nettle-$installed_nettle_version.tar.gz"
fi
installed_nettle_version=""
@@ -1663,55 +1561,46 @@ uninstall_nettle() {
}
install_gnutls() {
- if [ "$GNUTLS_VERSION" -a ! -f gnutls-$GNUTLS_VERSION-done ] ; then
+ if [ "$GNUTLS_VERSION" ] && [ ! -f "gnutls-$GNUTLS_VERSION-done" ] ; then
#
# GnuTLS requires Nettle.
#
- if [ -z $NETTLE_VERSION ]
+ if [ -z "$NETTLE_VERSION" ]
then
echo "GnuTLS requires Nettle, but you didn't install Nettle" 1>&2
exit 1
fi
echo "Downloading, building, and installing GnuTLS:"
- if [[ $GNUTLS_MAJOR_VERSION -ge 3 ]]
- then
- #
- # Starting with GnuTLS 3.x, the tarballs are compressed with
- # xz rather than bzip2.
- #
- [ -f gnutls-$GNUTLS_VERSION.tar.xz ] || curl -L -O https://www.gnupg.org/ftp/gcrypt/gnutls/v$GNUTLS_MAJOR_VERSION.$GNUTLS_MINOR_VERSION/gnutls-$GNUTLS_VERSION.tar.xz || exit 1
- $no_build && echo "Skipping installation" && return
- xzcat gnutls-$GNUTLS_VERSION.tar.xz | tar xf - || exit 1
- else
- [ -f gnutls-$GNUTLS_VERSION.tar.bz2 ] || curl -L -O https://www.gnupg.org/ftp/gcrypt/gnutls/v$GNUTLS_MAJOR_VERSION.$GNUTLS_MINOR_VERSION/gnutls-$GNUTLS_VERSION.tar.bz2 || exit 1
- $no_build && echo "Skipping installation" && return
- bzcat gnutls-$GNUTLS_VERSION.tar.bz2 | tar xf - || exit 1
- fi
+ [ -f gnutls-$GNUTLS_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" "https://www.gnupg.org/ftp/gcrypt/gnutls/v$GNUTLS_MAJOR_VERSION.$GNUTLS_MINOR_VERSION/gnutls-$GNUTLS_VERSION.tar.xz"
+ echo "$GNUTLS_SHA256 gnutls-$GNUTLS_VERSION.tar.xz" | shasum --algorithm 256 --check
+ $no_build && echo "Skipping installation" && return
+ tar -xf gnutls-$GNUTLS_VERSION.tar.xz
cd gnutls-$GNUTLS_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -I /usr/local/include" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -I/usr/local/include/" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS -L/usr/local/lib" ./configure --with-included-unistring --disable-guile || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}" --with-included-unistring --disable-guile
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch gnutls-$GNUTLS_VERSION-done
fi
}
uninstall_gnutls() {
- if [ ! -z "$installed_gnutls_version" ] ; then
+ if [ -n "$installed_gnutls_version" ] ; then
echo "Uninstalling GnuTLS:"
- cd gnutls-$installed_gnutls_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ cd "gnutls-$installed_gnutls_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
- rm gnutls-$installed_gnutls_version-done
+ rm "gnutls-$installed_gnutls_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
- rm -rf gnutls-$installed_gnutls_version
- rm -rf gnutls-$installed_gnutls_version.tar.bz2
+ rm -rf "gnutls-$installed_gnutls_version"
+ rm -rf "gnutls-$installed_gnutls_version.tar.xz"
fi
installed_gnutls_version=""
@@ -1721,31 +1610,31 @@ uninstall_gnutls() {
install_lua() {
if [ "$LUA_VERSION" -a ! -f lua-$LUA_VERSION-done ] ; then
echo "Downloading, building, and installing Lua:"
- [ -f lua-$LUA_VERSION.tar.gz ] || curl -L -O https://www.lua.org/ftp/lua-$LUA_VERSION.tar.gz || exit 1
+ [ -f lua-$LUA_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.lua.org/ftp/lua-$LUA_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat lua-$LUA_VERSION.tar.gz | tar xf - || exit 1
+ gzcat lua-$LUA_VERSION.tar.gz | tar xf -
cd lua-$LUA_VERSION
- make MYCFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" MYLDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" $MAKE_BUILD_OPTS macosx || exit 1
- $DO_MAKE_INSTALL || exit 1
+ make INSTALL_TOP="$installation_prefix" MYCFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" MYLDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" macosx
+ $DO_MAKE INSTALL_TOP="$installation_prefix" install
cd ..
touch lua-$LUA_VERSION-done
fi
}
uninstall_lua() {
- if [ ! -z "$installed_lua_version" ] ; then
+ if [ -n "$installed_lua_version" ] ; then
echo "Uninstalling Lua:"
#
# Lua has no "make uninstall", so just remove stuff manually.
# There's no configure script, so there's no need for
# "make distclean", either; just do "make clean".
#
- (cd /usr/local/bin; $DO_RM -f lua luac)
- (cd /usr/local/include; $DO_RM -f lua.h luaconf.h lualib.h lauxlib.h lua.hpp)
- (cd /usr/local/lib; $DO_RM -f liblua.a)
- (cd /usr/local/man/man1; $DO_RM -f lua.1 luac.1)
+ (cd "$installation_prefix/bin"; $DO_RM -f lua luac)
+ (cd "$installation_prefix/include"; $DO_RM -f lua.h luaconf.h lualib.h lauxlib.h lua.hpp)
+ (cd "$installation_prefix/lib"; $DO_RM -f liblua.a)
+ (cd "$installation_prefix/man/man1"; $DO_RM -f lua.1 luac.1)
cd lua-$installed_lua_version
- make clean || exit 1
+ make clean
cd ..
rm lua-$installed_lua_version-done
@@ -1764,13 +1653,13 @@ uninstall_lua() {
install_snappy() {
if [ "$SNAPPY_VERSION" -a ! -f snappy-$SNAPPY_VERSION-done ] ; then
echo "Downloading, building, and installing snappy:"
- [ -f snappy-$SNAPPY_VERSION.tar.gz ] || curl -L -o snappy-$SNAPPY_VERSION.tar.gz https://github.com/google/snappy/archive/$SNAPPY_VERSION.tar.gz || exit 1
+ [ -f snappy-$SNAPPY_VERSION.tar.gz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" snappy-$SNAPPY_VERSION.tar.gz https://github.com/google/snappy/archive/$SNAPPY_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat snappy-$SNAPPY_VERSION.tar.gz | tar xf - || exit 1
+ gzcat snappy-$SNAPPY_VERSION.tar.gz | tar xf -
cd snappy-$SNAPPY_VERSION
if [ "$SNAPPY_VERSION" = "1.1.10" ] ; then
# This patch corresponds to https://github.com/google/snappy/commit/27f34a580be4a3becf5f8c0cba13433f53c21337
- patch -p0 <${topdir}/macosx-support-lib-patches/snappy-signed.patch || exit 1
+ patch -p0 < "${topdir}/tools/macos-setup-patches/snappy-signed.patch"
fi
mkdir build_dir
cd build_dir
@@ -1781,44 +1670,44 @@ install_snappy() {
# will carry that dependency with it, so linking with it should
# Just Work.
#
- MACOSX_DEPLOYMENT_TARGET=$min_osx_target SDKROOT="$SDKPATH" $DO_CMAKE -DBUILD_SHARED_LIBS=YES -DSNAPPY_BUILD_BENCHMARKS=NO -DSNAPPY_BUILD_TESTS=NO ../ || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ "${DO_CMAKE[@]}" -DBUILD_SHARED_LIBS=YES -DSNAPPY_BUILD_BENCHMARKS=NO -DSNAPPY_BUILD_TESTS=NO ..
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ../..
touch snappy-$SNAPPY_VERSION-done
fi
}
uninstall_snappy() {
- if [ ! -z "$installed_snappy_version" ] ; then
+ if [ -n "$installed_snappy_version" ] ; then
echo "Uninstalling snappy:"
cd snappy-$installed_snappy_version
#
# snappy uses cmake and doesn't support "make uninstall";
# just remove what we know it installs.
#
- # $DO_MAKE_UNINSTALL || exit 1
+ # $DO_MAKE_UNINSTALL
if [ -s build_dir/install_manifest.txt ] ; then
while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat build_dir/install_manifest.txt; echo)
else
- $DO_RM -f /usr/local/lib/libsnappy.1.1.8.dylib \
- /usr/local/lib/libsnappy.1.dylib \
- /usr/local/lib/libsnappy.dylib \
- /usr/local/include/snappy-c.h \
- /usr/local/include/snappy-sinksource.h \
- /usr/local/include/snappy-stubs-public.h \
- /usr/local/include/snappy.h \
- /usr/local/lib/cmake/Snappy/SnappyConfig.cmake \
- /usr/local/lib/cmake/Snappy/SnappyConfigVersion.cmake \
- /usr/local/lib/cmake/Snappy/SnappyTargets-noconfig.cmake \
- /usr/local/lib/cmake/Snappy/SnappyTargets.cmake || exit 1
+ $DO_RM -f "$installation_prefix/lib/libsnappy.1.1.8.dylib" \
+ "$installation_prefix/lib/libsnappy.1.dylib" \
+ "$installation_prefix/lib/libsnappy.dylib" \
+ "$installation_prefix/include/snappy-c.h" \
+ "$installation_prefix/include/snappy-sinksource.h" \
+ "$installation_prefix/include/snappy-stubs-public.h" \
+ "$installation_prefix/include/snappy.h" \
+ "$installation_prefix/lib/cmake/Snappy/SnappyConfig.cmake" \
+ "$installation_prefix/lib/cmake/Snappy/SnappyConfigVersion.cmake" \
+ "$installation_prefix/lib/cmake/Snappy/SnappyTargets-noconfig.cmake" \
+ "$installation_prefix/lib/cmake/Snappy/SnappyTargets.cmake"
fi
#
# snappy uses cmake and doesn't support "make distclean";
#.just remove the entire build directory.
#
- # make distclean || exit 1
- rm -rf build_dir || exit 1
+ # make distclean
+ rm -rf build_dir
cd ..
rm snappy-$installed_snappy_version-done
@@ -1835,75 +1724,119 @@ uninstall_snappy() {
}
install_zstd() {
- if [ "$ZSTD_VERSION" -a ! -f zstd-$ZSTD_VERSION-done ] ; then
+ if [ "$ZSTD_VERSION" ] && [ ! -f zstd-$ZSTD_VERSION-done ] ; then
echo "Downloading, building, and installing zstd:"
- [ -f zstd-$ZSTD_VERSION.tar.gz ] || curl -L -O https://github.com/facebook/zstd/releases/download/v$ZSTD_VERSION/zstd-$ZSTD_VERSION.tar.gz || exit 1
+ [ -f zstd-$ZSTD_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/facebook/zstd/releases/download/v$ZSTD_VERSION/zstd-$ZSTD_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat zstd-$ZSTD_VERSION.tar.gz | tar xf - || exit 1
+ gzcat zstd-$ZSTD_VERSION.tar.gz | tar xf -
cd zstd-$ZSTD_VERSION
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ # We shouldn't have to specify DESTDIR.
+ # https://github.com/facebook/zstd/issues/3146
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ make PREFIX="$installation_prefix" DESTDIR="$installation_prefix" "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE PREFIX="$installation_prefix" install
cd ..
touch zstd-$ZSTD_VERSION-done
fi
}
uninstall_zstd() {
- if [ ! -z "$installed_zstd_version" ] ; then
+ if [ -n "$installed_zstd_version" ] ; then
echo "Uninstalling zstd:"
- cd zstd-$installed_zstd_version
- $DO_MAKE_UNINSTALL || exit 1
+ cd "zstd-$installed_zstd_version"
+ $DO_MAKE_UNINSTALL
#
# zstd has no configure script, so there's no need for
# "make distclean", and the Makefile supplied with it
# has no "make distclean" rule; just do "make clean".
#
- make clean || exit 1
+ make clean
cd ..
- rm zstd-$installed_zstd_version-done
+ rm "zstd-$installed_zstd_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
- rm -rf zstd-$installed_zstd_version
- rm -rf zstd-$installed_zstd_version.tar.gz
+ rm -rf "zstd-$installed_zstd_version"
+ rm -rf "zstd-$installed_zstd_version.tar.gz"
fi
installed_zstd_version=""
fi
}
+#$ZLIBNG_VERSION
+install_zlibng() {
+ if [ "$ZLIBNG_VERSION" ] && [ ! -f zlib-ng-$ZLIBNG_VERSION-done ] ; then
+ echo "Downloading, building, and installing zlib-ng:"
+ [ -f $ZLIBNG_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/zlib-ng/zlib-ng/archive/refs/tags/$ZLIBNG_VERSION.tar.gz
+ $no_build && echo "Skipping installation" && return
+ gzcat $ZLIBNG_VERSION.tar.gz | tar xf -
+ cd zlib-ng-$ZLIBNG_VERSION
+ mkdir build
+ cd build
+ "${DO_CMAKE[@]}" ..
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
+ cd ../..
+ touch zlib-ng-$ZLIBNG_VERSION-done
+ fi
+}
+
+uninstall_zlibng() {
+ if [ -n "$installed_zlibng_version" ] ; then
+ echo "Uninstalling zlibng:"
+ cd "zlib-ng-$installed_zlibng_version"
+ $DO_MAKE_UNINSTALL
+ #
+ # XXX not sure what to do here...
+ #
+ make clean
+ cd ..
+ rm "zlib-ng-$installed_zlibng_version-done"
+
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
+ #
+ # Get rid of the previously downloaded and unpacked version.
+ #
+ rm -rf "zlib-ng-$installed_zlibng_version"
+ rm -rf "$installed_zlibng_version.tar.gz"
+ fi
+ installed_zlibng_version=""
+ fi
+}
install_libxml2() {
if [ "$LIBXML2_VERSION" -a ! -f libxml2-$LIBXML2_VERSION-done ] ; then
echo "Downloading, building, and installing libxml2:"
- LIBXML2_MAJOR_VERSION="`expr $LIBXML2_VERSION : '\([0-9][0-9]*\).*'`"
- LIBXML2_MINOR_VERSION="`expr $LIBXML2_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
+ LIBXML2_MAJOR_VERSION="$( expr "$LIBXML2_VERSION" : '\([0-9][0-9]*\).*' )"
+ LIBXML2_MINOR_VERSION="$( expr "$LIBXML2_VERSION" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
LIBXML2_MAJOR_MINOR_VERSION=$LIBXML2_MAJOR_VERSION.$LIBXML2_MINOR_VERSION
- [ -f libxml2-$LIBXML2_VERSION.tar.gz ] || curl -L -O https://download.gnome.org/sources/libxml2/$LIBXML2_MAJOR_MINOR_VERSION/libxml2-$LIBXML2_VERSION.tar.xz || exit 1
+ [ -f libxml2-$LIBXML2_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://download.gnome.org/sources/libxml2/$LIBXML2_MAJOR_MINOR_VERSION/libxml2-$LIBXML2_VERSION.tar.xz
$no_build && echo "Skipping installation" && return
- xzcat libxml2-$LIBXML2_VERSION.tar.xz | tar xf - || exit 1
- cd libxml2-$LIBXML2_VERSION
+ xzcat libxml2-$LIBXML2_VERSION.tar.xz | tar xf -
+ cd "libxml2-$LIBXML2_VERSION"
#
# At least on macOS 12.0.1 with Xcode 13.1, when we build
# libxml2, the linker complains that we don't have the right
# to link with the Python framework, so don't build with
# Python.
#
- CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --without-python || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}" --without-python
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch libxml2-$LIBXML2_VERSION-done
fi
}
uninstall_libxml2() {
- if [ ! -z "$installed_libxml2_version" ] ; then
+ if [ -n "$installed_libxml2_version" ] ; then
echo "Uninstalling libxml2:"
cd libxml2-$installed_libxml2_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm libxml2-$installed_libxml2_version-done
@@ -1920,7 +1853,7 @@ uninstall_libxml2() {
}
install_lz4() {
- if [ "$LZ4_VERSION" -a ! -f lz4-$LZ4_VERSION-done ] ; then
+ if [ "$LZ4_VERSION" ] && [ ! -f lz4-$LZ4_VERSION-done ] ; then
echo "Downloading, building, and installing lz4:"
#
# lz4 switched from sequentially numbered releases, named rN,
@@ -1945,12 +1878,12 @@ install_lz4() {
#
if [[ "$LZ4_VERSION" == r* ]]
then
- [ -f lz4-$LZ4_VERSION.tar.gz ] || curl -L -o lz4-$LZ4_VERSION.tar.gz https://github.com/lz4/lz4/archive/$LZ4_VERSION.tar.gz || exit 1
+ [ -f lz4-$LZ4_VERSION.tar.gz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" lz4-$LZ4_VERSION.tar.gz https://github.com/lz4/lz4/archive/$LZ4_VERSION.tar.gz
else
- [ -f lz4-$LZ4_VERSION.tar.gz ] || curl -L -o lz4-$LZ4_VERSION.tar.gz https://github.com/lz4/lz4/archive/v$LZ4_VERSION.tar.gz || exit 1
+ [ -f lz4-$LZ4_VERSION.tar.gz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" lz4-$LZ4_VERSION.tar.gz https://github.com/lz4/lz4/archive/v$LZ4_VERSION.tar.gz
fi
$no_build && echo "Skipping installation" && return
- gzcat lz4-$LZ4_VERSION.tar.gz | tar xf - || exit 1
+ gzcat lz4-$LZ4_VERSION.tar.gz | tar xf -
cd lz4-$LZ4_VERSION
#
# No configure script here, but it appears that if MOREFLAGS is
@@ -1958,29 +1891,30 @@ install_lz4() {
# and CXXFLAGS into FLAGS, which is used when building source
# files and libraries.
#
- MOREFLAGS="-D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ MOREFLAGS="-D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" \
+ make PREFIX="$installation_prefix" "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE PREFIX="$installation_prefix" install
cd ..
touch lz4-$LZ4_VERSION-done
fi
}
uninstall_lz4() {
- if [ ! -z "$installed_lz4_version" ] ; then
+ if [ -n "$installed_lz4_version" ] ; then
echo "Uninstalling lz4:"
- cd lz4-$installed_lz4_version
- $DO_MAKE_UNINSTALL || exit 1
+ cd "lz4-$installed_lz4_version"
+ $DO_MAKE_UNINSTALL
#
# lz4's Makefile doesn't support "make distclean"; just do
# "make clean". Perhaps not using autotools means that
# there's no need for "make distclean".
#
- # make distclean || exit 1
- make clean || exit 1
+ # make distclean
+ make clean
cd ..
- rm lz4-$installed_lz4_version-done
+ rm "lz4-$installed_lz4_version-done"
- if [ "$#" -eq 1 -a "$1" = "-r" ] ; then
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
#
# Get rid of the previously downloaded and unpacked version.
#
@@ -1990,8 +1924,8 @@ uninstall_lz4() {
# tree. Therefore, we have to remove the build tree
# as root.
#
- sudo rm -rf lz4-$installed_lz4_version
- rm -rf lz4-$installed_lz4_version.tar.gz
+ sudo rm -rf "lz4-$installed_lz4_version"
+ rm -rf "lz4-$installed_lz4_version.tar.gz"
fi
installed_lz4_version=""
@@ -2001,28 +1935,30 @@ uninstall_lz4() {
install_sbc() {
if [ "$SBC_VERSION" -a ! -f sbc-$SBC_VERSION-done ] ; then
echo "Downloading, building, and installing sbc:"
- [ -f sbc-$SBC_VERSION.tar.gz ] || curl -L -O https://www.kernel.org/pub/linux/bluetooth/sbc-$SBC_VERSION.tar.gz || exit 1
+ [ -f sbc-$SBC_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.kernel.org/pub/linux/bluetooth/sbc-$SBC_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat sbc-$SBC_VERSION.tar.gz | tar xf - || exit 1
+ gzcat sbc-$SBC_VERSION.tar.gz | tar xf -
cd sbc-$SBC_VERSION
- if [ "$DARWIN_PROCESSOR_ARCH" = "arm" ] ; then
- CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS -U__ARM_NEON__" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --disable-tools --disable-tester --disable-shared || exit 1
+ if [ "$DARWIN_PROCESSOR_ARCH" = "arm64" ] ; then
+ CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS -U__ARM_NEON__" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}" --disable-tools --disable-tester --disable-shared
else
- CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --disable-tools --disable-tester --disable-shared || exit 1
+ CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}" --disable-tools --disable-tester --disable-shared
fi
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch sbc-$SBC_VERSION-done
fi
}
uninstall_sbc() {
- if [ ! -z "$installed_sbc_version" ] ; then
+ if [ -n "$installed_sbc_version" ] ; then
echo "Uninstalling sbc:"
cd sbc-$installed_sbc_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm sbc-$installed_sbc_version-done
@@ -2041,24 +1977,25 @@ uninstall_sbc() {
install_maxminddb() {
if [ "$MAXMINDDB_VERSION" -a ! -f maxminddb-$MAXMINDDB_VERSION-done ] ; then
echo "Downloading, building, and installing MaxMindDB API:"
- [ -f libmaxminddb-$MAXMINDDB_VERSION.tar.gz ] || curl -L -O https://github.com/maxmind/libmaxminddb/releases/download/$MAXMINDDB_VERSION/libmaxminddb-$MAXMINDDB_VERSION.tar.gz || exit 1
+ [ -f libmaxminddb-$MAXMINDDB_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/maxmind/libmaxminddb/releases/download/$MAXMINDDB_VERSION/libmaxminddb-$MAXMINDDB_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat libmaxminddb-$MAXMINDDB_VERSION.tar.gz | tar xf - || exit 1
+ gzcat libmaxminddb-$MAXMINDDB_VERSION.tar.gz | tar xf -
cd libmaxminddb-$MAXMINDDB_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch maxminddb-$MAXMINDDB_VERSION-done
fi
}
uninstall_maxminddb() {
- if [ ! -z "$installed_maxminddb_version" ] ; then
+ if [ -n "$installed_maxminddb_version" ] ; then
echo "Uninstalling MaxMindDB API:"
cd libmaxminddb-$installed_maxminddb_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm maxminddb-$installed_maxminddb_version-done
@@ -2077,24 +2014,26 @@ uninstall_maxminddb() {
install_c_ares() {
if [ "$CARES_VERSION" -a ! -f c-ares-$CARES_VERSION-done ] ; then
echo "Downloading, building, and installing C-Ares API:"
- [ -f c-ares-$CARES_VERSION.tar.gz ] || curl -L -O https://c-ares.org/download/c-ares-$CARES_VERSION.tar.gz || exit 1
+ # https://github.com/c-ares/c-ares/releases/download/v1.31.0/c-ares-1.31.0.tar.gz
+ [ -f c-ares-$CARES_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/c-ares/c-ares/releases/download/v$CARES_VERSION/c-ares-$CARES_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat c-ares-$CARES_VERSION.tar.gz | tar xf - || exit 1
+ gzcat c-ares-$CARES_VERSION.tar.gz | tar xf -
cd c-ares-$CARES_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch c-ares-$CARES_VERSION-done
fi
}
uninstall_c_ares() {
- if [ ! -z "$installed_cares_version" ] ; then
+ if [ -n "$installed_cares_version" ] ; then
echo "Uninstalling C-Ares API:"
cd c-ares-$installed_cares_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm c-ares-$installed_cares_version-done
@@ -2113,42 +2052,42 @@ uninstall_c_ares() {
install_libssh() {
if [ "$LIBSSH_VERSION" -a ! -f libssh-$LIBSSH_VERSION-done ] ; then
echo "Downloading, building, and installing libssh:"
- LIBSSH_MAJOR_VERSION="`expr $LIBSSH_VERSION : '\([0-9][0-9]*\).*'`"
- LIBSSH_MINOR_VERSION="`expr $LIBSSH_VERSION : '[0-9][0-9]*\.\([0-9][0-9]*\).*'`"
+ LIBSSH_MAJOR_VERSION="$( expr "$LIBSSH_VERSION" : '\([0-9][0-9]*\).*' )"
+ LIBSSH_MINOR_VERSION="$( expr "$LIBSSH_VERSION" : '[0-9][0-9]*\.\([0-9][0-9]*\).*' )"
LIBSSH_MAJOR_MINOR_VERSION=$LIBSSH_MAJOR_VERSION.$LIBSSH_MINOR_VERSION
- [ -f libssh-$LIBSSH_VERSION.tar.xz ] || curl -L -O https://www.libssh.org/files/$LIBSSH_MAJOR_MINOR_VERSION/libssh-$LIBSSH_VERSION.tar.xz
+ [ -f libssh-$LIBSSH_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.libssh.org/files/$LIBSSH_MAJOR_MINOR_VERSION/libssh-$LIBSSH_VERSION.tar.xz
$no_build && echo "Skipping installation" && return
- xzcat libssh-$LIBSSH_VERSION.tar.xz | tar xf - || exit 1
- cd libssh-$LIBSSH_VERSION
+ xzcat libssh-$LIBSSH_VERSION.tar.xz | tar xf -
+ cd "libssh-$LIBSSH_VERSION"
mkdir build
cd build
- MACOSX_DEPLOYMENT_TARGET=$min_osx_target SDKROOT="$SDKPATH" $DO_CMAKE -DWITH_GCRYPT=1 ../ || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ "${DO_CMAKE[@]}" -DWITH_GCRYPT=1 ..
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ../..
touch libssh-$LIBSSH_VERSION-done
fi
}
uninstall_libssh() {
- if [ ! -z "$installed_libssh_version" ] ; then
+ if [ -n "$installed_libssh_version" ] ; then
echo "Uninstalling libssh:"
cd libssh-$installed_libssh_version
#
# libssh uses cmake and doesn't support "make uninstall";
# just remove what we know it installs.
#
- # $DO_MAKE_UNINSTALL || exit 1
- $DO_RM -rf /usr/local/lib/libssh* \
- /usr/local/include/libssh \
- /usr/local/lib/pkgconfig/libssh* \
- /usr/local/lib/cmake/libssh || exit 1
+ # $DO_MAKE_UNINSTALL
+ $DO_RM -rf "$installation_prefix"/lib/libssh* \
+ "$installation_prefix"/include/libssh \
+ "$installation_prefix"/lib/pkgconfig/libssh* \
+ "$installation_prefix"/lib/cmake/libssh
#
# libssh uses cmake and doesn't support "make distclean";
# just remove the entire build directory.
#
- # make distclean || exit 1
- rm -rf build || exit 1
+ # make distclean
+ rm -rf build
cd ..
rm libssh-$installed_libssh_version-done
@@ -2167,24 +2106,25 @@ uninstall_libssh() {
install_nghttp2() {
if [ "$NGHTTP2_VERSION" -a ! -f nghttp2-$NGHTTP2_VERSION-done ] ; then
echo "Downloading, building, and installing nghttp2:"
- [ -f nghttp2-$NGHTTP2_VERSION.tar.xz ] || curl -L -O https://github.com/nghttp2/nghttp2/releases/download/v$NGHTTP2_VERSION/nghttp2-$NGHTTP2_VERSION.tar.xz || exit 1
+ [ -f nghttp2-$NGHTTP2_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/nghttp2/nghttp2/releases/download/v$NGHTTP2_VERSION/nghttp2-$NGHTTP2_VERSION.tar.xz
$no_build && echo "Skipping installation" && return
- xzcat nghttp2-$NGHTTP2_VERSION.tar.xz | tar xf - || exit 1
+ xzcat nghttp2-$NGHTTP2_VERSION.tar.xz | tar xf -
cd nghttp2-$NGHTTP2_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --enable-lib-only || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}" --enable-lib-only
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch nghttp2-$NGHTTP2_VERSION-done
fi
}
uninstall_nghttp2() {
- if [ ! -z "$installed_nghttp2_version" ] ; then
+ if [ -n "$installed_nghttp2_version" ] ; then
echo "Uninstalling nghttp2:"
cd nghttp2-$installed_nghttp2_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm nghttp2-$installed_nghttp2_version-done
@@ -2203,24 +2143,25 @@ uninstall_nghttp2() {
install_nghttp3() {
if [ "$NGHTTP3_VERSION" -a ! -f nghttp3-$NGHTTP3_VERSION-done ] ; then
echo "Downloading, building, and installing nghttp3:"
- [ -f nghttp3-$NGHTTP3_VERSION.tar.xz ] || curl -L -O https://github.com/ngtcp2/nghttp3/releases/download/v$NGHTTP3_VERSION/nghttp3-$NGHTTP3_VERSION.tar.xz || exit 1
+ [ -f nghttp3-$NGHTTP3_VERSION.tar.xz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/ngtcp2/nghttp3/releases/download/v$NGHTTP3_VERSION/nghttp3-$NGHTTP3_VERSION.tar.xz
$no_build && echo "Skipping installation" && return
- xzcat nghttp3-$NGHTTP3_VERSION.tar.xz | tar xf - || exit 1
+ xzcat nghttp3-$NGHTTP3_VERSION.tar.xz | tar xf -
cd nghttp3-$NGHTTP3_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure --enable-lib-only || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}" --enable-lib-only
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch nghttp3-$NGHTTP3_VERSION-done
fi
}
uninstall_nghttp3() {
- if [ ! -z "$installed_nghttp3_version" ] ; then
+ if [ -n "$installed_nghttp3_version" ] ; then
echo "Uninstalling nghttp3:"
cd nghttp3-$installed_nghttp3_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm nghttp3-$installed_nghttp3_version-done
@@ -2240,26 +2181,27 @@ install_libtiff() {
if [ "$LIBTIFF_VERSION" -a ! -f tiff-$LIBTIFF_VERSION-done ] ; then
echo "Downloading, building, and installing libtiff:"
[ -f tiff-$LIBTIFF_VERSION.tar.gz ] ||
- curl --fail -L -O https://download.osgeo.org/libtiff/tiff-$LIBTIFF_VERSION.tar.gz ||
- curl --fail -L -O https://download.osgeo.org/libtiff/old/tiff-$LIBTIFF_VERSION.tar.gz ||
+ curl "${CURL_REMOTE_NAME_OPTS[@]}" https://download.osgeo.org/libtiff/tiff-$LIBTIFF_VERSION.tar.gz ||
+ curl "${CURL_REMOTE_NAME_OPTS[@]}" https://download.osgeo.org/libtiff/old/tiff-$LIBTIFF_VERSION.tar.gz ||
exit 1
$no_build && echo "Skipping installation" && return
- gzcat tiff-$LIBTIFF_VERSION.tar.gz | tar xf - || exit 1
+ gzcat tiff-$LIBTIFF_VERSION.tar.gz | tar xf -
cd tiff-$LIBTIFF_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch tiff-$LIBTIFF_VERSION-done
fi
}
uninstall_libtiff() {
- if [ ! -z "$installed_libtiff_version" ] ; then
+ if [ -n "$installed_libtiff_version" ] ; then
echo "Uninstalling libtiff:"
cd tiff-$installed_libtiff_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm tiff-$installed_libtiff_version-done
@@ -2278,30 +2220,31 @@ uninstall_libtiff() {
install_spandsp() {
if [ "$SPANDSP_VERSION" -a ! -f spandsp-$SPANDSP_VERSION-done ] ; then
echo "Downloading, building, and installing SpanDSP:"
- [ -f spandsp-$SPANDSP_VERSION.tar.gz ] || curl -L -O https://www.soft-switch.org/downloads/spandsp/spandsp-$SPANDSP_VERSION.tar.gz || exit 1
+ [ -f spandsp-$SPANDSP_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.soft-switch.org/downloads/spandsp/spandsp-$SPANDSP_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat spandsp-$SPANDSP_VERSION.tar.gz | tar xf - || exit 1
+ gzcat spandsp-$SPANDSP_VERSION.tar.gz | tar xf -
cd spandsp-$SPANDSP_VERSION
#
# Don't use -Wunused-but-set-variable, as it's not supported
# by all the gcc versions in the versions of Xcode that we
# support.
#
- patch -p0 <${topdir}/macosx-support-lib-patches/spandsp-configure-patch || exit 1
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ patch -p0 < "${topdir}/tools/macos-setup-patches/spandsp-configure-patch"
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch spandsp-$SPANDSP_VERSION-done
fi
}
uninstall_spandsp() {
- if [ ! -z "$installed_spandsp_version" ] ; then
+ if [ -n "$installed_spandsp_version" ] ; then
echo "Uninstalling SpanDSP:"
cd spandsp-$installed_spandsp_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm spandsp-$installed_spandsp_version-done
@@ -2320,24 +2263,25 @@ uninstall_spandsp() {
install_speexdsp() {
if [ "$SPEEXDSP_VERSION" -a ! -f speexdsp-$SPEEXDSP_VERSION-done ] ; then
echo "Downloading, building, and installing SpeexDSP:"
- [ -f speexdsp-$SPEEXDSP_VERSION.tar.gz ] || curl -L -O https://ftp.osuosl.org/pub/xiph/releases/speex/speexdsp-$SPEEXDSP_VERSION.tar.gz || exit 1
+ [ -f speexdsp-$SPEEXDSP_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://ftp.osuosl.org/pub/xiph/releases/speex/speexdsp-$SPEEXDSP_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat speexdsp-$SPEEXDSP_VERSION.tar.gz | tar xf - || exit 1
+ gzcat speexdsp-$SPEEXDSP_VERSION.tar.gz | tar xf -
cd speexdsp-$SPEEXDSP_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch speexdsp-$SPEEXDSP_VERSION-done
fi
}
uninstall_speexdsp() {
- if [ ! -z "$installed_speexdsp_version" ] ; then
+ if [ -n "$installed_speexdsp_version" ] ; then
echo "Uninstalling SpeexDSP:"
cd speexdsp-$installed_speexdsp_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm speexdsp-$installed_speexdsp_version-done
@@ -2356,39 +2300,39 @@ uninstall_speexdsp() {
install_bcg729() {
if [ "$BCG729_VERSION" -a ! -f bcg729-$BCG729_VERSION-done ] ; then
echo "Downloading, building, and installing bcg729:"
- [ -f bcg729-$BCG729_VERSION.tar.gz ] || curl -L -O https://gitlab.linphone.org/BC/public/bcg729/-/archive/$BCG729_VERSION/bcg729-$BCG729_VERSION.tar.gz || exit 1
+ [ -f bcg729-$BCG729_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://gitlab.linphone.org/BC/public/bcg729/-/archive/$BCG729_VERSION/bcg729-$BCG729_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat bcg729-$BCG729_VERSION.tar.gz | tar xf - || exit 1
+ gzcat bcg729-$BCG729_VERSION.tar.gz | tar xf -
cd bcg729-$BCG729_VERSION
mkdir build_dir
cd build_dir
- MACOSX_DEPLOYMENT_TARGET=$min_osx_target SDKROOT="$SDKPATH" $DO_CMAKE ../ || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ "${DO_CMAKE[@]}" ..
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ../..
touch bcg729-$BCG729_VERSION-done
fi
}
uninstall_bcg729() {
- if [ ! -z "$installed_bcg729_version" ] ; then
+ if [ -n "$installed_bcg729_version" ] ; then
echo "Uninstalling bcg729:"
cd bcg729-$installed_bcg729_version
#
# bcg729 uses cmake on macOS and doesn't support "make uninstall";
# just remove what we know it installs.
#
- # $DO_MAKE_UNINSTALL || exit 1
- $DO_RM -rf /usr/local/share/Bcg729 \
- /usr/local/lib/libbcg729* \
- /usr/local/include/bcg729 \
- /usr/local/lib/pkgconfig/libbcg729* || exit 1
+ # $DO_MAKE_UNINSTALL
+ $DO_RM -rf "$installation_prefix"/share/Bcg729 \
+ "$installation_prefix"/lib/libbcg729* \
+ "$installation_prefix"/include/bcg729 \
+ "$installation_prefix"/lib/pkgconfig/libbcg729*
#
# bcg729 uses cmake on macOS and doesn't support "make distclean";
# just remove the enire build directory.
#
- # make distclean || exit 1
- rm -rf build_dir || exit 1
+ # make distclean
+ rm -rf build_dir
cd ..
rm bcg729-$installed_bcg729_version-done
@@ -2407,13 +2351,14 @@ uninstall_bcg729() {
install_ilbc() {
if [ -n "$ILBC_VERSION" ] && [ ! -f ilbc-$ILBC_VERSION-done ] ; then
echo "Downloading, building, and installing iLBC:"
- [ -f libilbc-$ILBC_VERSION.tar.bz ] || curl --location --remote-name https://github.com/TimothyGu/libilbc/releases/download/v$ILBC_VERSION/libilbc-$ILBC_VERSION.tar.bz2 || exit 1
+ [ -f libilbc-$ILBC_VERSION.tar.bz ] || curl --location --remote-name https://github.com/TimothyGu/libilbc/releases/download/v$ILBC_VERSION/libilbc-$ILBC_VERSION.tar.bz2
$no_build && echo "Skipping installation" && return
- bzcat libilbc-$ILBC_VERSION.tar.bz2 | tar xf - || exit 1
- cd libilbc-$ILBC_VERSION || exit 1
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ bzcat libilbc-$ILBC_VERSION.tar.bz2 | tar xf -
+ cd libilbc-$ILBC_VERSION
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch ilbc-$ILBC_VERSION-done
fi
@@ -2422,9 +2367,9 @@ install_ilbc() {
uninstall_ilbc() {
if [ -n "$installed_ilbc_version" ] ; then
echo "Uninstalling iLBC:"
- cd "libilbc-$installed_ilbc_version" || exit 1
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ cd "libilbc-$installed_ilbc_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm "ilbc-$installed_ilbc_version-done"
@@ -2440,27 +2385,66 @@ uninstall_ilbc() {
fi
}
+install_opencore_amr() {
+ if [ "$OPENCORE_AMR_VERSION" ] && [ ! -f opencore-amr-$OPENCORE_AMR_VERSION-done ] ; then
+ echo "Downloading, building, and installing opencore-amr:"
+ [ -f opencore-amr-$OPENCORE_AMR_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://downloads.sourceforge.net/project/opencore-amr/opencore-amr/opencore-amr-$OPENCORE_AMR_VERSION.tar.gz
+ echo "$OPENCORE_AMR_SHA256 opencore-amr-$OPENCORE_AMR_VERSION.tar.gz" | shasum --algorithm 256 --check
+ $no_build && echo "Skipping installation" && return
+ tar -xf opencore-amr-$OPENCORE_AMR_VERSION.tar.gz
+ cd opencore-amr-$OPENCORE_AMR_VERSION
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
+ cd ..
+ touch opencore-amr-$OPENCORE_AMR_VERSION-done
+ fi
+}
+
+uninstall_opencore_amr() {
+ if [ -n "$installed_opencore_amr_version" ] ; then
+ echo "Uninstalling opencore-amr:"
+ cd "opencore-amr-$installed_opencore_amr_version"
+ $DO_MAKE_UNINSTALL
+ make distclean
+ cd ..
+ rm "opencore-amr-$installed_opencore_amr_version-done"
+
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
+ #
+ # Get rid of the previously downloaded and unpacked version.
+ #
+ rm -rf "opencore-amr-$installed_opencore_amr_version"
+ rm -rf "opencore-amr-$installed_opencore_amr_version.tar.gz"
+ fi
+
+ installed_opencore_amr_version=""
+ fi
+}
+
install_opus() {
if [ "$OPUS_VERSION" -a ! -f opus-$OPUS_VERSION-done ] ; then
echo "Downloading, building, and installing opus:"
- [ -f opus-$OPUS_VERSION.tar.gz ] || curl -L -O https://downloads.xiph.org/releases/opus/opus-$OPUS_VERSION.tar.gz || exit 1
+ [ -f opus-$OPUS_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://downloads.xiph.org/releases/opus/opus-$OPUS_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat opus-$OPUS_VERSION.tar.gz | tar xf - || exit 1
+ gzcat opus-$OPUS_VERSION.tar.gz | tar xf -
cd opus-$OPUS_VERSION
- CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ..
touch opus-$OPUS_VERSION-done
fi
}
uninstall_opus() {
- if [ ! -z "$installed_opus_version" ] ; then
+ if [ -n "$installed_opus_version" ] ; then
echo "Uninstalling opus:"
cd opus-$installed_opus_version
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ..
rm opus-$installed_opus_version-done
@@ -2476,26 +2460,164 @@ uninstall_opus() {
fi
}
-install_python3() {
- # The macos11 installer can be deployed to older versions, down to
- # 10.9 (Mavericks), but is still considered experimental so continue
- # to use the 64-bit installer (10.9) on earlier releases for now.
- local macver=x10.9
- if [[ $DARWIN_MAJOR_VERSION -gt 19 ]]; then
- # The macos11 installer is required for Arm-based Macs, which require
- # macOS 11 Big Sur. Note that the package name is "11.0" (no x) for
- # 3.9.1 but simply "11" for 3.9.2 (and later)
- if [[ $PYTHON3_VERSION = 3.9.1 ]]; then
- macver=11.0
- else
- macver=11
+install_jsoncpp() {
+ if [ "$JSONCPP_VERSION" ] && [ ! -f "jsoncpp-$JSONCPP_VERSION-done" ] ; then
+ echo "Downloading, building, and installing JsonCpp:"
+ [ -f "jsoncpp-$JSONCPP_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" --remote-header-name "https://github.com/open-source-parsers/jsoncpp/archive/refs/tags/$JSONCPP_VERSION.tar.gz"
+ $no_build && echo "Skipping installation" && return
+ tar -xf "jsoncpp-$JSONCPP_VERSION.tar.gz"
+ cd "jsoncpp-$JSONCPP_VERSION"
+ mkdir build_dir
+ cd build_dir
+ "${DO_CMAKE[@]}" -DBUILD_OBJECT_LIBS=OFF -DBUILD_SHARED_LIBS=ON -DBUILD_STATIC_LIBS=OFF -DJSONCPP_WITH_POST_BUILD_UNITTEST=OFF ..
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
+ cd ../..
+ touch "jsoncpp-$JSONCPP_VERSION-done"
+ fi
+}
+
+uninstall_jsoncpp() {
+ if [ "$installed_jsoncpp_version" ] && [ -s "jsoncpp-$installed_jsoncpp_version/build_dir/install_manifest.txt" ] ; then
+ echo "Uninstalling JsonCpp:"
+ while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat "jsoncpp-$installed_jsoncpp_version/build_dir/install_manifest.txt"; echo)
+ rm "jsoncpp-$JSONCPP_VERSION-done"
+
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
+ #
+ # Get rid of the previously downloaded and unpacked version.
+ #
+ rm -rf "jsoncpp-$installed_jsoncpp_version"
+ rm -rf "jsoncpp-$installed_jsoncpp_version.tar.gz"
+ fi
+
+ installed_jsoncpp_version=""
+ fi
+}
+
+install_onetbb() {
+ if [ "$ONETBB_VERSION" ] && [ ! -f "onetbb-$ONETBB_VERSION-done" ] ; then
+ echo "Downloading, building, and installing oneTBB:"
+ [ -f "oneTBB-$ONETBB_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" --remote-header-name "https://github.com/oneapi-src/oneTBB/archive/refs/tags/v$ONETBB_VERSION.tar.gz"
+ $no_build && echo "Skipping installation" && return
+ tar -xf "oneTBB-$ONETBB_VERSION.tar.gz"
+ cd "oneTBB-$ONETBB_VERSION"
+ mkdir build_dir
+ cd build_dir
+ "${DO_CMAKE[@]}" -DBUILD_SHARED_LIBS=ON -DTBB_TEST=OFF ..
+ make "${MAKE_BUILD_OPTS[@]}" tbb
+ $DO_MAKE_INSTALL
+ cd ../..
+ touch "onetbb-$ONETBB_VERSION-done"
+ fi
+}
+
+uninstall_onetbb() {
+ if [ "$installed_onetbb_version" ] && [ -s "oneTBB-$installed_onetbb_version/build_dir/install_manifest.txt" ] ; then
+ echo "Uninstalling oneTBB:"
+ while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat "oneTBB-$installed_onetbb_version/build_dir/install_manifest.txt"; echo)
+ rm "onetbb-$installed_onetbb_version-done"
+
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
+ #
+ # Get rid of the previously downloaded and unpacked version.
+ #
+ rm -rf "oneTBB-$installed_onetbb_version"
+ rm -rf "oneTBB-$installed_onetbb_version.tar.gz"
+ fi
+
+ installed_onetbb_version=""
+ fi
+}
+
+install_re2() {
+ if [ "$RE2_VERSION" ] && [ ! -f "re2-$RE2_VERSION-done" ] ; then
+ echo "Downloading, building, and installing RE2:"
+ [ -f "re2-$RE2_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" --remote-header-name "https://github.com/google/re2/archive/refs/tags/$RE2_VERSION.tar.gz"
+ $no_build && echo "Skipping installation" && return
+ tar -xf "re2-$RE2_VERSION.tar.gz"
+ cd "re2-$RE2_VERSION"
+ mkdir build_dir
+ cd build_dir
+ "${DO_CMAKE[@]}" -DBUILD_SHARED_LIBS=ON -DRE2_BUILD_TESTING=OFF ..
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
+ cd ../..
+ touch "re2-$RE2_VERSION-done"
+ fi
+}
+
+uninstall_re2() {
+ if [ -n "$installed_re2_version" ] && [ -s "re2-$installed_re2_version/build_dir/install_manifest.txt" ] ; then
+ echo "Uninstalling RE2:"
+ while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat "re2-$installed_re2_version/build_dir/install_manifest.txt"; echo)
+ rm "re2-$installed_re2_version-done"
+
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
+ #
+ # Get rid of the previously downloaded and unpacked version.
+ #
+ rm -rf "re2-$installed_re2_version"
+ rm -rf "re2-$installed_re2_version.tar.gz"
+ fi
+
+ installed_re2_version=""
+ fi
+}
+
+install_falco_libs() {
+ if [ "$FALCO_LIBS_VERSION" ] && [ ! -f "falco-libs-$FALCO_LIBS_VERSION-done" ] ; then
+ echo "Downloading, building, and installing libsinsp and libscap:"
+ [ -f "falco-libs-$FALCO_LIBS_VERSION.tar.gz" ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" --remote-header-name "https://github.com/falcosecurity/libs/archive/refs/tags/$FALCO_LIBS_VERSION.tar.gz"
+ $no_build && echo "Skipping installation" && return
+ mv "libs-$FALCO_LIBS_VERSION.tar.gz" "falco-libs-$FALCO_LIBS_VERSION.tar.gz"
+ tar -xf "falco-libs-$FALCO_LIBS_VERSION.tar.gz"
+ mv "libs-$FALCO_LIBS_VERSION" "falco-libs-$FALCO_LIBS_VERSION"
+ cd "falco-libs-$FALCO_LIBS_VERSION"
+ patch -p1 < "${topdir}/tools/macos-setup-patches/falco-uthash_h-install.patch"
+ patch -p1 < "${topdir}/tools/macos-setup-patches/falco-include-dirs.patch"
+ mkdir build_dir
+ cd build_dir
+ "${DO_CMAKE[@]}" -DBUILD_SHARED_LIBS=ON -DMINIMAL_BUILD=ON -DCREATE_TEST_TARGETS=OFF \
+ -DUSE_BUNDLED_DEPS=ON -DUSE_BUNDLED_CARES=OFF -DUSE_BUNDLED_ZLIB=OFF \
+ -DUSE_BUNDLED_JSONCPP=OFF -DUSE_BUNDLED_TBB=OFF -DUSE_BUNDLED_RE2=OFF \
+ ..
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
+ cd ../..
+ touch "falco-libs-$FALCO_LIBS_VERSION-done"
+ fi
+}
+
+uninstall_falco_libs() {
+ if [ -n "$installed_falco_libs_version" ] && [ -s "falco-libs-$installed_falco_libs_version/build_dir/install_manifest.txt" ] ; then
+ echo "Uninstalling Falco libs:"
+ $DO_RM "$installation_prefix"/include/falcosecurity/uthash.h
+ while read -r ; do $DO_RM -v "$REPLY" ; done < <(cat "falco-libs-$installed_falco_libs_version/build_dir/install_manifest.txt"; echo)
+ rm "falco-libs-$installed_falco_libs_version-done"
+
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
+ #
+ # Get rid of the previously downloaded and unpacked version.
+ #
+ rm -rf "falco-libs-$installed_falco_libs_version"
+ rm -rf "falco-libs-$installed_falco_libs_version.tar.gz"
fi
+
+ installed_falco_libs_version=""
fi
+}
+
+install_python3() {
+ # The macos11 universal2 installer can be deployed to older versions,
+ # down to 10.9 (Mavericks). The 10.9 installer was deprecated in 3.9.8
+ # and stopped being released after 3.9.13
+ local macver=11
if [ "$PYTHON3_VERSION" -a ! -f python3-$PYTHON3_VERSION-done ] ; then
echo "Downloading and installing python3:"
- [ -f python-$PYTHON3_VERSION-macos$macver.pkg ] || curl -L -O https://www.python.org/ftp/python/$PYTHON3_VERSION/python-$PYTHON3_VERSION-macos$macver.pkg || exit 1
+ [ -f python-$PYTHON3_VERSION-macos$macver.pkg ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://www.python.org/ftp/python/$PYTHON3_VERSION/python-$PYTHON3_VERSION-macos$macver.pkg
$no_build && echo "Skipping installation" && return
- sudo installer -target / -pkg python-$PYTHON3_VERSION-macos$macver.pkg || exit 1
+ sudo installer -target / -pkg python-$PYTHON3_VERSION-macos$macver.pkg
touch python3-$PYTHON3_VERSION-done
#
@@ -2506,7 +2628,7 @@ install_python3() {
#
# Strip off any dot-dot component in $PYTHON3_VERSION.
#
- python_version=`echo $PYTHON3_VERSION | sed 's/\([1-9][0-9]*\.[1-9][0-9]*\).*/\1/'`
+ python_version=$( echo "$PYTHON3_VERSION" | sed 's/\([1-9][0-9]*\.[1-9][0-9]*\).*/\1/' )
#
# Now treat Meson as being in the directory in question.
#
@@ -2524,12 +2646,12 @@ install_python3() {
uninstall_python3() {
# Major version (e.g. "3.7")
local PYTHON_VERSION=${installed_python3_version%.*}
- if [ ! -z "$installed_python3_version" ] ; then
+ if [ -n "$installed_python3_version" ] ; then
echo "Uninstalling python3:"
frameworkdir="/Library/Frameworks/Python.framework/Versions/$PYTHON_VERSION"
sudo rm -rf "$frameworkdir"
sudo rm -rf "/Applications/Python $PYTHON_VERSION"
- sudo find /usr/local/bin -maxdepth 1 -lname "*$frameworkdir/bin/*" -delete
+ sudo find "$installation_prefix"/bin -maxdepth 1 -lname "*$frameworkdir/bin/*" -delete
# Remove three symlinks and empty directories. Removing directories
# might fail if for some reason multiple versions are installed.
sudo rm /Library/Frameworks/Python.framework/Headers
@@ -2560,39 +2682,39 @@ uninstall_python3() {
install_brotli() {
if [ "$BROTLI_VERSION" -a ! -f brotli-$BROTLI_VERSION-done ] ; then
echo "Downloading, building, and installing brotli:"
- [ -f brotli-$BROTLI_VERSION.tar.gz ] || curl -L -o brotli-$BROTLI_VERSION.tar.gz https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz || exit 1
+ [ -f brotli-$BROTLI_VERSION.tar.gz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" brotli-$BROTLI_VERSION.tar.gz https://github.com/google/brotli/archive/v$BROTLI_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat brotli-$BROTLI_VERSION.tar.gz | tar xf - || exit 1
+ gzcat brotli-$BROTLI_VERSION.tar.gz | tar xf -
cd brotli-$BROTLI_VERSION
mkdir build_dir
cd build_dir
- MACOSX_DEPLOYMENT_TARGET=$min_osx_target SDKROOT="$SDKPATH" $DO_CMAKE ../ || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ "${DO_CMAKE[@]}" ..
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ../..
touch brotli-$BROTLI_VERSION-done
fi
}
uninstall_brotli() {
- if [ ! -z "$installed_brotli_version" ] ; then
+ if [ -n "$installed_brotli_version" ] ; then
echo "Uninstalling brotli:"
cd brotli-$installed_brotli_version
#
# brotli uses cmake on macOS and doesn't support "make uninstall";
# just remove what we know it installs.
#
- # $DO_MAKE_UNINSTALL || exit 1
- $DO_RM -rf /usr/local/bin/brotli \
- /usr/local/lib/libbrotli* \
- /usr/local/include/brotli \
- /usr/local/lib/pkgconfig/libbrotli* || exit 1
+ # $DO_MAKE_UNINSTALL
+ $DO_RM -rf "$installation_prefix"/bin/brotli \
+ "$installation_prefix"/lib/libbrotli* \
+ "$installation_prefix"/include/brotli \
+ "$installation_prefix"/lib/pkgconfig/libbrotli*
#
# brotli uses cmake on macOS and doesn't support "make distclean";
# just remove the enire build directory.
#
- # make distclean || exit 1
- rm -rf build_dir || exit 1
+ # make distclean
+ rm -rf build_dir
cd ..
rm brotli-$installed_brotli_version-done
@@ -2611,25 +2733,26 @@ uninstall_brotli() {
install_minizip() {
if [ "$ZLIB_VERSION" ] && [ ! -f minizip-$ZLIB_VERSION-done ] ; then
echo "Downloading, building, and installing zlib for minizip:"
- [ -f zlib-$ZLIB_VERSION.tar.gz ] || curl -L -o zlib-$ZLIB_VERSION.tar.gz https://zlib.net/zlib-$ZLIB_VERSION.tar.gz || exit 1
+ [ -f zlib-$ZLIB_VERSION.tar.gz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" zlib-$ZLIB_VERSION.tar.gz https://zlib.net/fossils/zlib-$ZLIB_VERSION.tar.gz
$no_build && echo "Skipping installation" && return
- gzcat zlib-$ZLIB_VERSION.tar.gz | tar xf - || exit 1
+ gzcat zlib-$ZLIB_VERSION.tar.gz | tar xf -
#
# minizip ships both with a minimal Makefile that doesn't
# support "make install", "make uninstall", or "make distclean",
# and with a Makefile.am file that, if we do an autoreconf,
# gives us a configure script, and a Makefile.in that, if we run
- # the configure script, gives us a Makefile that supports ll of
+ # the configure script, gives us a Makefile that supports all of
# those targets, and that installs a pkg-config .pc file for
# minizip.
#
# So that's what we do.
#
- cd zlib-$ZLIB_VERSION/contrib/minizip || exit 1
+ cd zlib-$ZLIB_VERSION/contrib/minizip
LIBTOOLIZE=glibtoolize autoreconf --force --install
- CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" ./configure || exit 1
- make $MAKE_BUILD_OPTS || exit 1
- $DO_MAKE_INSTALL || exit 1
+ CFLAGS="$CFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" CXXFLAGS="$CXXFLAGS -D_FORTIFY_SOURCE=0 $VERSION_MIN_FLAGS $SDKFLAGS" LDFLAGS="$LDFLAGS $VERSION_MIN_FLAGS $SDKFLAGS" \
+ ./configure "${CONFIGURE_OPTS[@]}"
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
cd ../../..
touch minizip-$ZLIB_VERSION-done
fi
@@ -2639,8 +2762,8 @@ uninstall_minizip() {
if [ -n "$installed_minizip_version" ] ; then
echo "Uninstalling minizip:"
cd zlib-$installed_minizip_version/contrib/minizip
- $DO_MAKE_UNINSTALL || exit 1
- make distclean || exit 1
+ $DO_MAKE_UNINSTALL
+ make distclean
cd ../../..
rm minizip-$installed_minizip_version-done
@@ -2657,16 +2780,54 @@ uninstall_minizip() {
fi
}
+install_minizip_ng() {
+ # Download, build (out-of-source, with CMake), and install minizip-ng,
+ # unless no MINIZIPNG_VERSION is requested or it was already installed
+ # (recorded by the minizip-ng-$VERSION-done marker file).
+ if [ "$MINIZIPNG_VERSION" ] && [ ! -f minizip-ng-$MINIZIPNG_VERSION-done ] ; then
+ echo "Downloading, building, and installing minizip-ng:"
+ # The GitHub tag tarball downloads as $MINIZIPNG_VERSION.tar.gz
+ # (no "minizip-ng-" prefix), hence the bare-version local file name.
+ [ -f $MINIZIPNG_VERSION.tar.gz ] || curl "${CURL_REMOTE_NAME_OPTS[@]}" https://github.com/zlib-ng/minizip-ng/archive/refs/tags/$MINIZIPNG_VERSION.tar.gz
+ # With -n (no_build), only download; skip unpack/build/install.
+ $no_build && echo "Skipping installation" && return
+ gzcat $MINIZIPNG_VERSION.tar.gz | tar xf -
+ # The unpacked tree, however, *is* named minizip-ng-$VERSION.
+ cd minizip-ng-$MINIZIPNG_VERSION
+ mkdir build
+ cd build
+ "${DO_CMAKE[@]}" ..
+ make "${MAKE_BUILD_OPTS[@]}"
+ $DO_MAKE_INSTALL
+ cd ../..
+ touch minizip-ng-$MINIZIPNG_VERSION-done
+ fi
+}
+
+uninstall_minizip_ng() {
+ # Uninstall minizip-ng, if a -done marker recorded an installed
+ # version. With a single "-r" argument, also remove the downloaded
+ # tarball and the unpacked source tree.
+ if [ -n "$installed_minizip_ng_version" ] ; then
+ echo "Uninstalling minizip-ng:"
+ #
+ # minizip-ng is built with CMake in a "build" subdirectory (see
+ # install_minizip_ng), not in contrib/minizip as the zlib-bundled
+ # minizip is, so run the uninstall from there.
+ #
+ # NOTE(review): this assumes minizip-ng's CMake generates an
+ # "uninstall" target - verify against the requested version.
+ #
+ cd minizip-ng-$installed_minizip_ng_version/build
+ $DO_MAKE_UNINSTALL
+ #
+ # CMake-generated makefiles provide "clean" but not "distclean";
+ # stale configuration is fully removed by "-r" below anyway.
+ #
+ make clean
+ cd ../..
+
+ rm minizip-ng-$installed_minizip_ng_version-done
+
+ if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
+ #
+ # Get rid of the previously downloaded and unpacked version.
+ #
+ rm -rf minizip-ng-$installed_minizip_ng_version
+ rm -rf minizip-ng-$installed_minizip_ng_version.tar.gz
+ fi
+
+ installed_minizip_ng_version=""
+ fi
+}
install_sparkle() {
 if [ "$SPARKLE_VERSION" ] && [ ! -f sparkle-$SPARKLE_VERSION-done ] ; then
 echo "Downloading and installing Sparkle:"
 #
- # Download the tarball and unpack it in /usr/local/Sparkle-x.y.z
+ # Download the tarball and unpack it in $installation_prefix/Sparkle-x.y.z
 #
- [ -f Sparkle-$SPARKLE_VERSION.tar.xz ] || curl -L -o Sparkle-$SPARKLE_VERSION.tar.xz https://github.com/sparkle-project/Sparkle/releases/download/$SPARKLE_VERSION/Sparkle-$SPARKLE_VERSION.tar.xz || exit 1
+ [ -f Sparkle-$SPARKLE_VERSION.tar.xz ] || curl "${CURL_LOCAL_NAME_OPTS[@]}" Sparkle-$SPARKLE_VERSION.tar.xz https://github.com/sparkle-project/Sparkle/releases/download/$SPARKLE_VERSION/Sparkle-$SPARKLE_VERSION.tar.xz
 $no_build && echo "Skipping installation" && return
+ # Sparkle ships prebuilt; there is no build step, just unpack in place.
+ # NOTE(review): mkdir/tar use sudo unconditionally here, unlike the
+ # permission-dependent $DO_MAKE_INSTALL pattern used elsewhere in this
+ # script - presumably intentional, but verify for non-root prefixes.
- test -d "/usr/local/Sparkle-$SPARKLE_VERSION" || sudo mkdir "/usr/local/Sparkle-$SPARKLE_VERSION"
- sudo tar -C "/usr/local/Sparkle-$SPARKLE_VERSION" -xpof Sparkle-$SPARKLE_VERSION.tar.xz
+ test -d "$installation_prefix/Sparkle-$SPARKLE_VERSION" || sudo mkdir "$installation_prefix/Sparkle-$SPARKLE_VERSION"
+ sudo tar -C "$installation_prefix/Sparkle-$SPARKLE_VERSION" -xpof Sparkle-$SPARKLE_VERSION.tar.xz
 touch sparkle-$SPARKLE_VERSION-done
 fi
}
@@ -2674,7 +2835,10 @@ install_sparkle() {
uninstall_sparkle() {
if [ -n "$installed_sparkle_version" ]; then
echo "Uninstalling Sparkle:"
- sudo rm -rf "/usr/local/Sparkle-$installed_sparkle_version"
+ sudo rm -rf "$installation_prefix/Sparkle-$installed_sparkle_version"
+
+ rm sparkle-$installed_sparkle_version-done
+
if [ "$#" -eq 1 ] && [ "$1" = "-r" ] ; then
rm -f "Sparkle-$installed_sparkle_version.tar.xz"
fi
@@ -2688,7 +2852,7 @@ install_all() {
# Check whether the versions we have installed are the versions
# requested; if not, uninstall the installed versions.
#
- if [ ! -z "$installed_brotli_version" -a \
+ if [ -n "$installed_brotli_version" -a \
"$installed_brotli_version" != "$BROTLI_VERSION" ] ; then
echo "Installed brotli version is $installed_brotli_version"
if [ -z "$BROTLI_VERSION" ] ; then
@@ -2699,7 +2863,7 @@ install_all() {
uninstall_brotli -r
fi
- if [ ! -z "$installed_python3_version" -a \
+ if [ -n "$installed_python3_version" -a \
"$installed_python3_version" != "$PYTHON3_VERSION" ] ; then
echo "Installed python3 version is $installed_python3_version"
if [ -z "$PYTHON3_VERSION" ] ; then
@@ -2710,7 +2874,7 @@ install_all() {
uninstall_python3 -r
fi
- if [ ! -z "$installed_bcg729_version" -a \
+ if [ -n "$installed_bcg729_version" -a \
"$installed_bcg729_version" != "$BCG729_VERSION" ] ; then
echo "Installed bcg729 version is $installed_bcg729_version"
if [ -z "$BCG729_VERSION" ] ; then
@@ -2732,6 +2896,17 @@ install_all() {
uninstall_ilbc -r
fi
+ if [ -n "$installed_opencore_amr_version" ] \
+ && [ "$installed_opencore_amr_version" != "$OPENCORE_AMR_VERSION" ] ; then
+ echo "Installed opencore-amr version is $installed_opencore_amr_version"
+ if [ -z "$OPENCORE_AMR_VERSION" ] ; then
+ echo "opencore-amr is not requested"
+ else
+ echo "Requested opencore-amr version is $OPENCORE_AMR_VERSION"
+ fi
+ uninstall_opencore_amr -r
+ fi
+
if [ -n "$installed_opus_version" ] \
&& [ "$installed_opus_version" != "$OPUS_VERSION" ] ; then
echo "Installed opus version is $installed_opus_version"
@@ -2743,7 +2918,7 @@ install_all() {
uninstall_opus -r
fi
- if [ ! -z "$installed_spandsp_version" -a \
+ if [ -n "$installed_spandsp_version" -a \
"$installed_spandsp_version" != "$SPANDSP_VERSION" ] ; then
echo "Installed SpanDSP version is $installed_spandsp_version"
if [ -z "$SPANDSP_VERSION" ] ; then
@@ -2754,7 +2929,7 @@ install_all() {
uninstall_spandsp -r
fi
- if [ ! -z "$installed_speexdsp_version" -a \
+ if [ -n "$installed_speexdsp_version" -a \
"$installed_speexdsp_version" != "$SPEEXDSP_VERSION" ] ; then
echo "Installed SpeexDSP version is $installed_speexdsp_version"
if [ -z "$SPEEXDSP_VERSION" ] ; then
@@ -2765,7 +2940,7 @@ install_all() {
uninstall_speexdsp -r
fi
- if [ ! -z "$installed_libtiff_version" -a \
+ if [ -n "$installed_libtiff_version" -a \
"$installed_libtiff_version" != "$LIBTIFF_VERSION" ] ; then
echo "Installed libtiff version is $installed_libtiff_version"
if [ -z "$LIBTIFF_VERSION" ] ; then
@@ -2776,7 +2951,7 @@ install_all() {
uninstall_libtiff -r
fi
- if [ ! -z "$installed_nghttp2_version" -a \
+ if [ -n "$installed_nghttp2_version" -a \
"$installed_nghttp2_version" != "$NGHTTP2_VERSION" ] ; then
echo "Installed nghttp2 version is $installed_nghttp2_version"
if [ -z "$NGHTTP2_VERSION" ] ; then
@@ -2787,7 +2962,7 @@ install_all() {
uninstall_nghttp2 -r
fi
- if [ ! -z "$installed_nghttp3_version" -a \
+ if [ -n "$installed_nghttp3_version" -a \
"$installed_nghttp3_version" != "$NGHTTP3_VERSION" ] ; then
echo "Installed nghttp3 version is $installed_nghttp3_version"
if [ -z "$NGHTTP3_VERSION" ] ; then
@@ -2798,7 +2973,7 @@ install_all() {
uninstall_nghttp3 -r
fi
- if [ ! -z "$installed_libssh_version" -a \
+ if [ -n "$installed_libssh_version" -a \
"$installed_libssh_version" != "$LIBSSH_VERSION" ] ; then
echo "Installed libssh version is $installed_libssh_version"
if [ -z "$LIBSSH_VERSION" ] ; then
@@ -2809,7 +2984,7 @@ install_all() {
uninstall_libssh -r
fi
- if [ ! -z "$installed_cares_version" -a \
+ if [ -n "$installed_cares_version" -a \
"$installed_cares_version" != "$CARES_VERSION" ] ; then
echo "Installed C-Ares version is $installed_cares_version"
if [ -z "$CARES_VERSION" ] ; then
@@ -2820,7 +2995,7 @@ install_all() {
uninstall_c_ares -r
fi
- if [ ! -z "$installed_maxminddb_version" -a \
+ if [ -n "$installed_maxminddb_version" -a \
"$installed_maxminddb_version" != "$MAXMINDDB_VERSION" ] ; then
echo "Installed MaxMindDB API version is $installed_maxminddb_version"
if [ -z "$MAXMINDDB_VERSION" ] ; then
@@ -2831,7 +3006,7 @@ install_all() {
uninstall_maxminddb -r
fi
- if [ ! -z "$installed_sbc_version" -a \
+ if [ -n "$installed_sbc_version" -a \
"$installed_sbc_version" != "$SBC_VERSION" ] ; then
echo "Installed SBC version is $installed_sbc_version"
if [ -z "$SBC_VERSION" ] ; then
@@ -2842,7 +3017,7 @@ install_all() {
uninstall_sbc -r
fi
- if [ ! -z "$installed_lz4_version" -a \
+ if [ -n "$installed_lz4_version" -a \
"$installed_lz4_version" != "$LZ4_VERSION" ] ; then
echo "Installed LZ4 version is $installed_lz4_version"
if [ -z "$LZ4_VERSION" ] ; then
@@ -2853,7 +3028,7 @@ install_all() {
uninstall_lz4 -r
fi
- if [ ! -z "$installed_libxml2_version" -a \
+ if [ -n "$installed_libxml2_version" -a \
"$installed_libxml2_version" != "$LIBXML2_VERSION" ] ; then
echo "Installed libxml2 version is $installed_libxml2_version"
if [ -z "$LIBXML2_VERSION" ] ; then
@@ -2864,7 +3039,7 @@ install_all() {
uninstall_libxml2 -r
fi
- if [ ! -z "$installed_snappy_version" -a \
+ if [ -n "$installed_snappy_version" -a \
"$installed_snappy_version" != "$SNAPPY_VERSION" ] ; then
echo "Installed SNAPPY version is $installed_snappy_version"
if [ -z "$SNAPPY_VERSION" ] ; then
@@ -2875,7 +3050,27 @@ install_all() {
uninstall_snappy -r
fi
- if [ ! -z "$installed_lua_version" -a \
+ if [ -n "$installed_zstd_version" ] && [ "$installed_zstd_version" != "$ZSTD_VERSION" ] ; then
+ echo "Installed zstd version is $installed_zstd_version"
+ if [ -z "$ZSTD_VERSION" ] ; then
+ echo "zstd is not requested"
+ else
+ echo "Requested zstd version is $ZSTD_VERSION"
+ fi
+ uninstall_zstd -r
+ fi
+
+ if [ -n "$installed_zlibng_version" ] && [ "$installed_zlibng_version" != "$ZLIBNG_VERSION" ] ; then
+ echo "Installed zlibng version is $installed_zlibng_version"
+ if [ -z "$ZLIBNG_VERSION" ] ; then
+ echo "zlibng is not requested"
+ else
+ echo "Requested zlibng version is $ZLIBNG_VERSION"
+ fi
+ uninstall_zlibng -r
+ fi
+
+ if [ -n "$installed_lua_version" -a \
"$installed_lua_version" != "$LUA_VERSION" ] ; then
echo "Installed Lua version is $installed_lua_version"
if [ -z "$LUA_VERSION" ] ; then
@@ -2886,8 +3081,7 @@ install_all() {
uninstall_lua -r
fi
- if [ ! -z "$installed_gnutls_version" -a \
- "$installed_gnutls_version" != "$GNUTLS_VERSION" ] ; then
+ if [ -n "$installed_gnutls_version" ] && [ "$installed_gnutls_version" != "$GNUTLS_VERSION" ] ; then
echo "Installed GnuTLS version is $installed_gnutls_version"
if [ -z "$GNUTLS_VERSION" ] ; then
echo "GnuTLS is not requested"
@@ -2897,7 +3091,7 @@ install_all() {
uninstall_gnutls -r
fi
- if [ ! -z "$installed_nettle_version" -a \
+ if [ -n "$installed_nettle_version" -a \
"$installed_nettle_version" != "$NETTLE_VERSION" ] ; then
echo "Installed Nettle version is $installed_nettle_version"
if [ -z "$NETTLE_VERSION" ] ; then
@@ -2908,7 +3102,7 @@ install_all() {
uninstall_nettle -r
fi
- if [ ! -z "$installed_gmp_version" -a \
+ if [ -n "$installed_gmp_version" -a \
"$installed_gmp_version" != "$GMP_VERSION" ] ; then
echo "Installed GMP version is $installed_gmp_version"
if [ -z "$GMP_VERSION" ] ; then
@@ -2919,7 +3113,7 @@ install_all() {
uninstall_gmp -r
fi
- if [ ! -z "$installed_p11_kit_version" -a \
+ if [ -n "$installed_p11_kit_version" -a \
"$installed_p11_kit_version" != "$P11KIT_VERSION" ] ; then
echo "Installed p11-kit version is $installed_p11_kit_version"
if [ -z "$P11KIT_VERSION" ] ; then
@@ -2930,7 +3124,7 @@ install_all() {
uninstall_p11_kit -r
fi
- if [ ! -z "$installed_libtasn1_version" -a \
+ if [ -n "$installed_libtasn1_version" -a \
"$installed_libtasn1_version" != "$LIBTASN1_VERSION" ] ; then
echo "Installed libtasn1 version is $installed_libtasn1_version"
if [ -z "$LIBTASN1_VERSION" ] ; then
@@ -2941,7 +3135,7 @@ install_all() {
uninstall_libtasn1 -r
fi
- if [ ! -z "$installed_libgcrypt_version" -a \
+ if [ -n "$installed_libgcrypt_version" -a \
"$installed_libgcrypt_version" != "$LIBGCRYPT_VERSION" ] ; then
echo "Installed libgcrypt version is $installed_libgcrypt_version"
if [ -z "$LIBGCRYPT_VERSION" ] ; then
@@ -2952,7 +3146,7 @@ install_all() {
uninstall_libgcrypt -r
fi
- if [ ! -z "$installed_libgpg_error_version" -a \
+ if [ -n "$installed_libgpg_error_version" -a \
"$installed_libgpg_error_version" != "$LIBGPG_ERROR_VERSION" ] ; then
echo "Installed libgpg-error version is $installed_libgpg_error_version"
if [ -z "$LIBGPG_ERROR_VERSION" ] ; then
@@ -2963,7 +3157,7 @@ install_all() {
uninstall_libgpg_error -r
fi
- if [ ! -z "$installed_libsmi_version" -a \
+ if [ -n "$installed_libsmi_version" -a \
"$installed_libsmi_version" != "$LIBSMI_VERSION" ] ; then
echo "Installed libsmi version is $installed_libsmi_version"
if [ -z "$LIBSMI_VERSION" ] ; then
@@ -2974,7 +3168,7 @@ install_all() {
uninstall_libsmi -r
fi
- if [ ! -z "$installed_qt_version" -a \
+ if [ -n "$installed_qt_version" -a \
"$installed_qt_version" != "$QT_VERSION" ] ; then
echo "Installed Qt version is $installed_qt_version"
if [ -z "$QT_VERSION" ] ; then
@@ -2985,7 +3179,7 @@ install_all() {
uninstall_qt -r
fi
- if [ ! -z "$installed_glib_version" -a \
+ if [ -n "$installed_glib_version" -a \
"$installed_glib_version" != "$GLIB_VERSION" ] ; then
echo "Installed GLib version is $installed_glib_version"
if [ -z "$GLIB_VERSION" ] ; then
@@ -2996,7 +3190,7 @@ install_all() {
uninstall_glib -r
fi
- if [ ! -z "$installed_pkg_config_version" -a \
+ if [ -n "$installed_pkg_config_version" -a \
"$installed_pkg_config_version" != "$PKG_CONFIG_VERSION" ] ; then
echo "Installed pkg-config version is $installed_pkg_config_version"
if [ -z "$PKG_CONFIG_VERSION" ] ; then
@@ -3007,7 +3201,7 @@ install_all() {
uninstall_pkg_config -r
fi
- if [ ! -z "$installed_gettext_version" -a \
+ if [ -n "$installed_gettext_version" -a \
"$installed_gettext_version" != "$GETTEXT_VERSION" ] ; then
echo "Installed GNU gettext version is $installed_gettext_version"
if [ -z "$GETTEXT_VERSION" ] ; then
@@ -3018,7 +3212,7 @@ install_all() {
uninstall_gettext -r
fi
- if [ ! -z "$installed_ninja_version" -a \
+ if [ -n "$installed_ninja_version" -a \
"$installed_ninja_version" != "$NINJA_VERSION" ] ; then
echo "Installed Ninja version is $installed_ninja_version"
if [ -z "$NINJA_VERSION" ] ; then
@@ -3029,7 +3223,7 @@ install_all() {
uninstall_ninja -r
fi
- if [ ! -z "$installed_asciidoctorpdf_version" -a \
+ if [ -n "$installed_asciidoctorpdf_version" -a \
"$installed_asciidoctorpdf_version" != "$ASCIIDOCTORPDF_VERSION" ] ; then
echo "Installed Asciidoctor-pdf version is $installed_asciidoctorpdf_version"
if [ -z "$ASCIIDOCTORPDF_VERSION" ] ; then
@@ -3044,7 +3238,7 @@ install_all() {
uninstall_asciidoctorpdf -r
fi
- if [ ! -z "$installed_asciidoctor_version" -a \
+ if [ -n "$installed_asciidoctor_version" -a \
"$installed_asciidoctor_version" != "$ASCIIDOCTOR_VERSION" ] ; then
echo "Installed Asciidoctor version is $installed_asciidoctor_version"
if [ -z "$ASCIIDOCTOR_VERSION" ] ; then
@@ -3059,7 +3253,7 @@ install_all() {
uninstall_asciidoctor -r
fi
- if [ ! -z "$installed_cmake_version" -a \
+ if [ -n "$installed_cmake_version" -a \
"$installed_cmake_version" != "$CMAKE_VERSION" ] ; then
echo "Installed CMake version is $installed_cmake_version"
if [ -z "$CMAKE_VERSION" ] ; then
@@ -3070,7 +3264,7 @@ install_all() {
uninstall_cmake -r
fi
- if [ ! -z "$installed_libtool_version" -a \
+ if [ -n "$installed_libtool_version" -a \
"$installed_libtool_version" != "$LIBTOOL_VERSION" ] ; then
echo "Installed GNU libtool version is $installed_libtool_version"
if [ -z "$LIBTOOL_VERSION" ] ; then
@@ -3081,7 +3275,7 @@ install_all() {
uninstall_libtool -r
fi
- if [ ! -z "$installed_automake_version" -a \
+ if [ -n "$installed_automake_version" -a \
"$installed_automake_version" != "$AUTOMAKE_VERSION" ] ; then
echo "Installed GNU automake version is $installed_automake_version"
if [ -z "$AUTOMAKE_VERSION" ] ; then
@@ -3092,7 +3286,7 @@ install_all() {
uninstall_automake -r
fi
- if [ ! -z "$installed_autoconf_version" -a \
+ if [ -n "$installed_autoconf_version" -a \
"$installed_autoconf_version" != "$AUTOCONF_VERSION" ] ; then
echo "Installed GNU autoconf version is $installed_autoconf_version"
if [ -z "$AUTOCONF_VERSION" ] ; then
@@ -3103,14 +3297,9 @@ install_all() {
uninstall_autoconf -r
fi
- if [ ! -z "$installed_pcre_version" -a \
- "$installed_pcre_version" != "$PCRE_VERSION" ] ; then
- echo "Installed pcre version is $installed_pcre_version"
- if [ -z "$PCRE_VERSION" ] ; then
- echo "pcre is not requested"
- else
- echo "Requested pcre version is $PCRE_VERSION"
- fi
+ if [ -n "$installed_pcre_version" ] ; then
+ echo "Installed pcre1 version is $installed_pcre_version"
+ echo "(We no longer build with pcre1)"
uninstall_pcre -r
fi
@@ -3125,18 +3314,12 @@ install_all() {
uninstall_pcre2 -r
fi
- if [ ! -z "$installed_lzip_version" -a \
- "$installed_lzip_version" != "$LZIP_VERSION" ] ; then
- echo "Installed lzip version is $installed_lzip_version"
- if [ -z "$LZIP_VERSION" ] ; then
- echo "lzip is not requested"
- else
- echo "Requested lzip version is $LZIP_VERSION"
- fi
+ if [ -n "$installed_lzip_version" ] ; then
+ echo "Removing legacy install of lzip"
uninstall_lzip -r
fi
- if [ ! -z "$installed_xz_version" -a \
+ if [ -n "$installed_xz_version" -a \
"$installed_xz_version" != "$XZ_VERSION" ] ; then
echo "Installed xz version is $installed_xz_version"
if [ -z "$XZ_VERSION" ] ; then
@@ -3147,7 +3330,7 @@ install_all() {
uninstall_xz -r
fi
- if [ ! -z "$installed_curl_version" -a \
+ if [ -n "$installed_curl_version" -a \
"$installed_curl_version" != "$CURL_VERSION" ] ; then
echo "Installed curl version is $installed_curl_version"
if [ -z "$CURL_VERSION" ] ; then
@@ -3158,7 +3341,7 @@ install_all() {
uninstall_curl -r
fi
- if [ ! -z "$installed_minizip_version" -a \
+ if [ -n "$installed_minizip_version" -a \
"$installed_minizip_version" != "$ZLIB_VERSION" ] ; then
echo "Installed minizip (zlib) version is $installed_minizip_version"
if [ -z "$ZLIB_VERSION" ] ; then
@@ -3169,7 +3352,17 @@ install_all() {
uninstall_minizip -r
fi
- if [ ! -z "$installed_sparkle_version" -a \
+ if [ -n "$installed_minizip_ng_version" ] && [ "$installed_minizip_ng_version" != "$MINIZIPNG_VERSION" ] ; then
+ echo "Installed minizip-ng version is $installed_minizip_ng_version"
+ if [ -z "$MINIZIPNG_VERSION" ] ; then
+ echo "minizip-ng is not requested"
+ else
+ echo "Requested minizip-ng version is $MINIZIPNG_VERSION"
+ fi
+ uninstall_minizip_ng -r
+ fi
+
+ if [ -n "$installed_sparkle_version" -a \
"$installed_sparkle_version" != "$SPARKLE_VERSION" ] ; then
echo "Installed Sparkle version is $installed_sparkle_version"
if [ -z "$SPARKLE_VERSION" ] ; then
@@ -3180,20 +3373,56 @@ install_all() {
uninstall_sparkle -r
fi
+ if [ "$installed_falco_libs_version" ] && [ "$installed_falco_libs_version" != "$FALCO_LIBS_VERSION" ] ; then
+ echo "Installed Falco libs (libsinsp and libscap) version is $installed_falco_libs_version"
+ if [ -z "$FALCO_LIBS_VERSION" ] ; then
+ echo "Falco libs is not requested"
+ else
+ echo "Requested Falco libs version is $FALCO_LIBS_VERSION"
+ fi
+ uninstall_falco_libs -r
+ fi
+
+ if [ "$installed_jsoncpp_version" ] && [ "$installed_jsoncpp_version" != "$JSONCPP_VERSION" ] ; then
+ echo "Installed JsonCpp version is $installed_jsoncpp_version"
+ if [ -z "$JSONCPP_VERSION" ] ; then
+ echo "JsonCpp is not requested"
+ else
+ echo "Requested JsonCpp version is $JSONCPP_VERSION"
+ fi
+ uninstall_jsoncpp -r
+ fi
+
+ if [ "$installed_onetbb_version" ] && [ "$installed_onetbb_version" != "$ONETBB_VERSION" ] ; then
+ echo "Installed oneTBB version is $installed_onetbb_version"
+ if [ -z "$ONETBB_VERSION" ] ; then
+ echo "oneTBB is not requested"
+ else
+ echo "Requested oneTBB version is $ONETBB_VERSION"
+ fi
+ uninstall_onetbb -r
+ fi
+
+ if [ "$installed_re2_version" ] && [ "$installed_re2_version" != "$RE2_VERSION" ] ; then
+ echo "Installed RE2 version is $installed_re2_version"
+ if [ -z "$RE2_VERSION" ] ; then
+ echo "RE2 is not requested"
+ else
+ echo "Requested RE2 version is $RE2_VERSION"
+ fi
+ uninstall_re2 -r
+ fi
+
#
# Start with curl: we may need it to download and install xz.
#
install_curl
#
- # Now intall xz: it is the sole download format of glib later than 2.31.2.
+ # Now install xz: it is the sole download format of glib later than 2.31.2.
#
install_xz
- install_lzip
-
- install_pcre
-
install_autoconf
install_automake
@@ -3284,6 +3513,8 @@ install_all() {
install_zstd
+ install_zlibng
+
install_libxml2
install_lz4
@@ -3310,13 +3541,25 @@ install_all() {
install_ilbc
+ install_opencore_amr
+
install_opus
install_brotli
install_minizip
+ install_minizip_ng
+
install_sparkle
+
+ install_re2
+
+ install_onetbb
+
+ install_jsoncpp
+
+ install_falco_libs
}
uninstall_all() {
@@ -3333,14 +3576,26 @@ uninstall_all() {
# We also do a "make distclean", so that we don't have leftovers from
# old configurations.
#
+ uninstall_falco_libs
+
+ uninstall_jsoncpp
+
+ uninstall_onetbb
+
+ uninstall_re2
+
uninstall_sparkle
uninstall_minizip
+ uninstall_minizip_ng
+
uninstall_brotli
uninstall_opus
+ uninstall_opencore_amr
+
uninstall_ilbc
uninstall_bcg729
@@ -3365,6 +3620,8 @@ uninstall_all() {
uninstall_zstd
+ uninstall_zlibng
+
uninstall_libxml2
uninstall_lz4
@@ -3424,6 +3681,7 @@ uninstall_all() {
uninstall_pcre
+ # Legacy, remove
uninstall_lzip
uninstall_xz
@@ -3432,66 +3690,12 @@ uninstall_all() {
fi
}
-#
-# Do we have permission to write in /usr/local?
-#
-# If so, assume we have permission to write in its subdirectories.
-# (If that's not the case, this test needs to check the subdirectories
-# as well.)
-#
-# If not, do "make install", "make uninstall", "ninja install",
-# "ninja uninstall", the removes for dependencies that don't support
-# "make uninstall" or "ninja uninstall", the renames of [g]libtool*,
-# and the writing of a libffi .pc file with sudo.
-#
-if [ -w /usr/local ]
-then
- DO_MAKE_INSTALL="make install"
- DO_MAKE_UNINSTALL="make uninstall"
- DO_NINJA_INSTALL="ninja -C _build install"
- DO_NINJA_UNINSTALL="ninja -C _build uninstall"
- DO_TEE_TO_PC_FILE="tee"
- DO_RM="rm"
- DO_MV="mv"
-else
- DO_MAKE_INSTALL="sudo make install"
- DO_MAKE_UNINSTALL="sudo make uninstall"
- DO_NINJA_INSTALL="sudo ninja -C _build install"
- DO_NINJA_UNINSTALL="sudo ninja -C _build uninstall"
- DO_TEE_TO_PC_FILE="sudo tee"
- DO_RM="sudo rm"
- DO_MV="sudo mv"
-fi
-
-#
-# When building with CMake, don't build libraries with an install path
-# that begins with @rpath because that will cause binaries linked with it
-# to use that path as the library to look for, and that will cause the
-# run-time linker, at least on macOS 14 and later, not to find the library
-# in /usr/local/lib unless you explicitly set DYLD_LIBRARY_PATH to include
-# /usr/local/lib. That means that you get "didn't find libpcre" errors if
-# you try to run binaries from a build unless you set DYLD_LIBRARYPATH to
-# include /usr/local/lib.
-#
-# However, setting CMAKE_MACOSX_RPATH to OFF causes the installed
-# library just to have the file name of the library as its install
-# name. It needs to be the full installed path of the library in
-# order to make running binaries from the build directory work, so
-# we set CMAKE_INSTALL_NAME_DIR to /usr/local/lib.
-#
-# packaging/macosx/osx-app.sh will convert *all* libraries in
-# the app bundle to have an @rpath install name, so this won't
-# break anything there; it just fixes the ability to run from the
-# build directory.
-#
-DO_CMAKE="cmake -DCMAKE_MACOSX_RPATH=OFF -DCMAKE_INSTALL_NAME_DIR=/usr/local/lib"
-
# This script is meant to be run in the source root. The following
-# code will attempt to get you there, but is not perfect (particulary
+# code will attempt to get you there, but is not perfect (particularly
# if someone copies the script).
-topdir=`pwd`/`dirname $0`/..
-cd $topdir
+topdir="$( pwd )/$( dirname "$0" )/.."
+cd "$topdir"
# Preference of the support libraries directory:
# ${MACOSX_SUPPORT_LIBS}
@@ -3517,7 +3721,7 @@ for i in /Developer/SDKs \
do
if [ -d "$i" ]
then
- min_osx_target=`sw_vers -productVersion | sed 's/\([0-9]*\)\.\([0-9]*\)\.[0-9]*/\1.\2/'`
+ min_osx_target=$( sw_vers -productVersion | sed 's/\([0-9]*\)\.\([0-9]*\)\.[0-9]*/\1.\2/' )
break
fi
done
@@ -3534,78 +3738,159 @@ done
no_build=false
-while getopts ht:un name
+installation_prefix=/usr/local
+
+while getopts hnp:t:u name
do
case $name in
- u)
- do_uninstall=yes
+ h|\?)
+ echo "Usage: macos-setup.sh [ -n ] [ -p <installation prefix> ] [ -t <target> ] [ -u ]" 1>&1
+ exit 0
;;
n)
no_build=true
;;
+ p)
+ installation_prefix="$OPTARG"
+ ;;
t)
min_osx_target="$OPTARG"
;;
- h|?)
- echo "Usage: macos-setup.sh [ -t <target> ] [ -u ] [ -n ]" 1>&1
- exit 0
+ u)
+ do_uninstall=yes
;;
esac
done
#
+# Create our custom installation prefix if needed.
+#
+if [ "$installation_prefix" != "/usr/local" ] ; then
+ export PATH="$installation_prefix/bin:$PATH"
+ if [ ! -d "$installation_prefix" ] ; then
+ echo "Creating $installation_prefix"
+ $DO_MKDIR "$installation_prefix"
+ fi
+fi
+
+#
+# Do we have permission to write in $installation_prefix?
+#
+# If so, assume we have permission to write in its subdirectories.
+# (If that's not the case, this test needs to check the subdirectories
+# as well.)
+#
+# If not, do "make install", "make uninstall", "ninja install",
+# "ninja uninstall", the removes for dependencies that don't support
+# "make uninstall" or "ninja uninstall", the renames of [g]libtool*,
+# and the writing of a libffi .pc file with sudo.
+#
+if [ -w "$installation_prefix" ]
+then
+ DO_MAKE="make"
+ DO_MAKE_INSTALL="make install"
+ DO_MAKE_UNINSTALL="make uninstall"
+ DO_NINJA_INSTALL="ninja -C _build install"
+ DO_NINJA_UNINSTALL="ninja -C _build uninstall"
+ DO_TEE_TO_PC_FILE="tee"
+ DO_RM="rm"
+ DO_MV="mv"
+else
+ DO_MAKE="sudo make"
+ DO_MAKE_INSTALL="sudo make install"
+ DO_MAKE_UNINSTALL="sudo make uninstall"
+ DO_NINJA_INSTALL="sudo ninja -C _build install"
+ DO_NINJA_UNINSTALL="sudo ninja -C _build uninstall"
+ DO_TEE_TO_PC_FILE="sudo tee"
+ DO_RM="sudo rm"
+ DO_MV="sudo mv"
+fi
+
+#
+# When building with CMake, don't build libraries with an install path
+# that begins with @rpath because that will cause binaries linked with it
+# to use that path as the library to look for, and that will cause the
+# run-time linker, at least on macOS 14 and later, not to find the library
+# in $installation_prefix/lib unless you explicitly set DYLD_LIBRARY_PATH to include
+# $installation_prefix/lib. That means that you get "didn't find libpcre" errors if
+# you try to run binaries from a build unless you set DYLD_LIBRARY_PATH to
+# include $installation_prefix/lib.
+#
+# However, setting CMAKE_MACOSX_RPATH to OFF causes the installed
+# library just to have the file name of the library as its install
+# name. It needs to be the full installed path of the library in
+# order to make running binaries from the build directory work, so
+# we set CMAKE_INSTALL_NAME_DIR to $installation_prefix/lib.
+#
+# packaging/macosx/osx-app.sh will convert *all* libraries in
+# the app bundle to have an @rpath install name, so this won't
+# break anything there; it just fixes the ability to run from the
+# build directory.
+#
+DO_CMAKE=( cmake
+ -DCMAKE_OSX_DEPLOYMENT_TARGET="$min_osx_target"
+ -DSDKROOT="$SDKPATH"
+ -DCMAKE_MACOSX_RPATH=OFF
+ -DCMAKE_INSTALL_PREFIX="$installation_prefix"
+ -DCMAKE_INSTALL_NAME_DIR="$installation_prefix/lib"
+ )
+
+#
# Get the version numbers of installed packages, if any.
#
if [ -d "${MACOSX_SUPPORT_LIBS}" ]
then
cd "${MACOSX_SUPPORT_LIBS}"
- installed_xz_version=`ls xz-*-done 2>/dev/null | sed 's/xz-\(.*\)-done/\1/'`
- installed_lzip_version=`ls lzip-*-done 2>/dev/null | sed 's/lzip-\(.*\)-done/\1/'`
- installed_pcre_version=`ls pcre-*-done 2>/dev/null | sed 's/pcre-\(.*\)-done/\1/'`
- installed_pcre2_version=$(ls pcre2-*-done 2>/dev/null | sed 's/pcre2-\(.*\)-done/\1/')
- installed_autoconf_version=`ls autoconf-*-done 2>/dev/null | sed 's/autoconf-\(.*\)-done/\1/'`
- installed_automake_version=`ls automake-*-done 2>/dev/null | sed 's/automake-\(.*\)-done/\1/'`
- installed_libtool_version=`ls libtool-*-done 2>/dev/null | sed 's/libtool-\(.*\)-done/\1/'`
- installed_cmake_version=`ls cmake-*-done 2>/dev/null | sed 's/cmake-\(.*\)-done/\1/'`
- installed_ninja_version=`ls ninja-*-done 2>/dev/null | sed 's/ninja-\(.*\)-done/\1/'`
- installed_asciidoctor_version=`ls asciidoctor-*-done 2>/dev/null | sed 's/asciidoctor-\(.*\)-done/\1/'`
- installed_asciidoctorpdf_version=`ls asciidoctorpdf-*-done 2>/dev/null | sed 's/asciidoctorpdf-\(.*\)-done/\1/'`
- installed_gettext_version=`ls gettext-*-done 2>/dev/null | sed 's/gettext-\(.*\)-done/\1/'`
- installed_pkg_config_version=`ls pkg-config-*-done 2>/dev/null | sed 's/pkg-config-\(.*\)-done/\1/'`
- installed_glib_version=`ls glib-*-done 2>/dev/null | sed 's/glib-\(.*\)-done/\1/'`
- installed_qt_version=`ls qt-*-done 2>/dev/null | sed 's/qt-\(.*\)-done/\1/'`
- installed_libsmi_version=`ls libsmi-*-done 2>/dev/null | sed 's/libsmi-\(.*\)-done/\1/'`
- installed_libgpg_error_version=`ls libgpg-error-*-done 2>/dev/null | sed 's/libgpg-error-\(.*\)-done/\1/'`
- installed_libgcrypt_version=`ls libgcrypt-*-done 2>/dev/null | sed 's/libgcrypt-\(.*\)-done/\1/'`
- installed_gmp_version=`ls gmp-*-done 2>/dev/null | sed 's/gmp-\(.*\)-done/\1/'`
- installed_libtasn1_version=`ls libtasn1-*-done 2>/dev/null | sed 's/libtasn1-\(.*\)-done/\1/'`
- installed_p11_kit_version=`ls p11-kit-*-done 2>/dev/null | sed 's/p11-kit-\(.*\)-done/\1/'`
- installed_nettle_version=`ls nettle-*-done 2>/dev/null | sed 's/nettle-\(.*\)-done/\1/'`
- installed_gnutls_version=`ls gnutls-*-done 2>/dev/null | sed 's/gnutls-\(.*\)-done/\1/'`
- installed_lua_version=`ls lua-*-done 2>/dev/null | sed 's/lua-\(.*\)-done/\1/'`
- installed_snappy_version=`ls snappy-*-done 2>/dev/null | sed 's/snappy-\(.*\)-done/\1/'`
- installed_zstd_version=`ls zstd-*-done 2>/dev/null | sed 's/zstd-\(.*\)-done/\1/'`
- installed_libxml2_version=`ls libxml2-*-done 2>/dev/null | sed 's/libxml2-\(.*\)-done/\1/'`
- installed_lz4_version=`ls lz4-*-done 2>/dev/null | sed 's/lz4-\(.*\)-done/\1/'`
- installed_sbc_version=`ls sbc-*-done 2>/dev/null | sed 's/sbc-\(.*\)-done/\1/'`
- installed_maxminddb_version=`ls maxminddb-*-done 2>/dev/null | sed 's/maxminddb-\(.*\)-done/\1/'`
- installed_cares_version=`ls c-ares-*-done 2>/dev/null | sed 's/c-ares-\(.*\)-done/\1/'`
- installed_libssh_version=`ls libssh-*-done 2>/dev/null | sed 's/libssh-\(.*\)-done/\1/'`
- installed_nghttp2_version=`ls nghttp2-*-done 2>/dev/null | sed 's/nghttp2-\(.*\)-done/\1/'`
- installed_nghttp3_version=`ls nghttp3-*-done 2>/dev/null | sed 's/nghttp3-\(.*\)-done/\1/'`
- installed_libtiff_version=`ls tiff-*-done 2>/dev/null | sed 's/tiff-\(.*\)-done/\1/'`
- installed_spandsp_version=`ls spandsp-*-done 2>/dev/null | sed 's/spandsp-\(.*\)-done/\1/'`
- installed_speexdsp_version=`ls speexdsp-*-done 2>/dev/null | sed 's/speexdsp-\(.*\)-done/\1/'`
- installed_bcg729_version=`ls bcg729-*-done 2>/dev/null | sed 's/bcg729-\(.*\)-done/\1/'`
- installed_ilbc_version=`ls ilbc-*-done 2>/dev/null | sed 's/ilbc-\(.*\)-done/\1/'`
- installed_opus_version=`ls opus-*-done 2>/dev/null | sed 's/opus-\(.*\)-done/\1/'`
- installed_python3_version=`ls python3-*-done 2>/dev/null | sed 's/python3-\(.*\)-done/\1/'`
- installed_brotli_version=`ls brotli-*-done 2>/dev/null | sed 's/brotli-\(.*\)-done/\1/'`
- installed_minizip_version=`ls minizip-*-done 2>/dev/null | sed 's/minizip-\(.*\)-done/\1/'`
- installed_sparkle_version=`ls sparkle-*-done 2>/dev/null | sed 's/sparkle-\(.*\)-done/\1/'`
-
- cd $topdir
+ installed_xz_version=$( ls xz-*-done 2>/dev/null | sed 's/xz-\(.*\)-done/\1/' )
+ installed_lzip_version=$( ls lzip-*-done 2>/dev/null | sed 's/lzip-\(.*\)-done/\1/' )
+ installed_pcre_version=$( ls pcre-*-done 2>/dev/null | sed 's/pcre-\(.*\)-done/\1/' )
+ installed_pcre2_version=$( ls pcre2-*-done 2>/dev/null | sed 's/pcre2-\(.*\)-done/\1/' )
+ installed_autoconf_version=$( ls autoconf-*-done 2>/dev/null | sed 's/autoconf-\(.*\)-done/\1/' )
+ installed_automake_version=$( ls automake-*-done 2>/dev/null | sed 's/automake-\(.*\)-done/\1/' )
+ installed_libtool_version=$( ls libtool-*-done 2>/dev/null | sed 's/libtool-\(.*\)-done/\1/' )
+ installed_cmake_version=$( ls cmake-*-done 2>/dev/null | sed 's/cmake-\(.*\)-done/\1/' )
+ installed_ninja_version=$( ls ninja-*-done 2>/dev/null | sed 's/ninja-\(.*\)-done/\1/' )
+ installed_asciidoctor_version=$( ls asciidoctor-*-done 2>/dev/null | sed 's/asciidoctor-\(.*\)-done/\1/' )
+ installed_asciidoctorpdf_version=$( ls asciidoctorpdf-*-done 2>/dev/null | sed 's/asciidoctorpdf-\(.*\)-done/\1/' )
+ installed_gettext_version=$( ls gettext-*-done 2>/dev/null | sed 's/gettext-\(.*\)-done/\1/' )
+ installed_pkg_config_version=$( ls pkg-config-*-done 2>/dev/null | sed 's/pkg-config-\(.*\)-done/\1/' )
+ installed_glib_version=$( ls glib-*-done 2>/dev/null | sed 's/glib-\(.*\)-done/\1/' )
+ installed_qt_version=$( ls qt-*-done 2>/dev/null | sed 's/qt-\(.*\)-done/\1/' )
+ installed_libsmi_version=$( ls libsmi-*-done 2>/dev/null | sed 's/libsmi-\(.*\)-done/\1/' )
+ installed_libgpg_error_version=$( ls libgpg-error-*-done 2>/dev/null | sed 's/libgpg-error-\(.*\)-done/\1/' )
+ installed_libgcrypt_version=$( ls libgcrypt-*-done 2>/dev/null | sed 's/libgcrypt-\(.*\)-done/\1/' )
+ installed_gmp_version=$( ls gmp-*-done 2>/dev/null | sed 's/gmp-\(.*\)-done/\1/' )
+ installed_libtasn1_version=$( ls libtasn1-*-done 2>/dev/null | sed 's/libtasn1-\(.*\)-done/\1/' )
+ installed_p11_kit_version=$( ls p11-kit-*-done 2>/dev/null | sed 's/p11-kit-\(.*\)-done/\1/' )
+ installed_nettle_version=$( ls nettle-*-done 2>/dev/null | sed 's/nettle-\(.*\)-done/\1/' )
+ installed_gnutls_version=$( ls gnutls-*-done 2>/dev/null | sed 's/gnutls-\(.*\)-done/\1/' )
+ installed_lua_version=$( ls lua-*-done 2>/dev/null | sed 's/lua-\(.*\)-done/\1/' )
+ installed_snappy_version=$( ls snappy-*-done 2>/dev/null | sed 's/snappy-\(.*\)-done/\1/' )
+ installed_zstd_version=$( ls zstd-*-done 2>/dev/null | sed 's/zstd-\(.*\)-done/\1/' )
+ installed_zlibng_version=$( ls zlibng-*-done 2>/dev/null | sed 's/zlibng-\(.*\)-done/\1/' )
+ installed_libxml2_version=$( ls libxml2-*-done 2>/dev/null | sed 's/libxml2-\(.*\)-done/\1/' )
+ installed_lz4_version=$( ls lz4-*-done 2>/dev/null | sed 's/lz4-\(.*\)-done/\1/' )
+ installed_sbc_version=$( ls sbc-*-done 2>/dev/null | sed 's/sbc-\(.*\)-done/\1/' )
+ installed_maxminddb_version=$( ls maxminddb-*-done 2>/dev/null | sed 's/maxminddb-\(.*\)-done/\1/' )
+ installed_cares_version=$( ls c-ares-*-done 2>/dev/null | sed 's/c-ares-\(.*\)-done/\1/' )
+ installed_libssh_version=$( ls libssh-*-done 2>/dev/null | sed 's/libssh-\(.*\)-done/\1/' )
+ installed_nghttp2_version=$( ls nghttp2-*-done 2>/dev/null | sed 's/nghttp2-\(.*\)-done/\1/' )
+ installed_nghttp3_version=$( ls nghttp3-*-done 2>/dev/null | sed 's/nghttp3-\(.*\)-done/\1/' )
+ installed_libtiff_version=$( ls tiff-*-done 2>/dev/null | sed 's/tiff-\(.*\)-done/\1/' )
+ installed_spandsp_version=$( ls spandsp-*-done 2>/dev/null | sed 's/spandsp-\(.*\)-done/\1/' )
+ installed_speexdsp_version=$( ls speexdsp-*-done 2>/dev/null | sed 's/speexdsp-\(.*\)-done/\1/' )
+ installed_bcg729_version=$( ls bcg729-*-done 2>/dev/null | sed 's/bcg729-\(.*\)-done/\1/' )
+ installed_ilbc_version=$( ls ilbc-*-done 2>/dev/null | sed 's/ilbc-\(.*\)-done/\1/' )
+ installed_opencore_amr_version=$( ls opencore-amr-*-done 2>/dev/null | sed 's/opencore-amr-\(.*\)-done/\1/' )
+ installed_opus_version=$( ls opus-*-done 2>/dev/null | sed 's/opus-\(.*\)-done/\1/' )
+ installed_python3_version=$( ls python3-*-done 2>/dev/null | sed 's/python3-\(.*\)-done/\1/' )
+ installed_brotli_version=$( ls brotli-*-done 2>/dev/null | sed 's/brotli-\(.*\)-done/\1/' )
+ installed_minizip_version=$( ls minizip-*-done 2>/dev/null | sed 's/minizip-\(.*\)-done/\1/' )
+ installed_minizip_ng_version=$( ls minizip-ng-*-done 2>/dev/null | sed 's/minizip-ng-\(.*\)-done/\1/' )
+ installed_sparkle_version=$( ls sparkle-*-done 2>/dev/null | sed 's/sparkle-\(.*\)-done/\1/' )
+
+ cd "$topdir"
fi
if [ "$do_uninstall" = "yes" ]
@@ -3621,15 +3906,22 @@ fi
# However, we *are* setting them in the environment, for our own
# nefarious purposes, so start them out as "-g -O2".
#
-CFLAGS="-g -O2"
-CXXFLAGS="-g -O2"
+export CFLAGS="-g -O2 -I$installation_prefix/include"
+export CXXFLAGS="-g -O2 -I$installation_prefix/include"
+export LDFLAGS="-L$installation_prefix/lib"
+export PKG_CONFIG_PATH="$installation_prefix/lib/pkgconfig"
+CONFIGURE_OPTS=( --prefix="$installation_prefix" )
# if no make options are present, set default options
+# Should we just set MAKEFLAGS instead?
if [ -z "$MAKE_BUILD_OPTS" ] ; then
# by default use 1.5x number of cores for parallel build
- MAKE_BUILD_OPTS="-j $(( $(sysctl -n hw.logicalcpu) * 3 / 2))"
+ MAKE_BUILD_OPTS=( -j $(( $(sysctl -n hw.logicalcpu) * 3 / 2)) )
fi
+CURL_REMOTE_NAME_OPTS=(--fail-with-body --location --remote-name)
+CURL_LOCAL_NAME_OPTS=(--fail-with-body --location --output)
+
#
# If we have a target release, look for the oldest SDK that's for an
# OS equal to or later than that one, and build libraries against it
@@ -3639,7 +3931,7 @@ fi
# versions of the same release, or earlier releases if the minimum is
# earlier.
#
-if [ ! -z "$min_osx_target" ]
+if [ -n "$min_osx_target" ]
then
#
# Get the major and minor version of the target release.
@@ -3651,8 +3943,8 @@ then
#
# major.minor.
#
- min_osx_target_major=`echo "$min_osx_target" | sed -n 's/\([1-9][0-9]*\)\..*/\1/p'`
- min_osx_target_minor=`echo "$min_osx_target" | sed -n 's/[1-9][0-9]*\.\(.*\)/\1/p'`
+ min_osx_target_major=$( echo "$min_osx_target" | sed -n 's/\([1-9][0-9]*\)\..*/\1/p' )
+ min_osx_target_minor=$( echo "$min_osx_target" | sed -n 's/[1-9][0-9]*\.\(.*\)/\1/p' )
;;
[1-9][0-9])
@@ -3694,15 +3986,15 @@ then
# Get a list of all the SDKs in that directory, if any.
# We assume it'll be a while before there's a macOS 100. :-)
#
- sdklist=`(cd "$sdksdir"; ls -d MacOSX[1-9][0-9].[0-9]*.sdk 2>/dev/null)`
+ sdklist=$( (cd "$sdksdir"; ls -d MacOSX[1-9][0-9].[0-9]*.sdk 2>/dev/null) )
for sdk in $sdklist
do
#
# Get the major and minor version for this SDK.
#
- sdk_major=`echo "$sdk" | sed -n 's/MacOSX\([1-9][0-9]*\)\..*\.sdk/\1/p'`
- sdk_minor=`echo "$sdk" | sed -n 's/MacOSX[1-9][0-9]*\.\(.*\)\.sdk/\1/p'`
+ sdk_major=$( echo "$sdk" | sed -n 's/MacOSX\([1-9][0-9]*\)\..*\.sdk/\1/p' )
+ sdk_minor=$( echo "$sdk" | sed -n 's/MacOSX[1-9][0-9]*\.\(.*\)\.sdk/\1/p' )
#
# Is it for the deployment target or some later release?
@@ -3732,30 +4024,31 @@ then
echo "Using the $sdk_major.$sdk_minor SDK"
#
- # Make sure there are links to /usr/local/include and /usr/local/lib
+ # Make sure there are links to $installation_prefix/include and $installation_prefix/lib
# in the SDK's usr/local.
#
- if [ ! -e $SDKPATH/usr/local/include ]
- then
- if [ ! -d $SDKPATH/usr/local ]
- then
- sudo mkdir $SDKPATH/usr/local
- fi
- sudo ln -s /usr/local/include $SDKPATH/usr/local/include
- fi
- if [ ! -e $SDKPATH/usr/local/lib ]
- then
- if [ ! -d $SDKPATH/usr/local ]
- then
- sudo mkdir $SDKPATH/usr/local
- fi
- sudo ln -s /usr/local/lib $SDKPATH/usr/local/lib
- fi
+ # XXX - Is this needed any more?
+# if [ ! -e $SDKPATH$installation_prefix/include ]
+# then
+# if [ ! -d $SDKPATH$installation_prefix ]
+# then
+# sudo mkdir $SDKPATH$installation_prefix
+# fi
+# sudo ln -s $installation_prefix/include $SDKPATH$installation_prefix/include
+# fi
+# if [ ! -e $SDKPATH$installation_prefix/lib ]
+# then
+# if [ ! -d $SDKPATH$installation_prefix ]
+# then
+# sudo mkdir $SDKPATH$installation_prefix
+# fi
+# sudo ln -s $installation_prefix/lib $SDKPATH$installation_prefix/lib
+# fi
#
# Set the minimum OS version for which to build to the specified
# minimum target OS version, so we don't, for example, end up using
- # linker features supported by the OS verson on which we're building
+ # linker features supported by the OS version on which we're building
# but not by the target version.
#
VERSION_MIN_FLAGS="-mmacosx-version-min=$min_osx_target"
@@ -3767,9 +4060,6 @@ then
fi
-export CFLAGS
-export CXXFLAGS
-
#
# You need Xcode or the command-line tools installed to get the compilers (xcrun checks both).
#
@@ -3801,15 +4091,13 @@ if [ "$QT_VERSION" ]; then
fi
fi
-export PKG_CONFIG_PATH=/usr/local/lib/pkgconfig
-
#
# Do all the downloads and untarring in a subdirectory, so all that
# stuff can be removed once we've installed the support libraries.
if [ ! -d "${MACOSX_SUPPORT_LIBS}" ]
then
- mkdir "${MACOSX_SUPPORT_LIBS}" || exit 1
+ mkdir "${MACOSX_SUPPORT_LIBS}"
fi
cd "${MACOSX_SUPPORT_LIBS}"
@@ -3820,11 +4108,9 @@ echo ""
#
# Indicate what paths to use for pkg-config and cmake.
#
-pkg_config_path=/usr/local/lib/pkgconfig
if [ "$QT_VERSION" ]; then
qt_base_path=$HOME/Qt$QT_VERSION/$QT_VERSION/clang_64
- pkg_config_path="$pkg_config_path":"$qt_base_path/lib/pkgconfig"
- CMAKE_PREFIX_PATH="$CMAKE_PREFIX_PATH":"$qt_base_path/lib/cmake"
+    # CMAKE_PREFIX_PATH="$PKG_CONFIG_PATH:$qt_base_path/lib/cmake"
fi
if $no_build; then
@@ -3833,7 +4119,7 @@ if $no_build; then
fi
if [ "$QT_VERSION" ]; then
- if [ -f qt-$QT_VERSION-done ]; then
+ if [ -f "qt-$QT_VERSION-done" ]; then
echo "You are now prepared to build Wireshark."
else
echo "Qt was not installed; you will have to install it in order to build the"
@@ -3851,18 +4137,16 @@ fi
echo
echo "To build:"
echo
-echo "export PKG_CONFIG_PATH=$pkg_config_path"
-echo "export CMAKE_PREFIX_PATH=$CMAKE_PREFIX_PATH"
echo "export PATH=$PATH:$qt_base_path/bin"
echo
echo "mkdir build; cd build"
-if [ ! -z "$NINJA_VERSION" ]; then
+if [ -n "$NINJA_VERSION" ]; then
echo "cmake -G Ninja .."
echo "ninja wireshark_app_bundle logray_app_bundle # (Modify as needed)"
echo "ninja install/strip"
else
echo "cmake .."
- echo "make $MAKE_BUILD_OPTS wireshark_app_bundle logray_app_bundle # (Modify as needed)"
+ echo "make ${MAKE_BUILD_OPTS[*]} wireshark_app_bundle logray_app_bundle # (Modify as needed)"
echo "make install/strip"
fi
echo
diff --git a/tools/make-bluetooth.py b/tools/make-bluetooth.py
new file mode 100755
index 00000000..71942b91
--- /dev/null
+++ b/tools/make-bluetooth.py
@@ -0,0 +1,368 @@
+#!/usr/bin/env python3
+#
+# Wireshark - Network traffic analyzer
+# By Gerald Combs <gerald@wireshark.org>
+# Copyright 1998 Gerald Combs
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+'''
+make-bluetooth - Generate value_strings containing bluetooth uuids and company identifiers.
+It makes use of the databases from
+The Bluetooth SIG Repository: https://bitbucket.org/bluetooth-SIG/public/src/main/assigned_numbers/
+and processes the YAML into human-readable strings to go into packet-bluetooth.c.
+'''
+
+import sys
+import urllib.request, urllib.error, urllib.parse
+import yaml
+
+base_url = "https://bitbucket.org/bluetooth-SIG/public/raw/HEAD/assigned_numbers/"
+
+MIN_UUIDS = 1400 # 1424 as of 31-12-2023
+MIN_COMPANY_IDS = 3400 # 3405 as of 31-12-2023
+
+##
+## UUIDs
+##
+
+'''
+List of all YAML files to retrieve, the lists of UUIDs to put into the value_string
+and other information.
+Unfortunately the encoding of the names among the YAML files is inconsistent,
+to say the least. This will need post-processing.
+Also the previous value_string contained additional uuids, which are not currently
+present in the databases. Prepare the lists with these uuids so they are not lost.
+When they do appear in the databases they must be removed here.
+'''
+
+uuids_sources = [
+{ # 0x0001
+ "yaml": "protocol_identifiers.yaml",
+ "description": "Protocol Identifiers",
+ "unCamelCase": True,
+ "unlist": [],
+ "list": [
+ { "uuid": 0x001D, "name": "UDI C-Plane" },
+ ]
+},
+{ # 0x1000
+ "yaml": "service_class.yaml",
+ "description": "Service Class",
+ "unCamelCase": True,
+ "unlist": [],
+ "list": [
+ # Then we have this weird one stuck in between "Service Class"
+ # from browse_group_identifiers.yaml
+ { "uuid": 0x1002, "name": "Public Browse Group" },
+ # And some from other sources
+ { "uuid": 0x1129, "name": "Video Conferencing GW" },
+ { "uuid": 0x112A, "name": "UDI MT" },
+ { "uuid": 0x112B, "name": "UDI TA" },
+ { "uuid": 0x112C, "name": "Audio/Video" },
+ ]
+},
+{ # 0x1600
+ "yaml": "mesh_profile_uuids.yaml",
+ "description": "Mesh Profile",
+ "unCamelCase": False,
+ "unlist": [],
+ "list": []
+},
+{ # 0x1800
+ "yaml": "service_uuids.yaml",
+ "description": "Service",
+ "unCamelCase": False,
+ "unlist": [],
+ "list": []
+},
+{ # 0x2700
+ "yaml": "units.yaml",
+ "description": "Units",
+ "unCamelCase": False,
+ "unlist": [],
+ "list": []
+},
+{ # 0x2800
+ "yaml": "declarations.yaml",
+ "description": "Declarations",
+ "unCamelCase": False,
+ "unlist": [],
+ "list": []
+},
+{ # 0x2900
+ "yaml": "descriptors.yaml",
+ "description": "Descriptors",
+ "unCamelCase": False,
+ "unlist": [],
+ "list": []
+},
+{ # 0x2a00
+ "yaml": "characteristic_uuids.yaml",
+ "description": "Characteristics",
+ "unCamelCase": False,
+ "unlist": [],
+ "list": [
+ # Then we have these weird ones stuck in between "Characteristics"
+ # from object_types.yaml
+ { "uuid": 0x2ACA, "name": "Unspecified" },
+ { "uuid": 0x2ACB, "name": "Directory Listing" },
+ # And some from other sources
+ { "uuid": 0x2A0B, "name": "Exact Time 100" },
+ { "uuid": 0x2A10, "name": "Secondary Time Zone" },
+ { "uuid": 0x2A15, "name": "Time Broadcast" },
+ { "uuid": 0x2A1A, "name": "Battery Power State" },
+ { "uuid": 0x2A1B, "name": "Battery Level State" },
+ { "uuid": 0x2A1F, "name": "Temperature Celsius" },
+ { "uuid": 0x2A20, "name": "Temperature Fahrenheit" },
+ { "uuid": 0x2A2F, "name": "Position 2D" },
+ { "uuid": 0x2A30, "name": "Position 3D" },
+ { "uuid": 0x2A3A, "name": "Removable" },
+ { "uuid": 0x2A3B, "name": "Service Required" },
+ { "uuid": 0x2A3C, "name": "Scientific Temperature Celsius" },
+ { "uuid": 0x2A3D, "name": "String" },
+ { "uuid": 0x2A3E, "name": "Network Availability" },
+ { "uuid": 0x2A56, "name": "Digital" },
+ { "uuid": 0x2A57, "name": "Digital Output" },
+ { "uuid": 0x2A58, "name": "Analog" },
+ { "uuid": 0x2A59, "name": "Analog Output" },
+ { "uuid": 0x2A62, "name": "Pulse Oximetry Control Point" },
+ # These have somehow disappeared. We keep them for if they were used.
+ { "uuid": 0x2BA9, "name": "Media Player Icon Object Type" },
+ { "uuid": 0x2BAA, "name": "Track Segments Object Type" },
+ { "uuid": 0x2BAB, "name": "Track Object Type" },
+ { "uuid": 0x2BAC, "name": "Group Object Type" },
+ ]
+},
+{ # 0xfxxx
+ "yaml": "member_uuids.yaml",
+ "description": "Members",
+ "unCamelCase": False,
+ "unlist": [],
+ "list": [
+ # This they really screwed up. The UUID was moved to sdo_uuids,
+ # thereby breaking the range and ordering completely.
+ { "uuid": 0xFCCC, "name": "Wi-Fi Easy Connect Specification" },
+ ]
+},
+{ # 0xffef (and 0xfccc)
+ "yaml": "sdo_uuids.yaml",
+ "description": "SDO",
+ "unCamelCase": False,
+ "unlist": [ 0xFCCC,
+ ],
+ "list": []
+}]
+
+'''
+Retrieve the YAML files defining the UUIDs and add them to the lists
+'''
+for uuids in uuids_sources:
+ req_headers = { 'User-Agent': 'Wireshark make-bluetooth' }
+ try:
+ req = urllib.request.Request(base_url + 'uuids/' + uuids["yaml"], headers=req_headers)
+ response = urllib.request.urlopen(req)
+ lines = response.read().decode('UTF-8', 'replace')
+ except Exception as e:
+ print("Failed to get UUIDs at {url}, because of: {e}".format(url=base_url + 'uuids/' + uuids["yaml"], e=e), file=sys.stderr)
+ sys.exit(1)
+
+ uuids_dir = yaml.safe_load(lines)
+ for uuid in uuids_dir["uuids"]:
+ if uuid["uuid"] not in uuids["unlist"]:
+ uuids["list"].append(uuid)
+
+'''
+Go through the lists and perform general and specific transforms.
+Several exceptional cases are addressed directly by their UUID, because of the inconsistent nature
+by which their name is constructed.
+When they appear more sensibly in the databases they must be removed here.
+When new inconsistent entries appear in the databases their transforms can be added here,
+but also add their UUID below.
+'''
+for uuids in uuids_sources:
+ for uuid in uuids["list"]:
+ # Handle a few exceptional cases
+ if uuid["uuid"] == 0x001E:
+ uuid["name"] = "MCAP Control Channel"
+ elif uuid["uuid"] == 0x001F:
+ uuid["name"] = "MCAP Data Channel"
+ elif uuid["uuid"] == 0x1102:
+ uuid["name"] = "LAN Access Using PPP"
+ elif uuid["uuid"] == 0x1104:
+ uuid["name"] = "IrMC Sync"
+ elif uuid["uuid"] == 0x1105:
+ uuid["name"] = "OBEX Object Push"
+ elif uuid["uuid"] == 0x1106:
+ uuid["name"] = "OBEX File Transfer"
+ elif uuid["uuid"] == 0x1107:
+ uuid["name"] = "IrMC Sync Command"
+ elif uuid["uuid"] == 0x1200:
+ uuid["name"] = "PnP Information"
+ elif uuid["uuid"] == 0x2B8C:
+ uuid["name"] = "CO\u2082 Concentration"
+ else:
+ # And these in general
+ uuid["name"] = uuid["name"].replace("_", " ")
+ uuid["name"] = uuid["name"].replace('"', '\\"')
+
+'''
+Go through the lists and, for those lists flagged as such, perform the unCamelCase transform
+on all the names in that list.
+Several exceptional cases were addressed directly by their UUID and must be excluded from this
+transform.
+When additional characters indicating a break in words appear in database entries they can be
+added to break_chars.
+'''
+for uuids in uuids_sources:
+ if uuids["unCamelCase"]:
+ for uuid in uuids["list"]:
+ # if not a few exceptional cases (see above)
+ if uuid["uuid"] not in [0x001E, 0x001F, 0x1102, 0x1104, 0x1105, 0x1106, 0x1107, 0x1200, 0x2B8C]:
+ # Parse through the names and look for capital letters; when
+ # not preceded by another capital letter or one of break_chars, insert a space
+ break_chars = [" ", "-", "+", "/", "(", ".", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
+ was_break = True # fake space at beginning of string
+ was_upper = False
+ name = ""
+ for character in uuid["name"]:
+ is_upper = True if character.isupper() else False
+ if is_upper and not was_break and not was_upper:
+ name += " "
+ name += character
+ was_break = True if character in break_chars else False
+ was_upper = is_upper
+ uuid["name"] = name
+
+'''
+To be able to generate a value_string_ext array the entries need to be sorted.
+'''
+for uuids in uuids_sources:
+ uuids_sorted = sorted(uuids["list"], key=lambda uuid: uuid["uuid"])
+ uuids["list"] = uuids_sorted
+
+'''
+Do a check on duplicate entries.
+While at it, do a count of the number of UUIDs retrieved.
+'''
+prev_uuid = 0
+uuid_count = 0
+for uuids in uuids_sources:
+ for uuid in uuids["list"]:
+ if uuid["uuid"] > prev_uuid:
+ prev_uuid = uuid["uuid"]
+ else:
+ print("Duplicate UUID detected: 0x{uuid:04X}".format(uuid=uuid["uuid"]), file=sys.stderr)
+ sys.exit(1)
+ uuid_count += len(uuids["list"])
+
+'''
+Sanity check to see if enough entries were retrieved
+'''
+if (uuid_count < MIN_UUIDS):
+ print("There are fewer UUIDs than expected: got {count} but was expecting {minimum}".format(count=uuid_count, minimum=MIN_UUIDS), file=sys.stderr)
+ sys.exit(1)
+
+'''
+Finally output the annotated source code for the value_string
+'''
+print("const value_string bluetooth_uuid_vals[] = {")
+
+for uuids in uuids_sources:
+ print(" /* {description} - {base_url}uuids/{yaml} */".format(description=uuids["description"], base_url=base_url, yaml=uuids["yaml"]))
+ for uuid in uuids["list"]:
+ print(" {{ 0x{uuid:04X}, \"{name}\" }},".format(uuid=uuid["uuid"], name=uuid["name"]))
+
+print(" { 0, NULL }")
+print("};")
+print("value_string_ext bluetooth_uuid_vals_ext = VALUE_STRING_EXT_INIT(bluetooth_uuid_vals);")
+print("")
+
+##
+## Company Identifiers
+##
+
+'''
+List of the YAML files to retrieve and the lists of values to put into the value_string.
+Also the previous value_string contained additional company IDs, which are not currently
+present in the databases. Prepare the lists with these company IDs so they are not lost.
+When they do appear in the databases they must be removed here.
+'''
+
+company_ids_sources = [
+{
+ "yaml": "company_identifiers.yaml",
+ "list": [
+ # Some from other sources
+ { "value": 0x0418, "name": "Alpine Electronics Inc." },
+ { "value": 0x0943, "name": "Inovonics Corp." },
+ { "value": 0xFFFF, "name": "For use in internal and interoperability tests" },
+ ]
+}]
+
+'''
+Retrieve the YAML files defining the company IDs and add them to the lists
+'''
+for company_ids in company_ids_sources:
+ req_headers = { 'User-Agent': 'Wireshark make-bluetooth' }
+ try:
+ req = urllib.request.Request(base_url + 'company_identifiers/' + company_ids["yaml"], headers=req_headers)
+ response = urllib.request.urlopen(req)
+ lines = response.read().decode('UTF-8', 'replace')
+ except Exception as e:
+ print("Failed to get company IDs at {url}, because of: {e}".format(url=base_url + 'company_identifiers/' + company_ids["yaml"], e=e), file=sys.stderr)
+        sys.exit(1)
+
+ company_ids_dir = yaml.safe_load(lines)
+ company_ids["list"].extend(company_ids_dir["company_identifiers"])
+
+'''
+Go through the lists and perform general transforms.
+'''
+for company_ids in company_ids_sources:
+ for company_id in company_ids["list"]:
+ company_id["name"] = company_id["name"].replace('"', '\\"')
+
+'''
+To be able to generate a value_string_ext array the entries need to be sorted.
+'''
+for company_ids in company_ids_sources:
+ company_ids_sorted = sorted(company_ids["list"], key=lambda company_id: company_id['value'])
+ company_ids["list"] = company_ids_sorted
+
+'''
+Do a check on duplicate entries.
+While at it, do a count of the number of company IDs retrieved.
+'''
+prev_company_id = -1
+company_id_count = 0
+for company_ids in company_ids_sources:
+ for company_id in company_ids["list"]:
+ if company_id["value"] > prev_company_id:
+ prev_company_id = company_id["value"]
+ else:
+ print("Duplicate company ID detected: 0x{company_id:04X}".format(company_id=company_id["value"]), file=sys.stderr)
+ sys.exit(1)
+ company_id_count += len(company_ids["list"])
+
+'''
+Sanity check to see if enough entries were retrieved
+'''
+if company_id_count < MIN_COMPANY_IDS:
+ print("There are fewer company IDs than expected: got {count} but was expecting {minimum}".format(count=company_id_count, minimum=MIN_COMPANY_IDS), file=sys.stderr)
+ sys.exit(1)
+
+'''
+Finally output the source code for the value_string
+'''
+print("/* Taken from {base_url}company_identifiers/{yaml} */".format(base_url=base_url, yaml=company_ids_sources[0]["yaml"]))
+print("static const value_string bluetooth_company_id_vals[] = {")
+
+for company_ids in company_ids_sources:
+ for company_id in company_ids["list"]:
+ print(" {{ 0x{company_id:04X}, \"{name}\" }},".format(company_id=company_id["value"], name=company_id["name"]))
+
+print(" { 0, NULL }")
+print("};")
+print("value_string_ext bluetooth_company_id_vals_ext = VALUE_STRING_EXT_INIT(bluetooth_company_id_vals);")
+
diff --git a/tools/make-enterprises.py b/tools/make-enterprises.py
index 1b2b2d0d..1491e548 100755
--- a/tools/make-enterprises.py
+++ b/tools/make-enterprises.py
@@ -31,19 +31,19 @@ FORMERLY_PATTERN = r" \(((formerly|previously) .*)\)"
LOOKUP_FUNCTION = r"""
const char* global_enterprises_lookup(uint32_t value)
{
- if (value > table.max_idx) {
+ if (value >= array_length(table)) {
return NULL;
}
- else return table.values[value];
+ return table[value];
}
"""
DUMP_FUNCTION = r"""
void global_enterprises_dump(FILE *fp)
{
- for (size_t idx = 0; idx <= table.max_idx; idx++) {
- if (table.values[idx] != NULL) {
- fprintf(fp, "%zu\t%s\n", idx, table.values[idx]);
+ for (size_t idx = 0; idx < array_length(table); idx++) {
+ if (table[idx] != NULL) {
+ fprintf(fp, "%zu\t%s\n", idx, table[idx]);
}
}
}
@@ -100,30 +100,23 @@ class CFile:
# Include header files
self.f.write('#include "config.h"\n\n')
self.f.write('#include <stddef.h>\n')
+ self.f.write('#include <wsutil/array.h>\n')
self.f.write('#include "enterprises.h"\n')
self.f.write('\n\n')
def __del__(self):
- self.f.write('typedef struct\n')
- self.f.write('{\n')
- self.f.write(' uint32_t max_idx;\n')
- self.f.write(' const char* values[' + str(self.highest_num+1) + '];\n')
- self.f.write('} global_enterprises_table_t;\n\n')
-
# Write static table
- self.f.write('static global_enterprises_table_t table =\n')
+ self.f.write('static const char * const table[] =\n')
self.f.write('{\n')
# Largest index
- self.f.write(' ' + str(self.highest_num) + ',\n')
- self.f.write(' {\n')
# Entries (read from dict)
for n in range(0, self.highest_num+1):
if n not in self.mappings:
# There are some gaps, write a NULL entry so can lookup by index
- line = ' NULL'
+ line = ' NULL'
else:
- line = ' "' + self.mappings[n] + '"'
- # Add coma.
+ line = ' "' + self.mappings[n] + '"'
+ # Add comma.
if n < self.highest_num:
line += ','
# Add number as aligned comment.
@@ -132,8 +125,6 @@ class CFile:
self.f.write(line+'\n')
# End of array
- self.f.write(' }\n')
- # End of struct
self.f.write('};\n')
print('Re-generated', self.filename)
diff --git a/tools/make-enums.py b/tools/make-enums.py
index b6a2835e..74274313 100755
--- a/tools/make-enums.py
+++ b/tools/make-enums.py
@@ -51,7 +51,7 @@ def parse_files(infiles, outfile):
source += """
#define ENUM(arg) { #arg, arg }
-static ws_enum_t all_enums[] = {
+static ws_enum_t const all_enums[] = {
"""
definitions = parser.defs['values']
diff --git a/tools/make-iana-ip.py b/tools/make-iana-ip.py
new file mode 100755
index 00000000..f6e0a86c
--- /dev/null
+++ b/tools/make-iana-ip.py
@@ -0,0 +1,209 @@
+#!/usr/bin/env python3
+#
+# Wireshark - Network traffic analyzer
+# By Gerald Combs <gerald@wireshark.org>
+# Copyright 1998 Gerald Combs
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+'''Update the IANA IP registry file.
+
+Make-iana-ip creates a file containing information about IPv4/IPv6 allocation blocks.
+'''
+
+import csv
+import io
+import ipaddress
+import os
+import re
+import sys
+import urllib.request, urllib.error, urllib.parse
+
+def exit_msg(msg=None, status=1):
+ if msg is not None:
+ sys.stderr.write(msg + '\n\n')
+ sys.stderr.write(__doc__ + '\n')
+ sys.exit(status)
+
+def open_url(url):
+    '''Open a URL or, if a directory was given on the command line,
+    a local copy of the file underneath that directory.
+    Returns the body as a str so that it is compatible with
+    csv.reader.
+    '''
+
+ if len(sys.argv) > 1:
+ url_path = os.path.join(sys.argv[1], url[1])
+ url_fd = open(url_path)
+ body = url_fd.read()
+ url_fd.close()
+ else:
+ url_path = '/'.join(url)
+
+ req_headers = { 'User-Agent': 'Wireshark iana-ip' }
+ try:
+ req = urllib.request.Request(url_path, headers=req_headers)
+ response = urllib.request.urlopen(req)
+ body = response.read().decode('UTF-8', 'replace')
+ except Exception:
+ exit_msg('Error opening ' + url_path)
+
+ return body
+
+class IPv4SpecialBlock(ipaddress.IPv4Network):
+ @staticmethod
+ def ip_get_subnet_mask(bits):
+ masks = (
+ 0x00000000,
+ 0x80000000, 0xc0000000, 0xe0000000, 0xf0000000,
+ 0xf8000000, 0xfc000000, 0xfe000000, 0xff000000,
+ 0xff800000, 0xffc00000, 0xffe00000, 0xfff00000,
+ 0xfff80000, 0xfffc0000, 0xfffe0000, 0xffff0000,
+ 0xffff8000, 0xffffc000, 0xffffe000, 0xfffff000,
+ 0xfffff800, 0xfffffc00, 0xfffffe00, 0xffffff00,
+ 0xffffff80, 0xffffffc0, 0xffffffe0, 0xfffffff0,
+ 0xfffffff8, 0xfffffffc, 0xfffffffe, 0xffffffff)
+ if bits > 32:
+            raise ValueError("Expected bit mask less than or equal to 32")
+ return masks[bits]
+
+ def __str__(self):
+ addr = self.network_address
+ mask = self.prefixlen
+ line = '{{ .ipv4 = {{ {:#x}, {:#010x} }} }}'.format(addr, self.ip_get_subnet_mask(mask))
+ return line
+
+class IPv6SpecialBlock(ipaddress.IPv6Network):
+ @staticmethod
+ def addr_c_array(byte_array):
+ if len(byte_array) != 16:
+ raise ValueError("Expected byte array of length 16")
+ c_array = ", ".join(f"0x{byte:02x}" for byte in byte_array)
+ return f"{{ {c_array} }}"
+
+ def __str__(self):
+ addr = self.network_address.packed
+ mask = self.prefixlen
+ line = '{{ .ipv6 = {{ {}, {} }} }}'.format(self.addr_c_array(addr), mask)
+ return line
+
+class IPRegistry(list):
+ @staticmethod
+ def true_or_false(val):
+ if val == 'True':
+ return '1'
+ elif val == 'False':
+ return '0'
+ else:
+ return '-1'
+
+ def append(self, row):
+ ip, name, _, _, termin_date, source, destination, forward, glob, reserved = row
+ if termin_date[0].isdigit():
+ # skip allocations that have expired
+ return
+ name = re.sub(r'\[.*\]', '', name)
+ name = '"' + name.replace('"', '\\"') + '"'
+ source = self.true_or_false(source)
+ destination = self.true_or_false(destination)
+ forward = self.true_or_false(forward)
+ glob = self.true_or_false(glob)
+ reserved = self.true_or_false(reserved)
+ super().append([ip, name, source, destination, forward, glob, reserved])
+
+class IPv4Registry(IPRegistry):
+ @staticmethod
+ def ipv4_addr_and_mask(s):
+ ip = IPv4SpecialBlock(s)
+ return ip
+
+ def append(self, row):
+ # some lines contain multiple (comma separated) blocks
+ ip_list = row[0].split(',')
+ for s in ip_list:
+ # remove annotations like "1.1.1.1 [2]"
+ ip_str = s.split()[0]
+ row = [self.ipv4_addr_and_mask(ip_str)] + row[1:]
+ super().append(row)
+
+ def dump(self, fd):
+ self.sort()
+ fd.write('_U_ static const struct ws_iana_ip_special_block __ipv4_special_block[] = {\n')
+ for row in self:
+ line = ' {{ 4, {}, {}, {}, {}, {}, {}, {} }},\n'.format(*row)
+ fd.write(line)
+ fd.write('};\n')
+
+class IPv6Registry(IPRegistry):
+ @staticmethod
+ def ipv6_addr_and_mask(s):
+ ip_str = s.split()[0]
+ ip = IPv6SpecialBlock(ip_str)
+ return ip
+
+ def append(self, row):
+ # remove annotations like "1.1.1.1 [2]"
+ ip_str = row[0].split()[0]
+ row = [self.ipv6_addr_and_mask(ip_str)] + row[1:]
+ super().append(row)
+
+ def dump(self, fd):
+ self.sort()
+ fd.write('// GCC bug?\n')
+ fd.write('DIAG_OFF(missing-braces)\n')
+ fd.write('_U_ static const struct ws_iana_ip_special_block __ipv6_special_block[] = {\n')
+ for row in self:
+ line = \
+''' {{ 6, {},
+ {}, {}, {}, {}, {}, {} }},\n'''.format(*row)
+ fd.write(line)
+ fd.write('};\n')
+ fd.write('DIAG_ON(missing-braces)\n')
+
+IANA_URLS = {
+ 'IPv4': { 'url': ["https://www.iana.org/assignments/iana-ipv4-special-registry/", "iana-ipv4-special-registry-1.csv"], 'min_entries': 2 },
+ 'IPv6': { 'url': ["https://www.iana.org/assignments/iana-ipv6-special-registry/", "iana-ipv6-special-registry-1.csv"], 'min_entries': 2 },
+}
+
+def dump_registry(db, fd, reg):
+ db_url = IANA_URLS[db]['url']
+ print('Loading {} data from {}'.format(db, db_url))
+ body = open_url(db_url)
+ iana_csv = csv.reader(body.splitlines())
+
+ # Pop the title row.
+ next(iana_csv)
+ for iana_row in iana_csv:
+ # Address Block,Name,RFC,Allocation Date,Termination Date,Source,Destination,Forwardable,Globally Reachable,Reserved-by-Protocol
+ # ::1/128,Loopback Address,[RFC4291],2006-02,N/A,False,False,False,False,True
+ reg.append(iana_row)
+
+ if len(reg) < IANA_URLS[db]['min_entries']:
+ exit_msg("Too few {} entries. Got {}, wanted {}".format(db, len(reg), IANA_URLS[db]['min_entries']))
+
+ reg.dump(fd)
+
+def main():
+ iana_path = os.path.join('epan', 'iana-ip-data.c')
+
+ try:
+ fd = io.open(iana_path, 'w', encoding='UTF-8')
+ except Exception:
+        exit_msg("Couldn't open \"{}\" file for writing".format(iana_path))
+
+ fd.write('''/*
+ * This file was generated by running ./tools/make-iana-ip.py.
+ *
+ * SPDX-License-Identifier: GPL-2.0-or-later
+ */
+
+#include "iana-ip.h"
+
+''')
+
+ dump_registry('IPv4', fd, IPv4Registry())
+ fd.write('\n')
+ dump_registry('IPv6', fd, IPv6Registry())
+ fd.close()
+
+if __name__ == '__main__':
+ main()
diff --git a/tools/make-isobus.py b/tools/make-isobus.py
index ce0259c7..792f2b33 100644..100755
--- a/tools/make-isobus.py
+++ b/tools/make-isobus.py
@@ -41,7 +41,6 @@ def open_url_zipped(url):
return zipfile.ZipFile(io.BytesIO(body))
def main():
- this_dir = os.path.dirname(__file__)
isobus_output_path = os.path.join('epan', 'dissectors', 'packet-isobus-parameters.h')
isobus_zip_url = [ "https://www.isobus.net/isobus/attachments/", "isoExport_csv.zip"]
@@ -132,7 +131,7 @@ def main():
pgn_id, pgn_name, = row[:2]
if not pgn_name.startswith("Proprietary B"):
pgn_names[int(pgn_id)] = pgn_name.replace("\"","'")
- except:
+ except Exception:
pass
# prepare output file
@@ -164,7 +163,7 @@ def main():
output_fd.write(" { 0, NULL }\n")
output_fd.write("};\n")
- output_fd.write("static value_string_ext isobus_industry_groups_ext = VALUE_STRING_EXT_INIT(_isobus_industry_groups);\n\n");
+ output_fd.write("static value_string_ext isobus_industry_groups_ext = VALUE_STRING_EXT_INIT(_isobus_industry_groups);\n\n")
# Write Vehicle System Names
output_fd.write("/* key: 256 * Industry-Group-ID + Vehicle-Group-ID */\n")
@@ -175,7 +174,7 @@ def main():
output_fd.write(" { 0, NULL }\n")
output_fd.write("};\n")
- output_fd.write("static value_string_ext isobus_vehicle_systems_ext = VALUE_STRING_EXT_INIT(_isobus_vehicle_systems);\n\n");
+ output_fd.write("static value_string_ext isobus_vehicle_systems_ext = VALUE_STRING_EXT_INIT(_isobus_vehicle_systems);\n\n")
# Write Global Name Functions
output_fd.write("static const value_string _isobus_global_name_functions[] = {\n")
@@ -185,7 +184,7 @@ def main():
output_fd.write(" { 0, NULL }\n")
output_fd.write("};\n")
- output_fd.write("static value_string_ext isobus_global_name_functions_ext = VALUE_STRING_EXT_INIT(_isobus_global_name_functions);\n\n");
+ output_fd.write("static value_string_ext isobus_global_name_functions_ext = VALUE_STRING_EXT_INIT(_isobus_global_name_functions);\n\n")
# IG Specific Global Name Functions
output_fd.write("/* key: 65536 * Industry-Group-ID + 256 * Vehicle-System-ID + Function-ID */\n")
@@ -196,7 +195,7 @@ def main():
output_fd.write(" { 0, NULL }\n")
output_fd.write("};\n")
- output_fd.write("static value_string_ext isobus_ig_specific_name_functions_ext = VALUE_STRING_EXT_INIT(_isobus_ig_specific_name_functions);\n\n");
+ output_fd.write("static value_string_ext isobus_ig_specific_name_functions_ext = VALUE_STRING_EXT_INIT(_isobus_ig_specific_name_functions);\n\n")
# Write Manufacturers
output_fd.write("static const value_string _isobus_manufacturers[] = {\n")
@@ -206,7 +205,7 @@ def main():
output_fd.write(" { 0, NULL }\n")
output_fd.write("};\n")
- output_fd.write("static value_string_ext isobus_manufacturers_ext = VALUE_STRING_EXT_INIT(_isobus_manufacturers);\n\n");
+ output_fd.write("static value_string_ext isobus_manufacturers_ext = VALUE_STRING_EXT_INIT(_isobus_manufacturers);\n\n")
# PGN Names
output_fd.write("static const value_string _isobus_pgn_names[] = {\n")
@@ -216,7 +215,7 @@ def main():
output_fd.write(" { 0, NULL }\n")
output_fd.write("};\n")
- output_fd.write("static value_string_ext isobus_pgn_names_ext = VALUE_STRING_EXT_INIT(_isobus_pgn_names);\n\n");
+ output_fd.write("static value_string_ext isobus_pgn_names_ext = VALUE_STRING_EXT_INIT(_isobus_pgn_names);\n\n")
output_fd.write("#endif /* __PACKET_ISOBUS_PARAMETERS_H__ */")
if __name__ == '__main__':
diff --git a/tools/make-manuf.py b/tools/make-manuf.py
index 22f3aa03..8f006405 100755
--- a/tools/make-manuf.py
+++ b/tools/make-manuf.py
@@ -36,7 +36,7 @@ def exit_msg(msg=None, status=1):
def open_url(url):
'''Open a URL.
Returns a tuple containing the body and response dict. The body is a
- str in Python 3 and bytes in Python 2 in order to be compatibile with
+ str in Python 3 and bytes in Python 2 in order to be compatible with
csv.reader.
'''
@@ -230,7 +230,6 @@ def prefix_to_oui(prefix, prefix_map):
return '{}/{:d}'.format(oui, int(pfx_len)), kind
def main():
- this_dir = os.path.dirname(__file__)
manuf_path = os.path.join('epan', 'manuf-data.c')
ieee_d = {
@@ -246,7 +245,7 @@ def main():
MA_S: {},
}
- min_total = 35000; # 35830 as of 2018-09-05
+ min_total = 35000 # 35830 as of 2018-09-05
total_added = 0
# Add IEEE entries from each of their databases
@@ -276,9 +275,10 @@ def main():
# "Watts A\S"
manuf = manuf.replace('\\', '/')
if manuf == 'IEEE Registration Authority':
+ # These are held for subdivision into MA-M/MA-S
continue
- if manuf == 'Private':
- continue
+ #if manuf == 'Private':
+ # continue
if oui in oui_d[kind]:
action = 'Skipping'
print('{} - {} IEEE "{}" in favor of "{}"'.format(oui, action, manuf, oui_d[kind][oui]))
diff --git a/tools/make-no-reassembly-profile.py b/tools/make-no-reassembly-profile.py
index cd68155a..25ae0153 100755
--- a/tools/make-no-reassembly-profile.py
+++ b/tools/make-no-reassembly-profile.py
@@ -33,12 +33,12 @@ def main():
# Make sure plugin prefs are present.
cp = subprocess.run([tshark_path, '-G', 'plugins'], stdout=subprocess.PIPE, check=True, encoding='utf-8')
plugin_lines = cp.stdout.splitlines()
- dissector_count = len(tuple(filter(lambda p: re.search('\sdissector\s', p), plugin_lines)))
+ dissector_count = len(tuple(filter(lambda p: re.search(r'\sdissector\s', p), plugin_lines)))
if dissector_count < MIN_PLUGINS:
print('Found {} plugins but require {}.'.format(dissector_count, MIN_PLUGINS))
sys.exit(1)
- rd_pref_re = re.compile('^#\s*(.*(reassembl|desegment)\S*):\s*TRUE')
+ rd_pref_re = re.compile(r'^#\s*(.*(reassembl|desegment)\S*):\s*TRUE')
out_prefs = [
'# Generated by ' + os.path.basename(__file__), '',
'####### Protocols ########', '',
diff --git a/tools/make-packet-dcm.py b/tools/make-packet-dcm.py
index 51cbcf10..d122dbff 100755
--- a/tools/make-packet-dcm.py
+++ b/tools/make-packet-dcm.py
@@ -123,6 +123,10 @@ vrs = {i+1: get_texts_in_row(x)[0].split(maxsplit=1) for i,x in enumerate(get_tr
uid_trs = get_trs(part06, "table_A-1")
uid_rows = [get_texts_in_row(x) for x in uid_trs]
+wkfr_trs = get_trs(part06, "table_A-2")
+wkfr_rows = [get_texts_in_row(x) for x in wkfr_trs]
+uid_rows += [x[:3] + ['Well-known frame of reference'] + x[3:] for x in wkfr_rows]
+
def uid_define_name(uid):
if uid[1] == "(Retired)":
return f'"{uid[0]}"'
@@ -168,7 +172,7 @@ extern "C" {
""" + "\n".join(f"#define DCM_VR_{vr[0]} {i:2d} /* {vr[1]:25s} */" for i,vr in vrs.items()) + """
/* Following must be in the same order as the definitions above */
-static const gchar* dcm_tag_vr_lookup[] = {
+static const char* dcm_tag_vr_lookup[] = {
" ",
""" + ",\n ".join(",".join(f'"{x[1][0]}"' for x in j[1]) for j in itertools.groupby(vrs.items(), lambda i: (i[0]-1)//8)) + """
};
@@ -188,12 +192,12 @@ static const gchar* dcm_tag_vr_lookup[] = {
*/
typedef struct dcm_tag {
- const guint32 tag;
- const gchar *description;
- const gchar *vr;
- const gchar *vm;
- const gboolean is_retired;
- const gboolean add_to_summary; /* Add to parent's item description */
+ const uint32_t tag;
+ const char *description;
+ const char *vr;
+ const char *vm;
+ const bool is_retired;
+ const bool add_to_summary; /* Add to parent's item description */
} dcm_tag_t;
static dcm_tag_t const dcm_tag_data[] = {
@@ -225,9 +229,9 @@ static dcm_tag_t const dcm_tag_data[] = {
*/
typedef struct dcm_uid {
- const gchar *value;
- const gchar *name;
- const gchar *type;
+ const char *value;
+ const char *name;
+ const char *type;
} dcm_uid_t;
""" + "\n".join(f'#define {uid_define_name(uid)} "{uid[0]}"'
diff --git a/tools/make-pci-ids.py b/tools/make-pci-ids.py
index 093637d7..0aadef14 100755
--- a/tools/make-pci-ids.py
+++ b/tools/make-pci-ids.py
@@ -35,6 +35,9 @@ CODE_PREFIX = """\
#include <config.h>
#include <stddef.h>
+#include <stdlib.h>
+
+#include "wsutil/array.h"
#include "pci-ids.h"
@@ -59,38 +62,10 @@ typedef struct
"""
CODE_POSTFIX = """
-static pci_vid_index_t const *get_vid_index(uint16_t vid)
+static int vid_search(const void *key, const void *tbl_entry)
{
- uint32_t start_index = 0;
- uint32_t end_index = 0;
- uint32_t idx = 0;
-
- end_index = sizeof(pci_vid_index)/sizeof(pci_vid_index[0]);
-
- while(start_index != end_index)
- {
- if(end_index - start_index == 1)
- {
- if(pci_vid_index[start_index].vid == vid)
- return &pci_vid_index[start_index];
-
- break;
- }
-
- idx = (start_index + end_index)/2;
-
- if(pci_vid_index[idx].vid < vid)
- start_index = idx;
- else
- if(pci_vid_index[idx].vid > vid)
- end_index = idx;
- else
- return &pci_vid_index[idx];
-
- }
-
- return NULL;
-
+ return (int)*(const uint16_t *)key -
+ (int)((const pci_vid_index_t *)tbl_entry)->vid;
}
const char *pci_id_str(uint16_t vid, uint16_t did, uint16_t svid, uint16_t ssid)
@@ -100,7 +75,7 @@ const char *pci_id_str(uint16_t vid, uint16_t did, uint16_t svid, uint16_t ssid)
pci_vid_index_t const *index_ptr;
pci_id_t const *ids_ptr;
- index_ptr = get_vid_index(vid);
+ index_ptr = bsearch(&vid, pci_vid_index, array_length(pci_vid_index), sizeof pci_vid_index[0], vid_search);
if(index_ptr == NULL)
return not_found;
diff --git a/tools/make-plugin-reg.py b/tools/make-plugin-reg.py
index 2b9bc345..135850c2 100755
--- a/tools/make-plugin-reg.py
+++ b/tools/make-plugin-reg.py
@@ -116,6 +116,7 @@ reg_code += """
/* plugins are DLLs on Windows */
#define WS_BUILD_DLL
#include "ws_symbol_export.h"
+#include <wsutil/plugins.h>
"""
@@ -139,16 +140,29 @@ for symbol in regs['codec_register']:
for symbol in regs['register_tap_listener']:
reg_code += "void register_tap_listener_%s(void);\n" % (symbol)
+DESCRIPTION_FLAG = {
+ 'plugin': 'WS_PLUGIN_DESC_DISSECTOR',
+ 'plugin_wtap': 'WS_PLUGIN_DESC_FILE_TYPE',
+ 'plugin_codec': 'WS_PLUGIN_DESC_CODEC',
+ 'plugin_tap': 'WS_PLUGIN_DESC_TAP_LISTENER'
+}
+
reg_code += """
-WS_DLL_PUBLIC_DEF const gchar plugin_version[] = PLUGIN_VERSION;
+WS_DLL_PUBLIC_DEF const char plugin_version[] = PLUGIN_VERSION;
WS_DLL_PUBLIC_DEF const int plugin_want_major = VERSION_MAJOR;
WS_DLL_PUBLIC_DEF const int plugin_want_minor = VERSION_MINOR;
WS_DLL_PUBLIC void plugin_register(void);
+WS_DLL_PUBLIC uint32_t plugin_describe(void);
+
+uint32_t plugin_describe(void)
+{
+ return %s;
+}
void plugin_register(void)
{
-"""
+""" % DESCRIPTION_FLAG[registertype]
if registertype == "plugin":
for symbol in regs['proto_reg']:
diff --git a/tools/make-regs.py b/tools/make-regs.py
index 376b3c65..f17a3f4a 100755
--- a/tools/make-regs.py
+++ b/tools/make-regs.py
@@ -64,11 +64,11 @@ const unsigned long dissector_reg_handoff_count = {1};
output += gen_prototypes(protos)
output += "\n"
- output += gen_array(protos, "dissector_reg_t dissector_reg_proto")
+ output += gen_array(protos, "dissector_reg_t const dissector_reg_proto")
output += "\n"
output += gen_prototypes(handoffs)
output += "\n"
- output += gen_array(handoffs, "dissector_reg_t dissector_reg_handoff")
+ output += gen_array(handoffs, "dissector_reg_t const dissector_reg_handoff")
with open(outfile, "w") as f:
f.write(output)
@@ -96,7 +96,7 @@ const unsigned wtap_module_count = {0};
output += gen_prototypes(wtap_modules)
output += "\n"
- output += gen_array(wtap_modules, "wtap_module_reg_t wtap_module_reg")
+ output += gen_array(wtap_modules, "wtap_module_reg_t const wtap_module_reg")
with open(outfile, "w") as f:
f.write(output)
@@ -124,7 +124,7 @@ const unsigned long tap_reg_listener_count = {0};
output += gen_prototypes(taps)
output += "\n"
- output += gen_array(taps, "tap_reg_t tap_reg_listener")
+ output += gen_array(taps, "tap_reg_t const tap_reg_listener")
with open(outfile, "w") as f:
f.write(output)
@@ -143,7 +143,7 @@ if __name__ == "__main__":
outfile = sys.argv[2]
if sys.argv[3].startswith("@"):
with open(sys.argv[3][1:]) as f:
- infiles = [l.strip() for l in f.readlines()]
+ infiles = [line.strip() for line in f.readlines()]
else:
infiles = sys.argv[3:]
diff --git a/tools/make-services.py b/tools/make-services.py
index 0f832bec..db2afd3d 100755
--- a/tools/make-services.py
+++ b/tools/make-services.py
@@ -9,6 +9,14 @@
#
# SPDX-License-Identifier: GPL-2.0-or-later
+import sys
+import getopt
+import csv
+import re
+import collections
+import urllib.request, urllib.error, urllib.parse
+import codecs
+
iana_svc_url = 'https://www.iana.org/assignments/service-names-port-numbers/service-names-port-numbers.csv'
__doc__ = '''\
@@ -18,13 +26,6 @@ url defaults to
%s
''' % (iana_svc_url)
-import sys
-import getopt
-import csv
-import re
-import collections
-import urllib.request, urllib.error, urllib.parse
-import codecs
services_file = 'epan/services-data.c'
@@ -105,7 +106,7 @@ def parse_rows(svc_fd):
if description == service or description == service.replace("-", " "):
description = None
- if not port in services_map:
+ if port not in services_map:
services_map[port] = collections.OrderedDict()
# Remove some duplicates (first entry wins)
@@ -117,7 +118,7 @@ def parse_rows(svc_fd):
if proto_exists:
continue
- if not service in services_map[port]:
+ if service not in services_map[port]:
services_map[port][service] = [description]
services_map[port][service].append(proto)
@@ -229,10 +230,12 @@ def main(argv):
* service names, e.g. TCP port 80 -> http.
*
* It is subject to copyright and being used with IANA's permission:
- * https://www.wireshark.org/lists/wireshark-dev/200708/msg00160.html
+ * https://lists.wireshark.org/archives/wireshark-dev/200708/msg00160.html
*
* The original file can be found at:
* %s
+ *
+ * Generated by tools/make-services.py
*/
''' % (iana_svc_url))
@@ -259,27 +262,27 @@ def main(argv):
return e[0]
return max_port
- out.write("static ws_services_entry_t global_tcp_udp_services_table[] = {\n")
+ out.write("static const ws_services_entry_t global_tcp_udp_services_table[] = {\n")
for e in tcp_udp:
max_port = write_entry(out, e, max_port)
out.write("};\n\n")
- out.write("static ws_services_entry_t global_tcp_services_table[] = {\n")
+ out.write("static const ws_services_entry_t global_tcp_services_table[] = {\n")
for e in tcp:
max_port = write_entry(out, e, max_port)
out.write("};\n\n")
- out.write("static ws_services_entry_t global_udp_services_table[] = {\n")
+ out.write("static const ws_services_entry_t global_udp_services_table[] = {\n")
for e in udp:
max_port = write_entry(out, e, max_port)
out.write("};\n\n")
- out.write("static ws_services_entry_t global_sctp_services_table[] = {\n")
+ out.write("static const ws_services_entry_t global_sctp_services_table[] = {\n")
for e in sctp:
max_port = write_entry(out, e, max_port)
out.write("};\n\n")
- out.write("static ws_services_entry_t global_dccp_services_table[] = {\n")
+ out.write("static const ws_services_entry_t global_dccp_services_table[] = {\n")
for e in dccp:
max_port = write_entry(out, e, max_port)
out.write("};\n\n")
diff --git a/tools/make-usb.py b/tools/make-usb.py
index 65408034..8b7bd369 100755
--- a/tools/make-usb.py
+++ b/tools/make-usb.py
@@ -40,7 +40,7 @@ for i in range(256):
for utf8line in lines:
# Convert single backslashes to double (escaped) backslashes, escape quotes, etc.
utf8line = utf8line.rstrip()
- utf8line = re.sub("\?+", "?", utf8line)
+ utf8line = re.sub(r"\?+", "?", utf8line)
line = ''.join(escapes[byte] for byte in utf8line.encode('utf8'))
if line == "# Vendors, devices and interfaces. Please keep sorted.":
diff --git a/tools/make-version.py b/tools/make-version.py
index 05458b65..a3416812 100755
--- a/tools/make-version.py
+++ b/tools/make-version.py
@@ -44,8 +44,8 @@ GIT_ABBREV_LENGTH = 12
# If the text "$Format" is still present, it means that
# git archive did not replace the $Format string, which
# means that this not a git archive.
-GIT_EXPORT_SUBST_H = '2acd1a854babc4caae980ef9ed79ad36b6bc0362'
-GIT_EXPORT_SUBST_D = 'tag: wireshark-4.2.6, tag: v4.2.6, refs/merge-requests/16375/head, refs/keep-around/2acd1a854babc4caae980ef9ed79ad36b6bc0362'
+GIT_EXPORT_SUBST_H = '009a163470b581c7d3ee66d89c819cef1f9e50fe'
+GIT_EXPORT_SUBST_D = 'tag: wireshark-4.4.0, tag: v4.4.0, refs/merge-requests/17013/head, refs/keep-around/009a163470b581c7d3ee66d89c819cef1f9e50fe'
IS_GIT_ARCHIVE = not GIT_EXPORT_SUBST_H.startswith('$Format')
@@ -94,7 +94,7 @@ def update_debian_changelog(src_dir, repo_data):
changelog_contents = fh.read()
CHANGELOG_PATTERN = r"^.*"
- text_replacement = f"wireshark ({repo_data['version_major']}.{repo_data['version_minor']}.{repo_data['version_patch']}{repo_data['package_string']}) unstable; urgency=low"
+ text_replacement = f"wireshark ({repo_data['version_major']}.{repo_data['version_minor']}.{repo_data['version_patch']}{repo_data['package_string']}) UNRELEASED; urgency=low"
# Note: Only need to replace the first line, so we don't use re.MULTILINE or re.DOTALL
new_changelog_contents = re.sub(CHANGELOG_PATTERN, text_replacement, changelog_contents)
with open(deb_changelog_filepath, mode='w', encoding='utf-8') as fh:
@@ -110,9 +110,9 @@ def create_version_file(version_f, repo_data):
def update_attributes_asciidoc(src_dir, repo_data):
- # Read docbook/attributes.adoc, then write it back out with an updated
+ # Read doc/attributes.adoc, then write it back out with an updated
# wireshark-version replacement line.
- asiidoc_filepath = os.path.join(src_dir, "docbook", "attributes.adoc")
+ asiidoc_filepath = os.path.join(src_dir, "doc", "attributes.adoc")
with open(asiidoc_filepath, encoding='utf-8') as fh:
asciidoc_contents = fh.read()
@@ -129,8 +129,8 @@ def update_attributes_asciidoc(src_dir, repo_data):
def update_docinfo_asciidoc(src_dir, repo_data):
doc_paths = []
- doc_paths += [os.path.join(src_dir, 'docbook', 'wsdg_src', 'developer-guide-docinfo.xml')]
- doc_paths += [os.path.join(src_dir, 'docbook', 'wsug_src', 'user-guide-docinfo.xml')]
+ doc_paths += [os.path.join(src_dir, 'doc', 'wsdg_src', 'developer-guide-docinfo.xml')]
+ doc_paths += [os.path.join(src_dir, 'doc', 'wsug_src', 'user-guide-docinfo.xml')]
for doc_path in doc_paths:
with open(doc_path, encoding='utf-8') as fh:
@@ -184,33 +184,44 @@ def update_versioned_files(src_dir, set_version, repo_data):
def generate_version_h(repo_data):
# Generate new contents of version.h from repository data
+ num_commits_line = '#define VCS_NUM_COMMITS "0"\n'
+
+ commit_id_line = '/* #undef VCS_COMMIT_ID */\n'
+
if not repo_data.get('enable_vcsversion'):
- return "/* #undef VCSVERSION */\n"
+ return '/* #undef VCS_VERSION */\n' + num_commits_line + commit_id_line
+
+ if repo_data.get('num_commits'):
+ num_commits_line = f'#define VCS_NUM_COMMITS "{int(repo_data["num_commits"])}"\n'
+
+ if repo_data.get('commit_id'):
+ commit_id_line = f'#define VCS_COMMIT_ID "{repo_data["commit_id"]}"'
if repo_data.get('git_description'):
# Do not bother adding the git branch, the git describe output
# normally contains the base tag and commit ID which is more
# than sufficient to determine the actual source tree.
- return f'#define VCSVERSION "{repo_data["git_description"]}"\n'
+ return f'#define VCS_VERSION "{repo_data["git_description"]}"\n' + num_commits_line + commit_id_line
if repo_data.get('last_change') and repo_data.get('num_commits'):
version_string = f"v{repo_data['version_major']}.{repo_data['version_minor']}.{repo_data['version_patch']}"
- vcs_line = f'#define VCSVERSION "{version_string}-Git-{repo_data["num_commits"]}"\n'
- return vcs_line
+ vcs_line = f'#define VCS_VERSION "{version_string}-Git-{repo_data["num_commits"]}"\n'
+ return vcs_line + num_commits_line + commit_id_line
if repo_data.get('commit_id'):
- vcs_line = f'#define VCSVERSION "Git commit {repo_data["commit_id"]}"\n'
- return vcs_line
+ vcs_line = f'#define VCS_VERSION "Git commit {repo_data["commit_id"]}"\n'
+ return vcs_line + num_commits_line + commit_id_line
+
+ vcs_line = '#define VCS_VERSION "Git Rev Unknown from unknown"\n'
- vcs_line = '#define VCSVERSION "Git Rev Unknown from unknown"\n'
- return vcs_line
+ return vcs_line + num_commits_line + commit_id_line
def print_VCS_REVISION(version_file, repo_data, set_vcs):
# Write the version control system's version to $version_file.
# Don't change the file if it is not needed.
#
- # XXX - We might want to add VCSVERSION to CMakeLists.txt so that it can
+ # XXX - We might want to add VCS_VERSION to CMakeLists.txt so that it can
# generate vcs_version.h independently.
new_version_h = generate_version_h(repo_data)
@@ -418,7 +429,7 @@ def read_repo_info(src_dir, tagged_version_extra, untagged_version_extra):
def main():
parser = argparse.ArgumentParser(description='Wireshark file and package versions')
action_group = parser.add_mutually_exclusive_group()
- action_group.add_argument('--set-version', '-v', metavar='<x.y.z>', type=parse_versionstring, help='Set the major, minor, and patch versions in the top-level CMakeLists.txt, docbook/attributes.adoc, packaging/debian/changelog, and the CMakeLists.txt for all libraries to the provided version number')
+ action_group.add_argument('--set-version', '-v', metavar='<x.y.z>', type=parse_versionstring, help='Set the major, minor, and patch versions in the top-level CMakeLists.txt, doc/attributes.adoc, packaging/debian/changelog, and the CMakeLists.txt for all libraries to the provided version number')
action_group.add_argument('--set-release', '-r', action='store_true', help='Set the extra release information in the top-level CMakeLists.txt based on either default or command-line specified options.')
setrel_group = parser.add_argument_group()
setrel_group.add_argument('--tagged-version-extra', '-t', default="", help="Extra version information format to use when a tag is found. No format \
diff --git a/tools/make-wsluarm.py b/tools/make-wsluarm.py
new file mode 100755
index 00000000..52330756
--- /dev/null
+++ b/tools/make-wsluarm.py
@@ -0,0 +1,458 @@
+#!/usr/bin/env python3
+#
+# make-wsluarm.py
+#
+# By Gerald Combs <gerald@wireshark.org>
+# Based on make-wsluarm.pl by Luis E. Garcia Onatnon <luis.ontanon@gmail.com> and Hadriel Kaplan
+#
+# Wireshark - Network traffic analyzer
+# By Gerald Combs <gerald@wireshark.org>
+# Copyright 1998 Gerald Combs
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+'''\
+WSLUA's Reference Manual Generator
+
+This reads Doxygen-style comments in C code and generates wslua API documentation
+formatted as AsciiDoc.
+
+Behavior as documented by Hadriel:
+- Allows modules (i.e., WSLUA_MODULE) to have detailed descriptions
+- Two (or more) line breaks in comments result in separate paragraphs
+- Any indent with a single leading star '*' followed by space is a bulleted list item
+ reducing indent or having an extra linebreak stops the list
+- Any indent with a leading digits-dot followed by space, i.e. "1. ", is a numbered list item
+ reducing indent or having an extra linebreak stops the list
+'''
+
+import argparse
+import logging
+import os
+import re
+import sys
+
+from enum import Enum
+from string import Template
+
+def parse_desc(description):
+ '''\
+Break up descriptions based on newlines and keywords. Some processing
+is done for code blocks and lists, but the output is otherwise left
+intact. Assumes the input has been stripped.
+'''
+
+ c_lines = description.strip().splitlines()
+
+ if len(c_lines) < 1:
+ return ''
+
+ adoc_lines = []
+ cli = iter(c_lines)
+ for line in cli:
+ raw_len = len(line)
+ line = line.lstrip()
+ indent = raw_len - len(line)
+
+ # If we find "[source,...]" then treat it as a block
+ if re.search(r'\[source.*\]', line):
+ # The next line *should* be a delimiter...
+ block_delim = next(cli).strip()
+ line += f'\n{block_delim}\n'
+ block_line = next(cli)
+ # XXX try except StopIteration
+ while block_line.strip() != block_delim:
+ # Keep eating lines until the closing delimiter.
+ # XXX Strip indent spaces?
+ line += block_line + '\n'
+ block_line = next(cli)
+ line += block_delim + '\n'
+
+ adoc_lines.append(line)
+ elif re.match(r'^\s*$', line):
+ # line is either empty or just whitespace, and we're not in a @code block
+ # so it's the end of a previous paragraph, beginning of new one
+ adoc_lines.append('')
+ else:
+ # We have a regular line, not in a @code block.
+ # Add it as-is.
+
+ # if line starts with "@version" or "@since", make it a "Since:"
+ if re.match(r'^@(version|since)\s+', line):
+ line = re.sub(r'^@(version|since)\s+', 'Since: ', line)
+ adoc_lines.append(line)
+
+ # If line starts with single "*" and space, leave it mostly intact.
+ elif re.match(r'^\*\s', line):
+ adoc_lines += ['', line]
+ # keep eating until we find a blank line or end
+ line = next(cli)
+ try:
+ while not re.match(r'^\s*$', line):
+ raw_len = len(line)
+ line = line.lstrip()
+ # if this is less indented than before, break out
+ if raw_len - len(line) < indent:
+ break
+ adoc_lines += [line]
+ line = next(cli)
+ except StopIteration:
+ pass
+ adoc_lines.append('')
+
+ # if line starts with "1." and space, leave it mostly intact.
+ elif re.match(r'^1\.\s', line):
+ adoc_lines += ['', line]
+ # keep eating until we find a blank line or end
+ line = next(cli)
+ try:
+ while not re.match(r'^\s*$', line):
+ raw_len = len(line)
+ line = line.lstrip()
+ # if this is less indented than before, break out
+ if raw_len - len(line) < indent:
+ break
+ adoc_lines += [line]
+ line = next(cli)
+ except StopIteration:
+ pass
+ adoc_lines.append('')
+
+ # Just a normal line, add it to array
+ else:
+ # Nested Lua arrays
+ line = re.sub(r'\[\[(.*)\]\]', r'$$\1$$', line)
+ adoc_lines += [line]
+
+ # Strip out consecutive empty lines.
+ # This isn't strictly necessary but makes the AsciiDoc output prettier.
+ adoc_lines = '\n'.join(adoc_lines).splitlines()
+ adoc_lines = [val for idx, val in enumerate(adoc_lines) if idx == 0 or not (val == '' and val == adoc_lines[idx - 1])]
+
+ return '\n'.join(adoc_lines)
+
+
class LuaFunction:
    '''One documented Lua function, constructor, method, or metamethod.

    Instances are created by the module-level extract_* helpers while the
    C sources are scanned. Arguments, return values, and errors are filled
    in afterwards by extract_buf(), which is given the slice of the C
    buffer between this definition's start offset and the next one.
    to_adoc() renders the collected data as an AsciiDoc section.
    '''
    def __init__(self, c_file, id, start, name, raw_description):
        # c_file: basename of the C source this definition came from.
        # id: unique key, e.g. "ProtoField_int8".
        # start: byte offset of the definition in the C buffer; used for
        #     sorting and for slicing each function's buffer in main().
        # name: display name, e.g. "ProtoField.int8".
        self.c_file = c_file
        self.id = id
        self.start = start
        self.name = name
        if not raw_description:
            raw_description = ''
        self.description = parse_desc(raw_description)
        self.arguments = [] # (name, description, optional)
        self.returns = [] # description
        self.errors = [] # description
        logging.info(f'Created function {id} ({name}) at {start}')

    def add_argument(self, id, raw_name, raw_description, raw_optional):
        '''Record one argument. `id` must match this function's own id
        (a mismatch is a generator bug and aborts); `raw_optional` is the
        string 'OPT' for optional arguments.'''
        if id != self.id:
            logging.critical(f'Invalid argument ID {id} in function {self.id}')
            sys.exit(1)
        if not raw_description:
            raw_description = ''
        optional = False
        if raw_optional == 'OPT':
            optional = True
        self.arguments.append((raw_name.lower(), parse_desc(raw_description), optional))

    def extract_buf(self, buf):
        "Extract arguments, errors, and return values from a function's buffer."

        # Splits "WSLUA_OPTARG_ProtoField_int8_NAME /* food */" into
        # "OPT" (1), "ProtoField_int8" (2), "NAME" (3), ..., ..., " food " (6)
        # Handles functions like "loadfile(filename)" too.
        for m in re.finditer(r'#define WSLUA_(OPT)?ARG_((?:[A-Za-z0-9]+_)?[a-z0-9_]+)_([A-Z0-9_]+)\s+\d+' + TRAILING_COMMENT_RE, buf, re.MULTILINE|re.DOTALL):
            self.add_argument(m.group(2), m.group(3), m.group(6), m.group(1))
            logging.info(f'Created arg {m.group(3)} for {self.id} at {m.start()}')

        # Same as above, except that there is no macro but a (multi-line) comment.
        for m in re.finditer(r'/\*\s*WSLUA_(OPT)?ARG_((?:[A-Za-z0-9]+_)?[a-z0-9_]+)_([A-Z0-9_]+)\s*(.*?)\*/', buf, re.MULTILINE|re.DOTALL):
            self.add_argument(m.group(2), m.group(3), m.group(4), m.group(1))
            logging.info(f'Created arg {m.group(3)} for {self.id} at {m.start()}')

        # "WSLUA_MOREARGS name description" documents a trailing "..."
        # variable-argument list; it is recorded as a mandatory argument.
        for m in re.finditer(r'/\*\s+WSLUA_MOREARGS\s+([A-Za-z_]+)\s+(.*?)\*/', buf, re.MULTILINE|re.DOTALL):
            self.add_argument(m.group(1), '...', m.group(2), False)
            logging.info(f'Created morearg for {self.id}')

        # WSLUA_RETURN / WSLUA_FINAL_RETURN with a trailing comment; the
        # comment text is group 4 ("FINAL_" shifts TRAILING_COMMENT_RE's groups).
        for m in re.finditer(r'WSLUA_(FINAL_)?RETURN\(\s*.*?\s*\)\s*;' + TRAILING_COMMENT_RE, buf, re.MULTILINE|re.DOTALL):
            if m.group(4) and len(m.group(4)) > 0:
                self.returns.append(m.group(4).strip())
                logging.info(f'Created return for {self.id} at {m.start()}')

        # Comment-only return markup: /* _WSLUA_RETURNS_ description */
        for m in re.finditer(r'/\*\s*_WSLUA_RETURNS_\s*(.*?)\*/', buf, re.MULTILINE|re.DOTALL):
            if m.group(1) and len(m.group(1)) > 0:
                self.returns.append(m.group(1).strip())
                logging.info(f'Created return for {self.id} at {m.start()}')

        # WSLUA_ERROR(Class_method, "message"): the quoted message is group 4.
        for m in re.finditer(r'WSLUA_ERROR\s*\(\s*(([A-Z][A-Za-z]+)_)?([a-z_]+),' + QUOTED_RE, buf, re.MULTILINE|re.DOTALL):
            self.errors.append(m.group(4).strip())
            logging.info(f'Created error {m.group(4)[:10]} for {self.id} at {m.start()}')

    def to_adoc(self):
        '''Render this function as an AsciiDoc section string.'''
        # The Perl script wrapped optional args in '[]', joined them with ', ', and
        # converted non-alphabetic characters to underscores.
        mangled_names = [f'_{a}_' if optional else a for a, _, optional in self.arguments]
        section_name = re.sub('[^A-Za-z0-9]', '_', f'{self.name}_{"__".join(mangled_names)}_')
        opt_names = [f'[{a}]' if optional else a for a, _, optional in self.arguments]
        adoc_buf = f'''
// {self.c_file}
[#lua_fn_{section_name}]
===== {self.name}({', '.join(opt_names)})

{self.description}
'''
        if len(self.arguments) > 0:
            adoc_buf += '''
[float]
===== Arguments
'''
        for (name, description, optional) in self.arguments:
            if optional:
                name += ' (optional)'
            adoc_buf += f'\n{name}::\n'

            if len(description) > 0:
                adoc_buf += f'\n{description}\n'

            adoc_buf += f'\n// function_arg_footer: {name}'

        if len(self.arguments) > 0:
            adoc_buf += '\n// end of function_args\n'

        if len(self.returns) > 0:
            adoc_buf += '''
[float]
===== Returns
'''
        for description in self.returns:
            adoc_buf += f'\n{description}\n'

        if len(self.returns) > 0:
            adoc_buf += f'\n// function_returns_footer: {self.name}'

        if len(self.errors) > 0:
            adoc_buf += '''
[float]
===== Errors
'''
        for description in self.errors:
            adoc_buf += f'\n* {description}\n'

        if len(self.errors) > 0:
            adoc_buf += f'\n// function_errors_footer: {self.name}'

        adoc_buf += f'\n// function_footer: {section_name}\n'

        return adoc_buf
+
+
# Regex fragments shared by LuaFunction.extract_buf and the extract_* helpers.
# group 1: whole trailing comment (possibly empty), e.g. " /* foo */"
# group 2: any leading whitespace. XXX why is this not removed using (?:...)
# group 3: actual comment text, e.g. " foo ".
TRAILING_COMMENT_RE = r'((\s*|[\n\r]*)/\*(.*?)\*/)?'
# Tail of a comment the caller's pattern already opened:
# group 1: comment text plus the closing "*/" (possibly absent),
# group 2: the comment text alone.
IN_COMMENT_RE = r'[\s\r\n]*((.*?)\s*\*/)?'
# group 1: contents of a double-quoted C string (no escape handling).
QUOTED_RE = r'"([^"]*)"'
+
# XXX We might want to create a "LuaClass" class similar to LuaFunction
# and move these there.
def extract_class_definitions(c_file, c_buf, module, classes, functions):
    '''Find each WSLUA_CLASS_DEFINE[_BASE] marker in c_buf and add an entry
    to `classes`, keyed by class name. Each entry starts with empty
    constructor/method/attribute lists that the other extractors fill in.

    Consistency fix: the vestigial "return 0" is gone — every extractor now
    returns None, and the single caller (main) ignores the result anyway.
    '''
    for m in re.finditer(r'WSLUA_CLASS_DEFINE(?:_BASE)?\(\s*([A-Z][a-zA-Z0-9]+).*?\);' + TRAILING_COMMENT_RE, c_buf, re.MULTILINE|re.DOTALL):
        # Group 4 is the trailing-comment text (class description), if any.
        raw_desc = m.group(4)
        if raw_desc is None:
            raw_desc = ''
        name = m.group(1)
        mod_class = {
            'description': parse_desc(raw_desc),
            'constructors': [],
            'methods': [],
            'attributes': [],
        }
        classes[name] = mod_class
        logging.info(f'Created class {name}')
+
def extract_function_definitions(c_file, c_buf, module, classes, functions):
    '''Register a LuaFunction for every WSLUA_FUNCTION definition in c_buf.'''
    func_re = r'WSLUA_FUNCTION\s+wslua_([a-z_0-9]+)[^\{]*\{' + TRAILING_COMMENT_RE
    for match in re.finditer(func_re, c_buf, re.MULTILINE | re.DOTALL):
        # Global functions are keyed and displayed by their bare name;
        # group 4 is the trailing-comment description, if any.
        func_id = match.group(1)
        functions[func_id] = LuaFunction(c_file, func_id, match.start(), func_id, match.group(4))
+
def extract_constructor_definitions(c_file, c_buf, module, classes, functions):
    '''Register a LuaFunction for each WSLUA_CONSTRUCTOR definition and
    record it in its class's constructor list.'''
    ctor_re = r'WSLUA_CONSTRUCTOR\s+([A-Za-z0-9]+)_([a-z0-9_]+).*?\{' + TRAILING_COMMENT_RE
    for match in re.finditer(ctor_re, c_buf, re.MULTILINE | re.DOTALL):
        owner, ctor = match.group(1), match.group(2)
        func_id = f'{owner}_{ctor}'
        # Constructors are invoked as Class.name(); group 5 is the
        # trailing-comment description, if any.
        functions[func_id] = LuaFunction(c_file, func_id, match.start(), f'{owner}.{ctor}', match.group(5))
        classes[owner]['constructors'].append(func_id)
+
def extract_constructor_markups(c_file, c_buf, module, classes, functions):
    '''Register constructors documented only via _WSLUA_CONSTRUCTOR_ comment
    markup (no macro) and record them in their class's constructor list.'''
    markup_re = r'_WSLUA_CONSTRUCTOR_\s+([A-Za-z0-9]+)_([a-z0-9_]+)\s*(.*?)\*/'
    for match in re.finditer(markup_re, c_buf, re.MULTILINE | re.DOTALL):
        owner, ctor = match.group(1), match.group(2)
        func_id = f'{owner}_{ctor}'
        # Group 3 is the remainder of the comment: the description text.
        functions[func_id] = LuaFunction(c_file, func_id, match.start(), f'{owner}.{ctor}', match.group(3))
        classes[owner]['constructors'].append(func_id)
+
def extract_method_definitions(c_file, c_buf, module, classes, functions):
    '''Register a LuaFunction for each WSLUA_METHOD definition and record it
    in its class's method list.'''
    method_re = r'WSLUA_METHOD\s+([A-Za-z0-9]+)_([a-z0-9_]+)[^\{]*\{' + TRAILING_COMMENT_RE
    for match in re.finditer(method_re, c_buf, re.MULTILINE | re.DOTALL):
        owner, meth = match.group(1), match.group(2)
        func_id = f'{owner}_{meth}'
        # Methods are invoked on an instance, object:name(); group 5 is the
        # trailing-comment description, if any.
        functions[func_id] = LuaFunction(c_file, func_id, match.start(), f'{owner.lower()}:{meth}', match.group(5))
        classes[owner]['methods'].append(func_id)
+
def extract_metamethod_definitions(c_file, c_buf, module, classes, functions):
    '''Register a LuaFunction for each WSLUA_METAMETHOD definition (e.g.
    Class__tostring) and record it in its class's method list.'''
    meta_re = r'WSLUA_METAMETHOD\s+([A-Za-z0-9]+)(__[a-z0-9]+)[^\{]*\{' + TRAILING_COMMENT_RE
    for match in re.finditer(meta_re, c_buf, re.MULTILINE | re.DOTALL):
        owner, meta = match.group(1), match.group(2)
        # The metamethod name already carries its leading "__", so the id
        # is formed without an extra underscore separator.
        func_id = f'{owner}{meta}'
        functions[func_id] = LuaFunction(c_file, func_id, match.start(), f'{owner.lower()}:{meta}', match.group(5))
        classes[owner]['methods'].append(func_id)
+
def extract_attribute_markups(c_file, c_buf, module, classes, functions):
    '''Collect "/* WSLUA_ATTRIBUTE Class_name RO|WO|RW description */"
    markups and append an attribute dict to the owning class.

    Aborts the program if the access-mode field is not one of RO/WO/RW.
    '''
    attr_re = r'/\*\s+WSLUA_ATTRIBUTE\s+([A-Za-z0-9]+)_([a-z0-9_]+)\s+([A-Z]*)\s*(.*?)\*/'
    for match in re.finditer(attr_re, c_buf, re.MULTILINE | re.DOTALL):
        owner = match.group(1)
        attr_name = f'{owner.lower()}.{match.group(2)}'
        mode = match.group(3)
        # Translate the access-mode flag into a human-readable prefix.
        if 'RO' in mode:
            mode_desc = 'Mode: Retrieve only.\n'
        elif 'WO' in mode:
            mode_desc = 'Mode: Assign only.\n'
        elif 'RW' in mode or 'WR' in mode:
            mode_desc = 'Mode: Retrieve or assign.\n'
        else:
            sys.stderr.write(f'Attribute does not have a RO/WO/RW mode {mode}\n')
            sys.exit(1)

        classes[owner]['attributes'].append({
            'name': attr_name,
            'description': parse_desc(f'{mode_desc}\n{match.group(4)}'),
        })
        logging.info(f'Created attribute {attr_name} for class {owner}')
+
def main():
    '''Scan the given C sources for WSLUA markup and write one AsciiDoc
    file per module into --output-directory.'''
    parser = argparse.ArgumentParser(description="WSLUA's Reference Manual Generator")
    parser.add_argument("c_files", nargs='+', metavar='C file', help="C file")
    parser.add_argument('--output-directory', help='Output directory')
    parser.add_argument('--verbose', action='store_true', help='Show more output')
    args = parser.parse_args()

    logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG if args.verbose else logging.WARNING)

    # module name -> {'c': [(basename, buffer), ...], 'file_base': str, ...}
    modules = {}

    for c_file in args.c_files:
        with open(c_file, encoding='utf-8') as c_f:
            c_buf = c_f.read()

        # Peek for modules vs continuations.
        m = re.search(r'WSLUA_(|CONTINUE_)MODULE\s*(\w+)', c_buf)
        if m:
            module_name = m.group(2)
            c_pair = (os.path.basename(c_file), c_buf)
            try:
                # EAFP: the KeyError from an unknown module name creates it.
                # The defining (non-continuation) file is kept first so its
                # buffer provides the module description below.
                if m.group(1) == 'CONTINUE_':
                    modules[module_name]['c'].append(c_pair)
                else:
                    modules[module_name]['c'].insert(0, c_pair)
            except KeyError:
                modules[module_name] = {}
                modules[module_name]['c'] = [c_pair]
                modules[module_name]['file_base'] = os.path.splitext(c_pair[0])[0]
        else:
            logging.warning(f'No module found in {c_file}')

    # Each extractor shares the same signature and fills `classes`/`functions`.
    extractors = [
        extract_class_definitions,
        extract_function_definitions,
        extract_constructor_definitions,
        extract_constructor_markups,
        extract_method_definitions,
        extract_metamethod_definitions,
        extract_attribute_markups,
    ]

    for module_name in sorted(modules):
        adoc_file = f'{modules[module_name]["file_base"]}.adoc'
        logging.info(f'Writing module {module_name} to {adoc_file} from {len(modules[module_name]["c"])} input(s)')
        functions = {}
        classes = {}

        # Extract our module's description.
        # NOTE(review): "return" here aborts ALL remaining modules, not just
        # this one — presumably a module always has a description; confirm.
        m = re.search(r'WSLUA_MODULE\s*[A-Z][a-zA-Z0-9]+' + IN_COMMENT_RE, modules[module_name]['c'][0][1], re.MULTILINE|re.DOTALL)
        if not m:
            return
        modules[module_name]['description'] = parse_desc(f'{m.group(2)}')

        # Extract module-level information from each file.
        for (c_file, c_buf) in modules[module_name]['c']:
            for extractor in extractors:
                extractor(c_file, c_buf, modules[module_name], classes, functions)

        # Extract function-level information from each file: give each
        # function the slice of the buffer from its own start offset to the
        # start of the next function in the same file.
        # NOTE(review): pop(0) raises IndexError if a scanned file contains
        # no documented functions at all — verify that cannot happen.
        for (c_file, c_buf) in modules[module_name]['c']:
            c_file_ids = filter(lambda k: functions[k].c_file == c_file, functions.keys())
            func_ids = sorted(c_file_ids, key=lambda k: functions[k].start)
            id = func_ids.pop(0)
            for next_id in func_ids:
                functions[id].extract_buf(c_buf[functions[id].start:functions[next_id].start])
                id = next_id
            functions[id].extract_buf(c_buf[functions[id].start:])

        # NOTE(review): from here on, c_file is the last file of the loop
        # above; it is only used as a "// filename" breadcrumb in the output.
        with open(os.path.join(args.output_directory, adoc_file), 'w', encoding='utf-8') as adoc_f:
            adoc_f.write(f'''\
// {c_file}
[#lua_module_{module_name}]
=== {modules[module_name]["description"]}
''')
            for class_name in sorted(classes.keys()):
                lua_class = classes[class_name]
                adoc_f.write(f'''
// {c_file}
[#lua_class_{class_name}]
==== {class_name}
''')

                if not lua_class["description"] == '':
                    adoc_f.write(f'\n{lua_class["description"]}\n')

                # Constructors and methods are emitted in source order and
                # removed from `functions` so only globals remain afterwards.
                for constructor_id in sorted(lua_class['constructors'], key=lambda id: functions[id].start):
                    adoc_f.write(functions[constructor_id].to_adoc())
                    del functions[constructor_id]

                for method_id in sorted(lua_class['methods'], key=lambda id: functions[id].start):
                    adoc_f.write(functions[method_id].to_adoc())
                    del functions[method_id]

                for attribute in lua_class['attributes']:
                    attribute_id = re.sub('[^A-Za-z0-9]', '_', f'{attribute["name"]}')
                    adoc_f.write(f'''
[#lua_class_attrib_{attribute_id}]
===== {attribute["name"]}

{attribute["description"]}

// End {attribute["name"]}
''')


                adoc_f.write(f'\n// class_footer: {class_name}\n')

            # Whatever is left in `functions` is a module-level global.
            if len(functions.keys()) > 0:
                adoc_f.write(f'''\
[#global_functions_{module_name}]
==== Global Functions
''')

            for global_id in sorted(functions.keys(), key=lambda id: functions[id].start):
                adoc_f.write(functions[global_id].to_adoc())

            if len(functions.keys()) > 0:
                adoc_f.write(f'// Global function\n')

            adoc_f.write('// end of module\n')

if __name__ == '__main__':
    main()
diff --git a/tools/ncp2222.py b/tools/ncp2222.py
index f14d0c5c..acc0af4e 100755
--- a/tools/ncp2222.py
+++ b/tools/ncp2222.py
@@ -3,7 +3,7 @@
"""
Creates C code from a table of NCP type 0x2222 packet types.
(And 0x3333, which are the replies, but the packets are more commonly
-refered to as type 0x2222; the 0x3333 replies are understood to be
+referred to as type 0x2222; the 0x3333 replies are understood to be
part of the 0x2222 "family")
The data-munging code was written by Gilbert Ramirez.
@@ -324,7 +324,7 @@ class PTVCBitfield(PTVC):
def Code(self):
ett_name = self.ETTName()
- x = "static int %s = -1;\n" % (ett_name,)
+ x = "static int %s;\n" % (ett_name,)
x = x + "static const ptvc_record ptvc_%s[] = {\n" % (self.Name())
for ptvc_rec in self.list:
@@ -884,7 +884,7 @@ class struct(PTVC, Type):
def Code(self):
ett_name = self.ETTName()
- x = "static int %s = -1;\n" % (ett_name,)
+ x = "static int %s;\n" % (ett_name,)
x = x + "static const ptvc_record ptvc_%s[] = {\n" % (self.name,)
for ptvc_rec in self.list:
x = x + " %s,\n" % (ptvc_rec.Code())
@@ -5850,7 +5850,7 @@ def define_errors():
errors[0xff0d] = "Object associated with ObjectID is not a manager"
errors[0xff0e] = "Invalid initial semaphore value"
errors[0xff0f] = "The semaphore handle is not valid"
- errors[0xff10] = "SemaphoreHandle is not associated with a valid sempahore"
+ errors[0xff10] = "SemaphoreHandle is not associated with a valid semaphore"
errors[0xff11] = "Invalid semaphore handle"
errors[0xff12] = "Transaction tracking is not available"
errors[0xff13] = "The transaction has not yet been written to disk"
@@ -5905,7 +5905,6 @@ def produce_code():
#include "config.h"
#include <string.h>
-#include <glib.h>
#include <epan/packet.h>
#include <epan/dfilter/dfilter.h>
#include <epan/exceptions.h>
@@ -5917,6 +5916,7 @@ def produce_code():
#include <epan/reassemble.h>
#include <epan/tap.h>
#include <epan/proto_data.h>
+#include <wsutil/array.h>
#include "packet-ncp-int.h"
#include "packet-ncp-nmas.h"
#include "packet-ncp-sss.h"
@@ -5957,565 +5957,565 @@ static int ptvc_struct_int_storage;
#define NREV 0x00000004
#define NFLAGS 0x00000008
-static int hf_ncp_number_of_data_streams_long = -1;
-static int hf_ncp_func = -1;
-static int hf_ncp_length = -1;
-static int hf_ncp_subfunc = -1;
-static int hf_ncp_group = -1;
-static int hf_ncp_fragment_handle = -1;
-static int hf_ncp_completion_code = -1;
-static int hf_ncp_connection_status = -1;
-static int hf_ncp_req_frame_num = -1;
-static int hf_ncp_req_frame_time = -1;
-static int hf_ncp_fragment_size = -1;
-static int hf_ncp_message_size = -1;
-static int hf_ncp_nds_flag = -1;
-static int hf_ncp_nds_verb = -1;
-static int hf_ping_version = -1;
-/* static int hf_nds_version = -1; */
-/* static int hf_nds_flags = -1; */
-static int hf_nds_reply_depth = -1;
-static int hf_nds_reply_rev = -1;
-static int hf_nds_reply_flags = -1;
-static int hf_nds_p1type = -1;
-static int hf_nds_uint32value = -1;
-static int hf_nds_bit1 = -1;
-static int hf_nds_bit2 = -1;
-static int hf_nds_bit3 = -1;
-static int hf_nds_bit4 = -1;
-static int hf_nds_bit5 = -1;
-static int hf_nds_bit6 = -1;
-static int hf_nds_bit7 = -1;
-static int hf_nds_bit8 = -1;
-static int hf_nds_bit9 = -1;
-static int hf_nds_bit10 = -1;
-static int hf_nds_bit11 = -1;
-static int hf_nds_bit12 = -1;
-static int hf_nds_bit13 = -1;
-static int hf_nds_bit14 = -1;
-static int hf_nds_bit15 = -1;
-static int hf_nds_bit16 = -1;
-static int hf_outflags = -1;
-static int hf_bit1outflags = -1;
-static int hf_bit2outflags = -1;
-static int hf_bit3outflags = -1;
-static int hf_bit4outflags = -1;
-static int hf_bit5outflags = -1;
-static int hf_bit6outflags = -1;
-static int hf_bit7outflags = -1;
-static int hf_bit8outflags = -1;
-static int hf_bit9outflags = -1;
-static int hf_bit10outflags = -1;
-static int hf_bit11outflags = -1;
-static int hf_bit12outflags = -1;
-static int hf_bit13outflags = -1;
-static int hf_bit14outflags = -1;
-static int hf_bit15outflags = -1;
-static int hf_bit16outflags = -1;
-static int hf_bit1nflags = -1;
-static int hf_bit2nflags = -1;
-static int hf_bit3nflags = -1;
-static int hf_bit4nflags = -1;
-static int hf_bit5nflags = -1;
-static int hf_bit6nflags = -1;
-static int hf_bit7nflags = -1;
-static int hf_bit8nflags = -1;
-static int hf_bit9nflags = -1;
-static int hf_bit10nflags = -1;
-static int hf_bit11nflags = -1;
-static int hf_bit12nflags = -1;
-static int hf_bit13nflags = -1;
-static int hf_bit14nflags = -1;
-static int hf_bit15nflags = -1;
-static int hf_bit16nflags = -1;
-static int hf_bit1rflags = -1;
-static int hf_bit2rflags = -1;
-static int hf_bit3rflags = -1;
-static int hf_bit4rflags = -1;
-static int hf_bit5rflags = -1;
-static int hf_bit6rflags = -1;
-static int hf_bit7rflags = -1;
-static int hf_bit8rflags = -1;
-static int hf_bit9rflags = -1;
-static int hf_bit10rflags = -1;
-static int hf_bit11rflags = -1;
-static int hf_bit12rflags = -1;
-static int hf_bit13rflags = -1;
-static int hf_bit14rflags = -1;
-static int hf_bit15rflags = -1;
-static int hf_bit16rflags = -1;
-static int hf_cflags = -1;
-static int hf_bit1cflags = -1;
-static int hf_bit2cflags = -1;
-static int hf_bit3cflags = -1;
-static int hf_bit4cflags = -1;
-static int hf_bit5cflags = -1;
-static int hf_bit6cflags = -1;
-static int hf_bit7cflags = -1;
-static int hf_bit8cflags = -1;
-static int hf_bit9cflags = -1;
-static int hf_bit10cflags = -1;
-static int hf_bit11cflags = -1;
-static int hf_bit12cflags = -1;
-static int hf_bit13cflags = -1;
-static int hf_bit14cflags = -1;
-static int hf_bit15cflags = -1;
-static int hf_bit16cflags = -1;
-static int hf_bit1acflags = -1;
-static int hf_bit2acflags = -1;
-static int hf_bit3acflags = -1;
-static int hf_bit4acflags = -1;
-static int hf_bit5acflags = -1;
-static int hf_bit6acflags = -1;
-static int hf_bit7acflags = -1;
-static int hf_bit8acflags = -1;
-static int hf_bit9acflags = -1;
-static int hf_bit10acflags = -1;
-static int hf_bit11acflags = -1;
-static int hf_bit12acflags = -1;
-static int hf_bit13acflags = -1;
-static int hf_bit14acflags = -1;
-static int hf_bit15acflags = -1;
-static int hf_bit16acflags = -1;
-static int hf_vflags = -1;
-static int hf_bit1vflags = -1;
-static int hf_bit2vflags = -1;
-static int hf_bit3vflags = -1;
-static int hf_bit4vflags = -1;
-static int hf_bit5vflags = -1;
-static int hf_bit6vflags = -1;
-static int hf_bit7vflags = -1;
-static int hf_bit8vflags = -1;
-static int hf_bit9vflags = -1;
-static int hf_bit10vflags = -1;
-static int hf_bit11vflags = -1;
-static int hf_bit12vflags = -1;
-static int hf_bit13vflags = -1;
-static int hf_bit14vflags = -1;
-static int hf_bit15vflags = -1;
-static int hf_bit16vflags = -1;
-static int hf_eflags = -1;
-static int hf_bit1eflags = -1;
-static int hf_bit2eflags = -1;
-static int hf_bit3eflags = -1;
-static int hf_bit4eflags = -1;
-static int hf_bit5eflags = -1;
-static int hf_bit6eflags = -1;
-static int hf_bit7eflags = -1;
-static int hf_bit8eflags = -1;
-static int hf_bit9eflags = -1;
-static int hf_bit10eflags = -1;
-static int hf_bit11eflags = -1;
-static int hf_bit12eflags = -1;
-static int hf_bit13eflags = -1;
-static int hf_bit14eflags = -1;
-static int hf_bit15eflags = -1;
-static int hf_bit16eflags = -1;
-static int hf_infoflagsl = -1;
-static int hf_retinfoflagsl = -1;
-static int hf_bit1infoflagsl = -1;
-static int hf_bit2infoflagsl = -1;
-static int hf_bit3infoflagsl = -1;
-static int hf_bit4infoflagsl = -1;
-static int hf_bit5infoflagsl = -1;
-static int hf_bit6infoflagsl = -1;
-static int hf_bit7infoflagsl = -1;
-static int hf_bit8infoflagsl = -1;
-static int hf_bit9infoflagsl = -1;
-static int hf_bit10infoflagsl = -1;
-static int hf_bit11infoflagsl = -1;
-static int hf_bit12infoflagsl = -1;
-static int hf_bit13infoflagsl = -1;
-static int hf_bit14infoflagsl = -1;
-static int hf_bit15infoflagsl = -1;
-static int hf_bit16infoflagsl = -1;
-static int hf_infoflagsh = -1;
-static int hf_bit1infoflagsh = -1;
-static int hf_bit2infoflagsh = -1;
-static int hf_bit3infoflagsh = -1;
-static int hf_bit4infoflagsh = -1;
-static int hf_bit5infoflagsh = -1;
-static int hf_bit6infoflagsh = -1;
-static int hf_bit7infoflagsh = -1;
-static int hf_bit8infoflagsh = -1;
-static int hf_bit9infoflagsh = -1;
-static int hf_bit10infoflagsh = -1;
-static int hf_bit11infoflagsh = -1;
-static int hf_bit12infoflagsh = -1;
-static int hf_bit13infoflagsh = -1;
-static int hf_bit14infoflagsh = -1;
-static int hf_bit15infoflagsh = -1;
-static int hf_bit16infoflagsh = -1;
-static int hf_retinfoflagsh = -1;
-static int hf_bit1retinfoflagsh = -1;
-static int hf_bit2retinfoflagsh = -1;
-static int hf_bit3retinfoflagsh = -1;
-static int hf_bit4retinfoflagsh = -1;
-static int hf_bit5retinfoflagsh = -1;
-static int hf_bit6retinfoflagsh = -1;
-static int hf_bit7retinfoflagsh = -1;
-static int hf_bit8retinfoflagsh = -1;
-static int hf_bit9retinfoflagsh = -1;
-static int hf_bit10retinfoflagsh = -1;
-static int hf_bit11retinfoflagsh = -1;
-static int hf_bit12retinfoflagsh = -1;
-static int hf_bit13retinfoflagsh = -1;
-static int hf_bit14retinfoflagsh = -1;
-static int hf_bit15retinfoflagsh = -1;
-static int hf_bit16retinfoflagsh = -1;
-static int hf_bit1lflags = -1;
-static int hf_bit2lflags = -1;
-static int hf_bit3lflags = -1;
-static int hf_bit4lflags = -1;
-static int hf_bit5lflags = -1;
-static int hf_bit6lflags = -1;
-static int hf_bit7lflags = -1;
-static int hf_bit8lflags = -1;
-static int hf_bit9lflags = -1;
-static int hf_bit10lflags = -1;
-static int hf_bit11lflags = -1;
-static int hf_bit12lflags = -1;
-static int hf_bit13lflags = -1;
-static int hf_bit14lflags = -1;
-static int hf_bit15lflags = -1;
-static int hf_bit16lflags = -1;
-static int hf_l1flagsl = -1;
-static int hf_l1flagsh = -1;
-static int hf_bit1l1flagsl = -1;
-static int hf_bit2l1flagsl = -1;
-static int hf_bit3l1flagsl = -1;
-static int hf_bit4l1flagsl = -1;
-static int hf_bit5l1flagsl = -1;
-static int hf_bit6l1flagsl = -1;
-static int hf_bit7l1flagsl = -1;
-static int hf_bit8l1flagsl = -1;
-static int hf_bit9l1flagsl = -1;
-static int hf_bit10l1flagsl = -1;
-static int hf_bit11l1flagsl = -1;
-static int hf_bit12l1flagsl = -1;
-static int hf_bit13l1flagsl = -1;
-static int hf_bit14l1flagsl = -1;
-static int hf_bit15l1flagsl = -1;
-static int hf_bit16l1flagsl = -1;
-static int hf_bit1l1flagsh = -1;
-static int hf_bit2l1flagsh = -1;
-static int hf_bit3l1flagsh = -1;
-static int hf_bit4l1flagsh = -1;
-static int hf_bit5l1flagsh = -1;
-static int hf_bit6l1flagsh = -1;
-static int hf_bit7l1flagsh = -1;
-static int hf_bit8l1flagsh = -1;
-static int hf_bit9l1flagsh = -1;
-static int hf_bit10l1flagsh = -1;
-static int hf_bit11l1flagsh = -1;
-static int hf_bit12l1flagsh = -1;
-static int hf_bit13l1flagsh = -1;
-static int hf_bit14l1flagsh = -1;
-static int hf_bit15l1flagsh = -1;
-static int hf_bit16l1flagsh = -1;
-static int hf_nds_tree_name = -1;
-static int hf_nds_reply_error = -1;
-static int hf_nds_net = -1;
-static int hf_nds_node = -1;
-static int hf_nds_socket = -1;
-static int hf_add_ref_ip = -1;
-static int hf_add_ref_udp = -1;
-static int hf_add_ref_tcp = -1;
-static int hf_referral_record = -1;
-static int hf_referral_addcount = -1;
-static int hf_nds_port = -1;
-static int hf_mv_string = -1;
-static int hf_nds_syntax = -1;
-static int hf_value_string = -1;
-static int hf_nds_buffer_size = -1;
-static int hf_nds_ver = -1;
-static int hf_nds_nflags = -1;
-static int hf_nds_scope = -1;
-static int hf_nds_name = -1;
-static int hf_nds_comm_trans = -1;
-static int hf_nds_tree_trans = -1;
-static int hf_nds_iteration = -1;
-static int hf_nds_eid = -1;
-static int hf_nds_info_type = -1;
-static int hf_nds_all_attr = -1;
-static int hf_nds_req_flags = -1;
-static int hf_nds_attr = -1;
-static int hf_nds_crc = -1;
-static int hf_nds_referrals = -1;
-static int hf_nds_result_flags = -1;
-static int hf_nds_tag_string = -1;
-static int hf_value_bytes = -1;
-static int hf_replica_type = -1;
-static int hf_replica_state = -1;
-static int hf_replica_number = -1;
-static int hf_min_nds_ver = -1;
-static int hf_nds_ver_include = -1;
-static int hf_nds_ver_exclude = -1;
-/* static int hf_nds_es = -1; */
-static int hf_es_type = -1;
-/* static int hf_delim_string = -1; */
-static int hf_rdn_string = -1;
-static int hf_nds_revent = -1;
-static int hf_nds_rnum = -1;
-static int hf_nds_name_type = -1;
-static int hf_nds_rflags = -1;
-static int hf_nds_eflags = -1;
-static int hf_nds_depth = -1;
-static int hf_nds_class_def_type = -1;
-static int hf_nds_classes = -1;
-static int hf_nds_return_all_classes = -1;
-static int hf_nds_stream_flags = -1;
-static int hf_nds_stream_name = -1;
-static int hf_nds_file_handle = -1;
-static int hf_nds_file_size = -1;
-static int hf_nds_dn_output_type = -1;
-static int hf_nds_nested_output_type = -1;
-static int hf_nds_output_delimiter = -1;
-static int hf_nds_output_entry_specifier = -1;
-static int hf_es_value = -1;
-static int hf_es_rdn_count = -1;
-static int hf_nds_replica_num = -1;
-static int hf_nds_event_num = -1;
-static int hf_es_seconds = -1;
-static int hf_nds_compare_results = -1;
-static int hf_nds_parent = -1;
-static int hf_nds_name_filter = -1;
-static int hf_nds_class_filter = -1;
-static int hf_nds_time_filter = -1;
-static int hf_nds_partition_root_id = -1;
-static int hf_nds_replicas = -1;
-static int hf_nds_purge = -1;
-static int hf_nds_local_partition = -1;
-static int hf_partition_busy = -1;
-static int hf_nds_number_of_changes = -1;
-static int hf_sub_count = -1;
-static int hf_nds_revision = -1;
-static int hf_nds_base_class = -1;
-static int hf_nds_relative_dn = -1;
-/* static int hf_nds_root_dn = -1; */
-/* static int hf_nds_parent_dn = -1; */
-static int hf_deref_base = -1;
-/* static int hf_nds_entry_info = -1; */
-static int hf_nds_base = -1;
-static int hf_nds_privileges = -1;
-static int hf_nds_vflags = -1;
-static int hf_nds_value_len = -1;
-static int hf_nds_cflags = -1;
-static int hf_nds_acflags = -1;
-static int hf_nds_asn1 = -1;
-static int hf_nds_upper = -1;
-static int hf_nds_lower = -1;
-static int hf_nds_trustee_dn = -1;
-static int hf_nds_attribute_dn = -1;
-static int hf_nds_acl_add = -1;
-static int hf_nds_acl_del = -1;
-static int hf_nds_att_add = -1;
-static int hf_nds_att_del = -1;
-static int hf_nds_keep = -1;
-static int hf_nds_new_rdn = -1;
-static int hf_nds_time_delay = -1;
-static int hf_nds_root_name = -1;
-static int hf_nds_new_part_id = -1;
-static int hf_nds_child_part_id = -1;
-static int hf_nds_master_part_id = -1;
-static int hf_nds_target_name = -1;
-static int hf_nds_super = -1;
-static int hf_pingflags2 = -1;
-static int hf_bit1pingflags2 = -1;
-static int hf_bit2pingflags2 = -1;
-static int hf_bit3pingflags2 = -1;
-static int hf_bit4pingflags2 = -1;
-static int hf_bit5pingflags2 = -1;
-static int hf_bit6pingflags2 = -1;
-static int hf_bit7pingflags2 = -1;
-static int hf_bit8pingflags2 = -1;
-static int hf_bit9pingflags2 = -1;
-static int hf_bit10pingflags2 = -1;
-static int hf_bit11pingflags2 = -1;
-static int hf_bit12pingflags2 = -1;
-static int hf_bit13pingflags2 = -1;
-static int hf_bit14pingflags2 = -1;
-static int hf_bit15pingflags2 = -1;
-static int hf_bit16pingflags2 = -1;
-static int hf_pingflags1 = -1;
-static int hf_bit1pingflags1 = -1;
-static int hf_bit2pingflags1 = -1;
-static int hf_bit3pingflags1 = -1;
-static int hf_bit4pingflags1 = -1;
-static int hf_bit5pingflags1 = -1;
-static int hf_bit6pingflags1 = -1;
-static int hf_bit7pingflags1 = -1;
-static int hf_bit8pingflags1 = -1;
-static int hf_bit9pingflags1 = -1;
-static int hf_bit10pingflags1 = -1;
-static int hf_bit11pingflags1 = -1;
-static int hf_bit12pingflags1 = -1;
-static int hf_bit13pingflags1 = -1;
-static int hf_bit14pingflags1 = -1;
-static int hf_bit15pingflags1 = -1;
-static int hf_bit16pingflags1 = -1;
-static int hf_pingpflags1 = -1;
-static int hf_bit1pingpflags1 = -1;
-static int hf_bit2pingpflags1 = -1;
-static int hf_bit3pingpflags1 = -1;
-static int hf_bit4pingpflags1 = -1;
-static int hf_bit5pingpflags1 = -1;
-static int hf_bit6pingpflags1 = -1;
-static int hf_bit7pingpflags1 = -1;
-static int hf_bit8pingpflags1 = -1;
-static int hf_bit9pingpflags1 = -1;
-static int hf_bit10pingpflags1 = -1;
-static int hf_bit11pingpflags1 = -1;
-static int hf_bit12pingpflags1 = -1;
-static int hf_bit13pingpflags1 = -1;
-static int hf_bit14pingpflags1 = -1;
-static int hf_bit15pingpflags1 = -1;
-static int hf_bit16pingpflags1 = -1;
-static int hf_pingvflags1 = -1;
-static int hf_bit1pingvflags1 = -1;
-static int hf_bit2pingvflags1 = -1;
-static int hf_bit3pingvflags1 = -1;
-static int hf_bit4pingvflags1 = -1;
-static int hf_bit5pingvflags1 = -1;
-static int hf_bit6pingvflags1 = -1;
-static int hf_bit7pingvflags1 = -1;
-static int hf_bit8pingvflags1 = -1;
-static int hf_bit9pingvflags1 = -1;
-static int hf_bit10pingvflags1 = -1;
-static int hf_bit11pingvflags1 = -1;
-static int hf_bit12pingvflags1 = -1;
-static int hf_bit13pingvflags1 = -1;
-static int hf_bit14pingvflags1 = -1;
-static int hf_bit15pingvflags1 = -1;
-static int hf_bit16pingvflags1 = -1;
-static int hf_nds_letter_ver = -1;
-static int hf_nds_os_majver = -1;
-static int hf_nds_os_minver = -1;
-static int hf_nds_lic_flags = -1;
-static int hf_nds_ds_time = -1;
-static int hf_nds_ping_version = -1;
-static int hf_nds_search_scope = -1;
-static int hf_nds_num_objects = -1;
-static int hf_siflags = -1;
-static int hf_bit1siflags = -1;
-static int hf_bit2siflags = -1;
-static int hf_bit3siflags = -1;
-static int hf_bit4siflags = -1;
-static int hf_bit5siflags = -1;
-static int hf_bit6siflags = -1;
-static int hf_bit7siflags = -1;
-static int hf_bit8siflags = -1;
-static int hf_bit9siflags = -1;
-static int hf_bit10siflags = -1;
-static int hf_bit11siflags = -1;
-static int hf_bit12siflags = -1;
-static int hf_bit13siflags = -1;
-static int hf_bit14siflags = -1;
-static int hf_bit15siflags = -1;
-static int hf_bit16siflags = -1;
-static int hf_nds_segments = -1;
-static int hf_nds_segment = -1;
-static int hf_nds_segment_overlap = -1;
-static int hf_nds_segment_overlap_conflict = -1;
-static int hf_nds_segment_multiple_tails = -1;
-static int hf_nds_segment_too_long_segment = -1;
-static int hf_nds_segment_error = -1;
-static int hf_nds_segment_count = -1;
-static int hf_nds_reassembled_length = -1;
-static int hf_nds_verb2b_req_flags = -1;
-static int hf_ncp_ip_address = -1;
-static int hf_ncp_copyright = -1;
-static int hf_ndsprot1flag = -1;
-static int hf_ndsprot2flag = -1;
-static int hf_ndsprot3flag = -1;
-static int hf_ndsprot4flag = -1;
-static int hf_ndsprot5flag = -1;
-static int hf_ndsprot6flag = -1;
-static int hf_ndsprot7flag = -1;
-static int hf_ndsprot8flag = -1;
-static int hf_ndsprot9flag = -1;
-static int hf_ndsprot10flag = -1;
-static int hf_ndsprot11flag = -1;
-static int hf_ndsprot12flag = -1;
-static int hf_ndsprot13flag = -1;
-static int hf_ndsprot14flag = -1;
-static int hf_ndsprot15flag = -1;
-static int hf_ndsprot16flag = -1;
-static int hf_nds_svr_dst_name = -1;
-static int hf_nds_tune_mark = -1;
-/* static int hf_nds_create_time = -1; */
-static int hf_srvr_param_number = -1;
-static int hf_srvr_param_boolean = -1;
-static int hf_srvr_param_string = -1;
-static int hf_nds_svr_time = -1;
-static int hf_nds_crt_time = -1;
-static int hf_nds_number_of_items = -1;
-static int hf_nds_compare_attributes = -1;
-static int hf_nds_read_attribute = -1;
-static int hf_nds_write_add_delete_attribute = -1;
-static int hf_nds_add_delete_self = -1;
-static int hf_nds_privilege_not_defined = -1;
-static int hf_nds_supervisor = -1;
-static int hf_nds_inheritance_control = -1;
-static int hf_nds_browse_entry = -1;
-static int hf_nds_add_entry = -1;
-static int hf_nds_delete_entry = -1;
-static int hf_nds_rename_entry = -1;
-static int hf_nds_supervisor_entry = -1;
-static int hf_nds_entry_privilege_not_defined = -1;
-static int hf_nds_iterator = -1;
-static int hf_ncp_nds_iterverb = -1;
-static int hf_iter_completion_code = -1;
-/* static int hf_nds_iterobj = -1; */
-static int hf_iter_verb_completion_code = -1;
-static int hf_iter_ans = -1;
-static int hf_positionable = -1;
-static int hf_num_skipped = -1;
-static int hf_num_to_skip = -1;
-static int hf_timelimit = -1;
-static int hf_iter_index = -1;
-static int hf_num_to_get = -1;
-/* static int hf_ret_info_type = -1; */
-static int hf_data_size = -1;
-static int hf_this_count = -1;
-static int hf_max_entries = -1;
-static int hf_move_position = -1;
-static int hf_iter_copy = -1;
-static int hf_iter_position = -1;
-static int hf_iter_search = -1;
-static int hf_iter_other = -1;
-static int hf_nds_oid = -1;
-static int hf_ncp_bytes_actually_trans_64 = -1;
-static int hf_sap_name = -1;
-static int hf_os_name = -1;
-static int hf_vendor_name = -1;
-static int hf_hardware_name = -1;
-static int hf_no_request_record_found = -1;
-static int hf_search_modifier = -1;
-static int hf_search_pattern = -1;
-static int hf_nds_acl_protected_attribute = -1;
-static int hf_nds_acl_subject = -1;
-static int hf_nds_acl_privileges = -1;
-
-static expert_field ei_ncp_file_rights_change = EI_INIT;
-static expert_field ei_ncp_completion_code = EI_INIT;
-static expert_field ei_nds_reply_error = EI_INIT;
-static expert_field ei_ncp_destroy_connection = EI_INIT;
-static expert_field ei_nds_iteration = EI_INIT;
-static expert_field ei_ncp_eid = EI_INIT;
-static expert_field ei_ncp_file_handle = EI_INIT;
-static expert_field ei_ncp_connection_destroyed = EI_INIT;
-static expert_field ei_ncp_no_request_record_found = EI_INIT;
-static expert_field ei_ncp_file_rights = EI_INIT;
-static expert_field ei_iter_verb_completion_code = EI_INIT;
-static expert_field ei_ncp_connection_request = EI_INIT;
-static expert_field ei_ncp_connection_status = EI_INIT;
-static expert_field ei_ncp_op_lock_handle = EI_INIT;
-static expert_field ei_ncp_effective_rights = EI_INIT;
-static expert_field ei_ncp_server = EI_INIT;
-static expert_field ei_ncp_invalid_offset = EI_INIT;
-static expert_field ei_ncp_address_type = EI_INIT;
-static expert_field ei_ncp_value_too_large = EI_INIT;
+static int hf_ncp_number_of_data_streams_long;
+static int hf_ncp_func;
+static int hf_ncp_length;
+static int hf_ncp_subfunc;
+static int hf_ncp_group;
+static int hf_ncp_fragment_handle;
+static int hf_ncp_completion_code;
+static int hf_ncp_connection_status;
+static int hf_ncp_req_frame_num;
+static int hf_ncp_req_frame_time;
+static int hf_ncp_fragment_size;
+static int hf_ncp_message_size;
+static int hf_ncp_nds_flag;
+static int hf_ncp_nds_verb;
+static int hf_ping_version;
+/* static int hf_nds_version; */
+/* static int hf_nds_flags; */
+static int hf_nds_reply_depth;
+static int hf_nds_reply_rev;
+static int hf_nds_reply_flags;
+static int hf_nds_p1type;
+static int hf_nds_uint32value;
+static int hf_nds_bit1;
+static int hf_nds_bit2;
+static int hf_nds_bit3;
+static int hf_nds_bit4;
+static int hf_nds_bit5;
+static int hf_nds_bit6;
+static int hf_nds_bit7;
+static int hf_nds_bit8;
+static int hf_nds_bit9;
+static int hf_nds_bit10;
+static int hf_nds_bit11;
+static int hf_nds_bit12;
+static int hf_nds_bit13;
+static int hf_nds_bit14;
+static int hf_nds_bit15;
+static int hf_nds_bit16;
+static int hf_outflags;
+static int hf_bit1outflags;
+static int hf_bit2outflags;
+static int hf_bit3outflags;
+static int hf_bit4outflags;
+static int hf_bit5outflags;
+static int hf_bit6outflags;
+static int hf_bit7outflags;
+static int hf_bit8outflags;
+static int hf_bit9outflags;
+static int hf_bit10outflags;
+static int hf_bit11outflags;
+static int hf_bit12outflags;
+static int hf_bit13outflags;
+static int hf_bit14outflags;
+static int hf_bit15outflags;
+static int hf_bit16outflags;
+static int hf_bit1nflags;
+static int hf_bit2nflags;
+static int hf_bit3nflags;
+static int hf_bit4nflags;
+static int hf_bit5nflags;
+static int hf_bit6nflags;
+static int hf_bit7nflags;
+static int hf_bit8nflags;
+static int hf_bit9nflags;
+static int hf_bit10nflags;
+static int hf_bit11nflags;
+static int hf_bit12nflags;
+static int hf_bit13nflags;
+static int hf_bit14nflags;
+static int hf_bit15nflags;
+static int hf_bit16nflags;
+static int hf_bit1rflags;
+static int hf_bit2rflags;
+static int hf_bit3rflags;
+static int hf_bit4rflags;
+static int hf_bit5rflags;
+static int hf_bit6rflags;
+static int hf_bit7rflags;
+static int hf_bit8rflags;
+static int hf_bit9rflags;
+static int hf_bit10rflags;
+static int hf_bit11rflags;
+static int hf_bit12rflags;
+static int hf_bit13rflags;
+static int hf_bit14rflags;
+static int hf_bit15rflags;
+static int hf_bit16rflags;
+static int hf_cflags;
+static int hf_bit1cflags;
+static int hf_bit2cflags;
+static int hf_bit3cflags;
+static int hf_bit4cflags;
+static int hf_bit5cflags;
+static int hf_bit6cflags;
+static int hf_bit7cflags;
+static int hf_bit8cflags;
+static int hf_bit9cflags;
+static int hf_bit10cflags;
+static int hf_bit11cflags;
+static int hf_bit12cflags;
+static int hf_bit13cflags;
+static int hf_bit14cflags;
+static int hf_bit15cflags;
+static int hf_bit16cflags;
+static int hf_bit1acflags;
+static int hf_bit2acflags;
+static int hf_bit3acflags;
+static int hf_bit4acflags;
+static int hf_bit5acflags;
+static int hf_bit6acflags;
+static int hf_bit7acflags;
+static int hf_bit8acflags;
+static int hf_bit9acflags;
+static int hf_bit10acflags;
+static int hf_bit11acflags;
+static int hf_bit12acflags;
+static int hf_bit13acflags;
+static int hf_bit14acflags;
+static int hf_bit15acflags;
+static int hf_bit16acflags;
+static int hf_vflags;
+static int hf_bit1vflags;
+static int hf_bit2vflags;
+static int hf_bit3vflags;
+static int hf_bit4vflags;
+static int hf_bit5vflags;
+static int hf_bit6vflags;
+static int hf_bit7vflags;
+static int hf_bit8vflags;
+static int hf_bit9vflags;
+static int hf_bit10vflags;
+static int hf_bit11vflags;
+static int hf_bit12vflags;
+static int hf_bit13vflags;
+static int hf_bit14vflags;
+static int hf_bit15vflags;
+static int hf_bit16vflags;
+static int hf_eflags;
+static int hf_bit1eflags;
+static int hf_bit2eflags;
+static int hf_bit3eflags;
+static int hf_bit4eflags;
+static int hf_bit5eflags;
+static int hf_bit6eflags;
+static int hf_bit7eflags;
+static int hf_bit8eflags;
+static int hf_bit9eflags;
+static int hf_bit10eflags;
+static int hf_bit11eflags;
+static int hf_bit12eflags;
+static int hf_bit13eflags;
+static int hf_bit14eflags;
+static int hf_bit15eflags;
+static int hf_bit16eflags;
+static int hf_infoflagsl;
+static int hf_retinfoflagsl;
+static int hf_bit1infoflagsl;
+static int hf_bit2infoflagsl;
+static int hf_bit3infoflagsl;
+static int hf_bit4infoflagsl;
+static int hf_bit5infoflagsl;
+static int hf_bit6infoflagsl;
+static int hf_bit7infoflagsl;
+static int hf_bit8infoflagsl;
+static int hf_bit9infoflagsl;
+static int hf_bit10infoflagsl;
+static int hf_bit11infoflagsl;
+static int hf_bit12infoflagsl;
+static int hf_bit13infoflagsl;
+static int hf_bit14infoflagsl;
+static int hf_bit15infoflagsl;
+static int hf_bit16infoflagsl;
+static int hf_infoflagsh;
+static int hf_bit1infoflagsh;
+static int hf_bit2infoflagsh;
+static int hf_bit3infoflagsh;
+static int hf_bit4infoflagsh;
+static int hf_bit5infoflagsh;
+static int hf_bit6infoflagsh;
+static int hf_bit7infoflagsh;
+static int hf_bit8infoflagsh;
+static int hf_bit9infoflagsh;
+static int hf_bit10infoflagsh;
+static int hf_bit11infoflagsh;
+static int hf_bit12infoflagsh;
+static int hf_bit13infoflagsh;
+static int hf_bit14infoflagsh;
+static int hf_bit15infoflagsh;
+static int hf_bit16infoflagsh;
+static int hf_retinfoflagsh;
+static int hf_bit1retinfoflagsh;
+static int hf_bit2retinfoflagsh;
+static int hf_bit3retinfoflagsh;
+static int hf_bit4retinfoflagsh;
+static int hf_bit5retinfoflagsh;
+static int hf_bit6retinfoflagsh;
+static int hf_bit7retinfoflagsh;
+static int hf_bit8retinfoflagsh;
+static int hf_bit9retinfoflagsh;
+static int hf_bit10retinfoflagsh;
+static int hf_bit11retinfoflagsh;
+static int hf_bit12retinfoflagsh;
+static int hf_bit13retinfoflagsh;
+static int hf_bit14retinfoflagsh;
+static int hf_bit15retinfoflagsh;
+static int hf_bit16retinfoflagsh;
+static int hf_bit1lflags;
+static int hf_bit2lflags;
+static int hf_bit3lflags;
+static int hf_bit4lflags;
+static int hf_bit5lflags;
+static int hf_bit6lflags;
+static int hf_bit7lflags;
+static int hf_bit8lflags;
+static int hf_bit9lflags;
+static int hf_bit10lflags;
+static int hf_bit11lflags;
+static int hf_bit12lflags;
+static int hf_bit13lflags;
+static int hf_bit14lflags;
+static int hf_bit15lflags;
+static int hf_bit16lflags;
+static int hf_l1flagsl;
+static int hf_l1flagsh;
+static int hf_bit1l1flagsl;
+static int hf_bit2l1flagsl;
+static int hf_bit3l1flagsl;
+static int hf_bit4l1flagsl;
+static int hf_bit5l1flagsl;
+static int hf_bit6l1flagsl;
+static int hf_bit7l1flagsl;
+static int hf_bit8l1flagsl;
+static int hf_bit9l1flagsl;
+static int hf_bit10l1flagsl;
+static int hf_bit11l1flagsl;
+static int hf_bit12l1flagsl;
+static int hf_bit13l1flagsl;
+static int hf_bit14l1flagsl;
+static int hf_bit15l1flagsl;
+static int hf_bit16l1flagsl;
+static int hf_bit1l1flagsh;
+static int hf_bit2l1flagsh;
+static int hf_bit3l1flagsh;
+static int hf_bit4l1flagsh;
+static int hf_bit5l1flagsh;
+static int hf_bit6l1flagsh;
+static int hf_bit7l1flagsh;
+static int hf_bit8l1flagsh;
+static int hf_bit9l1flagsh;
+static int hf_bit10l1flagsh;
+static int hf_bit11l1flagsh;
+static int hf_bit12l1flagsh;
+static int hf_bit13l1flagsh;
+static int hf_bit14l1flagsh;
+static int hf_bit15l1flagsh;
+static int hf_bit16l1flagsh;
+static int hf_nds_tree_name;
+static int hf_nds_reply_error;
+static int hf_nds_net;
+static int hf_nds_node;
+static int hf_nds_socket;
+static int hf_add_ref_ip;
+static int hf_add_ref_udp;
+static int hf_add_ref_tcp;
+static int hf_referral_record;
+static int hf_referral_addcount;
+static int hf_nds_port;
+static int hf_mv_string;
+static int hf_nds_syntax;
+static int hf_value_string;
+static int hf_nds_buffer_size;
+static int hf_nds_ver;
+static int hf_nds_nflags;
+static int hf_nds_scope;
+static int hf_nds_name;
+static int hf_nds_comm_trans;
+static int hf_nds_tree_trans;
+static int hf_nds_iteration;
+static int hf_nds_eid;
+static int hf_nds_info_type;
+static int hf_nds_all_attr;
+static int hf_nds_req_flags;
+static int hf_nds_attr;
+static int hf_nds_crc;
+static int hf_nds_referrals;
+static int hf_nds_result_flags;
+static int hf_nds_tag_string;
+static int hf_value_bytes;
+static int hf_replica_type;
+static int hf_replica_state;
+static int hf_replica_number;
+static int hf_min_nds_ver;
+static int hf_nds_ver_include;
+static int hf_nds_ver_exclude;
+/* static int hf_nds_es; */
+static int hf_es_type;
+/* static int hf_delim_string; */
+static int hf_rdn_string;
+static int hf_nds_revent;
+static int hf_nds_rnum;
+static int hf_nds_name_type;
+static int hf_nds_rflags;
+static int hf_nds_eflags;
+static int hf_nds_depth;
+static int hf_nds_class_def_type;
+static int hf_nds_classes;
+static int hf_nds_return_all_classes;
+static int hf_nds_stream_flags;
+static int hf_nds_stream_name;
+static int hf_nds_file_handle;
+static int hf_nds_file_size;
+static int hf_nds_dn_output_type;
+static int hf_nds_nested_output_type;
+static int hf_nds_output_delimiter;
+static int hf_nds_output_entry_specifier;
+static int hf_es_value;
+static int hf_es_rdn_count;
+static int hf_nds_replica_num;
+static int hf_nds_event_num;
+static int hf_es_seconds;
+static int hf_nds_compare_results;
+static int hf_nds_parent;
+static int hf_nds_name_filter;
+static int hf_nds_class_filter;
+static int hf_nds_time_filter;
+static int hf_nds_partition_root_id;
+static int hf_nds_replicas;
+static int hf_nds_purge;
+static int hf_nds_local_partition;
+static int hf_partition_busy;
+static int hf_nds_number_of_changes;
+static int hf_sub_count;
+static int hf_nds_revision;
+static int hf_nds_base_class;
+static int hf_nds_relative_dn;
+/* static int hf_nds_root_dn; */
+/* static int hf_nds_parent_dn; */
+static int hf_deref_base;
+/* static int hf_nds_entry_info; */
+static int hf_nds_base;
+static int hf_nds_privileges;
+static int hf_nds_vflags;
+static int hf_nds_value_len;
+static int hf_nds_cflags;
+static int hf_nds_acflags;
+static int hf_nds_asn1;
+static int hf_nds_upper;
+static int hf_nds_lower;
+static int hf_nds_trustee_dn;
+static int hf_nds_attribute_dn;
+static int hf_nds_acl_add;
+static int hf_nds_acl_del;
+static int hf_nds_att_add;
+static int hf_nds_att_del;
+static int hf_nds_keep;
+static int hf_nds_new_rdn;
+static int hf_nds_time_delay;
+static int hf_nds_root_name;
+static int hf_nds_new_part_id;
+static int hf_nds_child_part_id;
+static int hf_nds_master_part_id;
+static int hf_nds_target_name;
+static int hf_nds_super;
+static int hf_pingflags2;
+static int hf_bit1pingflags2;
+static int hf_bit2pingflags2;
+static int hf_bit3pingflags2;
+static int hf_bit4pingflags2;
+static int hf_bit5pingflags2;
+static int hf_bit6pingflags2;
+static int hf_bit7pingflags2;
+static int hf_bit8pingflags2;
+static int hf_bit9pingflags2;
+static int hf_bit10pingflags2;
+static int hf_bit11pingflags2;
+static int hf_bit12pingflags2;
+static int hf_bit13pingflags2;
+static int hf_bit14pingflags2;
+static int hf_bit15pingflags2;
+static int hf_bit16pingflags2;
+static int hf_pingflags1;
+static int hf_bit1pingflags1;
+static int hf_bit2pingflags1;
+static int hf_bit3pingflags1;
+static int hf_bit4pingflags1;
+static int hf_bit5pingflags1;
+static int hf_bit6pingflags1;
+static int hf_bit7pingflags1;
+static int hf_bit8pingflags1;
+static int hf_bit9pingflags1;
+static int hf_bit10pingflags1;
+static int hf_bit11pingflags1;
+static int hf_bit12pingflags1;
+static int hf_bit13pingflags1;
+static int hf_bit14pingflags1;
+static int hf_bit15pingflags1;
+static int hf_bit16pingflags1;
+static int hf_pingpflags1;
+static int hf_bit1pingpflags1;
+static int hf_bit2pingpflags1;
+static int hf_bit3pingpflags1;
+static int hf_bit4pingpflags1;
+static int hf_bit5pingpflags1;
+static int hf_bit6pingpflags1;
+static int hf_bit7pingpflags1;
+static int hf_bit8pingpflags1;
+static int hf_bit9pingpflags1;
+static int hf_bit10pingpflags1;
+static int hf_bit11pingpflags1;
+static int hf_bit12pingpflags1;
+static int hf_bit13pingpflags1;
+static int hf_bit14pingpflags1;
+static int hf_bit15pingpflags1;
+static int hf_bit16pingpflags1;
+static int hf_pingvflags1;
+static int hf_bit1pingvflags1;
+static int hf_bit2pingvflags1;
+static int hf_bit3pingvflags1;
+static int hf_bit4pingvflags1;
+static int hf_bit5pingvflags1;
+static int hf_bit6pingvflags1;
+static int hf_bit7pingvflags1;
+static int hf_bit8pingvflags1;
+static int hf_bit9pingvflags1;
+static int hf_bit10pingvflags1;
+static int hf_bit11pingvflags1;
+static int hf_bit12pingvflags1;
+static int hf_bit13pingvflags1;
+static int hf_bit14pingvflags1;
+static int hf_bit15pingvflags1;
+static int hf_bit16pingvflags1;
+static int hf_nds_letter_ver;
+static int hf_nds_os_majver;
+static int hf_nds_os_minver;
+static int hf_nds_lic_flags;
+static int hf_nds_ds_time;
+static int hf_nds_ping_version;
+static int hf_nds_search_scope;
+static int hf_nds_num_objects;
+static int hf_siflags;
+static int hf_bit1siflags;
+static int hf_bit2siflags;
+static int hf_bit3siflags;
+static int hf_bit4siflags;
+static int hf_bit5siflags;
+static int hf_bit6siflags;
+static int hf_bit7siflags;
+static int hf_bit8siflags;
+static int hf_bit9siflags;
+static int hf_bit10siflags;
+static int hf_bit11siflags;
+static int hf_bit12siflags;
+static int hf_bit13siflags;
+static int hf_bit14siflags;
+static int hf_bit15siflags;
+static int hf_bit16siflags;
+static int hf_nds_segments;
+static int hf_nds_segment;
+static int hf_nds_segment_overlap;
+static int hf_nds_segment_overlap_conflict;
+static int hf_nds_segment_multiple_tails;
+static int hf_nds_segment_too_long_segment;
+static int hf_nds_segment_error;
+static int hf_nds_segment_count;
+static int hf_nds_reassembled_length;
+static int hf_nds_verb2b_req_flags;
+static int hf_ncp_ip_address;
+static int hf_ncp_copyright;
+static int hf_ndsprot1flag;
+static int hf_ndsprot2flag;
+static int hf_ndsprot3flag;
+static int hf_ndsprot4flag;
+static int hf_ndsprot5flag;
+static int hf_ndsprot6flag;
+static int hf_ndsprot7flag;
+static int hf_ndsprot8flag;
+static int hf_ndsprot9flag;
+static int hf_ndsprot10flag;
+static int hf_ndsprot11flag;
+static int hf_ndsprot12flag;
+static int hf_ndsprot13flag;
+static int hf_ndsprot14flag;
+static int hf_ndsprot15flag;
+static int hf_ndsprot16flag;
+static int hf_nds_svr_dst_name;
+static int hf_nds_tune_mark;
+/* static int hf_nds_create_time; */
+static int hf_srvr_param_number;
+static int hf_srvr_param_boolean;
+static int hf_srvr_param_string;
+static int hf_nds_svr_time;
+static int hf_nds_crt_time;
+static int hf_nds_number_of_items;
+static int hf_nds_compare_attributes;
+static int hf_nds_read_attribute;
+static int hf_nds_write_add_delete_attribute;
+static int hf_nds_add_delete_self;
+static int hf_nds_privilege_not_defined;
+static int hf_nds_supervisor;
+static int hf_nds_inheritance_control;
+static int hf_nds_browse_entry;
+static int hf_nds_add_entry;
+static int hf_nds_delete_entry;
+static int hf_nds_rename_entry;
+static int hf_nds_supervisor_entry;
+static int hf_nds_entry_privilege_not_defined;
+static int hf_nds_iterator;
+static int hf_ncp_nds_iterverb;
+static int hf_iter_completion_code;
+/* static int hf_nds_iterobj; */
+static int hf_iter_verb_completion_code;
+static int hf_iter_ans;
+static int hf_positionable;
+static int hf_num_skipped;
+static int hf_num_to_skip;
+static int hf_timelimit;
+static int hf_iter_index;
+static int hf_num_to_get;
+/* static int hf_ret_info_type; */
+static int hf_data_size;
+static int hf_this_count;
+static int hf_max_entries;
+static int hf_move_position;
+static int hf_iter_copy;
+static int hf_iter_position;
+static int hf_iter_search;
+static int hf_iter_other;
+static int hf_nds_oid;
+static int hf_ncp_bytes_actually_trans_64;
+static int hf_sap_name;
+static int hf_os_name;
+static int hf_vendor_name;
+static int hf_hardware_name;
+static int hf_no_request_record_found;
+static int hf_search_modifier;
+static int hf_search_pattern;
+static int hf_nds_acl_protected_attribute;
+static int hf_nds_acl_subject;
+static int hf_nds_acl_privileges;
+
+static expert_field ei_ncp_file_rights_change;
+static expert_field ei_ncp_completion_code;
+static expert_field ei_nds_reply_error;
+static expert_field ei_ncp_destroy_connection;
+static expert_field ei_nds_iteration;
+static expert_field ei_ncp_eid;
+static expert_field ei_ncp_file_handle;
+static expert_field ei_ncp_connection_destroyed;
+static expert_field ei_ncp_no_request_record_found;
+static expert_field ei_ncp_file_rights;
+static expert_field ei_iter_verb_completion_code;
+static expert_field ei_ncp_connection_request;
+static expert_field ei_ncp_connection_status;
+static expert_field ei_ncp_op_lock_handle;
+static expert_field ei_ncp_effective_rights;
+static expert_field ei_ncp_server;
+static expert_field ei_ncp_invalid_offset;
+static expert_field ei_ncp_address_type;
+static expert_field ei_ncp_value_too_large;
""")
# Look at all packet types in the packets collection, and cull information
@@ -6553,7 +6553,7 @@ static expert_field ei_ncp_value_too_large = EI_INIT;
sorted_vars = list(variables_used_hash.values())
sorted_vars.sort()
for var in sorted_vars:
- print("static int " + var.HFName() + " = -1;")
+ print("static int " + var.HFName() + ";")
# Print the value_string's
@@ -8486,7 +8486,7 @@ proto_register_ncp2222(void)
{ "Vendor Name", "ncp.vendor_name", FT_STRING, BASE_NONE, NULL, 0x0, NULL, HFILL }},
{ &hf_hardware_name,
- { "Hardware Name", "ncp.harware_name", FT_STRING, BASE_NONE, NULL, 0x0, NULL, HFILL }},
+ { "Hardware Name", "ncp.hardware_name", FT_STRING, BASE_NONE, NULL, 0x0, NULL, HFILL }},
{ &hf_no_request_record_found,
{ "No request record found. Parsing is impossible.", "ncp.no_request_record_found", FT_NONE, BASE_NONE, NULL, 0x0, NULL, HFILL }},
diff --git a/tools/netscreen2dump.py b/tools/netscreen2dump.py
deleted file mode 100755
index 7aaac94b..00000000
--- a/tools/netscreen2dump.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/env python
-"""
-Converts netscreen snoop hex-dumps to a hex-dump that text2pcap can read.
-
-Copyright (c) 2004 by Gilbert Ramirez <gram@alumni.rice.edu>
-
-SPDX-License-Identifier: GPL-2.0-or-later
-"""
-
-import sys
-import re
-import os
-import stat
-import time
-
-
-class OutputFile:
- TIMER_MAX = 99999.9
-
- def __init__(self, name, base_time):
- try:
- self.fh = open(name, "w")
- except IOError, err:
- sys.exit(err)
-
- self.base_time = base_time
- self.prev_timestamp = 0.0
-
- def PrintPacket(self, timestamp, datalines):
- # What do to with the timestamp? I need more data about what
- # the netscreen timestamp is, then I can generate one for the text file.
- # print("TS:", timestamp.group("time"))
- try:
- timestamp = float(timestamp.group("time"))
- except ValueError:
- sys.exit("Unable to convert '%s' to floating point." %
- (timestamp,))
-
- # Did we wrap around the timeer max?
- if timestamp < self.prev_timestamp:
- self.base_time += self.TIMER_MAX
-
- self.prev_timestamp = timestamp
-
- packet_timestamp = self.base_time + timestamp
-
- # Determine the time string to print
- gmtime = time.gmtime(packet_timestamp)
- subsecs = packet_timestamp - int(packet_timestamp)
- assert subsecs <= 0
- subsecs = int(subsecs * 10)
-
- print >> self.fh, "%s.%d" % (time.strftime("%Y-%m-%d %H:%M:%S", gmtime), \
- subsecs)
-
- # Print the packet data
- offset = 0
- for lineno, hexgroup in datalines:
- hexline = hexgroup.group("hex")
- hexpairs = hexline.split()
- print >> self.fh, "%08x %s" % (offset, hexline)
- offset += len(hexpairs)
-
- # Blank line
- print >> self.fh
-
-
-# Find a timestamp line
-re_timestamp = re.compile(r"^(?P<time>\d+\.\d): [\w/]+\((?P<io>.)\)(:| len=)")
-
-# Find a hex dump line
-re_hex_line = re.compile(r"(?P<hex>([0-9a-f]{2} ){1,16})\s+(?P<ascii>.){1,16}")
-
-
-def run(input_filename, output_filename):
- try:
- ifh = open(input_filename, "r")
- except IOError, err:
- sys.exit(err)
-
- # Get the file's creation time.
- try:
- ctime = os.stat(input_filename)[stat.ST_CTIME]
- except OSError, err:
- sys.exit(err)
-
- output_file = OutputFile(output_filename, ctime)
-
- timestamp = None
- datalines = []
- lineno = 0
-
- for line in ifh.xreadlines():
- lineno += 1
- # If we have no timestamp yet, look for one
- if not timestamp:
- m = re_timestamp.search(line)
- if m:
- timestamp = m
-
- # Otherwise, look for hex dump lines
- else:
- m = re_hex_line.search(line)
- if m:
- datalines.append((lineno, m))
- else:
- # If we have been gathering hex dump lines,
- # and this line is not a hex dump line, then the hex dump
- # has finished, and so has the packet. So print the packet
- # and reset our variables so we can look for the next packet.
- if datalines:
- output_file.PrintPacket(timestamp, datalines)
- timestamp = None
- datalines = []
-
- # At the end of the file we may still have hex dump data in memory.
- # If so, print the packet
- if datalines:
- output_file.PrintPacket(timestamp, datalines)
- timestamp = None
- datalines = []
-
-
-def usage():
- print >> sys.stderr, "Usage: netscreen2dump.py netscreen-dump-file new-dump-file"
- sys.exit(1)
-
-
-def main():
- if len(sys.argv) != 3:
- usage()
-
- run(sys.argv[1], sys.argv[2])
-
-
-if __name__ == "__main__":
- main()
diff --git a/tools/parse_xml2skinny_dissector.py b/tools/parse_xml2skinny_dissector.py
index b13776e3..fc9933e6 100755
--- a/tools/parse_xml2skinny_dissector.py
+++ b/tools/parse_xml2skinny_dissector.py
@@ -239,7 +239,7 @@ def xml2obj(src):
def get_req_resp_keys(self, req_resp):
for field in self._children:
key = field.get_req_resp_key()
- if not key is None and not key in req_resp:
+ if key is not None and key not in req_resp:
req_resp.append(key)
def declaration(self):
@@ -292,7 +292,7 @@ def xml2obj(src):
self.decr_indent()
ret += self.indent_out('}\n')
- return ret;
+ return ret
class Integer(DataNode):
def __init__(self):
@@ -343,12 +343,12 @@ def xml2obj(src):
ret += self.indent_out('{\n')
self.incr_indent()
variable = 'counter_%d' %indentation
- ret += self.indent_out('uint32_t %s = 0;\n' %(variable));
+ ret += self.indent_out('uint32_t %s = 0;\n' %(variable))
if self.size_fieldname:
ret += self.indent_out('ptvcursor_add_text_with_subtree(cursor, SUBTREE_UNDEFINED_LENGTH, ett_skinny_tree, "%s [ref:%s = %%d, max:%s]", %s);\n' %(self.name, self.size_fieldname, size, self.size_fieldname))
else:
ret += self.indent_out('ptvcursor_add_text_with_subtree(cursor, SUBTREE_UNDEFINED_LENGTH, ett_skinny_tree, "%s [max:%s]");\n' %(self.name, size))
- ret += self.indent_out('for (%s = 0; %s < %s; %s++) {\n' %(variable, variable, size, variable));
+ ret += self.indent_out('for (%s = 0; %s < %s; %s++) {\n' %(variable, variable, size, variable))
if self.basemessage.dynamic == "no" and self.size_fieldname:
self.incr_indent()
ret += self.indent_out('if (%s < %s) {\n' %(variable,self.size_fieldname))
@@ -361,14 +361,14 @@ def xml2obj(src):
elif (self.intsize == 2):
ret += self.indent_out('%s = tvb_get_ntohs(ptvcursor_tvbuff(cursor), ptvcursor_current_offset(cursor));\n' %(self.name))
else:
- ret += self.indent_out('%s = tvb_get_guint8(ptvcursor_tvbuff(cursor), ptvcursor_current_offset(cursor));\n' %(self.name))
+ ret += self.indent_out('%s = tvb_get_uint8(ptvcursor_tvbuff(cursor), ptvcursor_current_offset(cursor));\n' %(self.name))
else:
if (self.intsize == 4):
ret += self.indent_out('%s = tvb_get_letohl(ptvcursor_tvbuff(cursor), ptvcursor_current_offset(cursor));\n' %(self.name))
elif (self.intsize == 2):
ret += self.indent_out('%s = tvb_get_letohs(ptvcursor_tvbuff(cursor), ptvcursor_current_offset(cursor));\n' %(self.name))
else:
- ret += self.indent_out('%s = tvb_get_guint8(ptvcursor_tvbuff(cursor), ptvcursor_current_offset(cursor));\n' %(self.name))
+ ret += self.indent_out('%s = tvb_get_uint8(ptvcursor_tvbuff(cursor), ptvcursor_current_offset(cursor));\n' %(self.name))
if self.name in si_fields.keys():
if self.endianness == "big":
@@ -417,7 +417,6 @@ def xml2obj(src):
def declaration(self):
ret = ''
- prevvalue = 0
enum_sizes = {'uint32':4,'uint16':2,'uint8':1}
if self.type in enum_sizes:
self.intsize = enum_sizes[self.type]
@@ -477,12 +476,12 @@ def xml2obj(src):
ret += self.indent_out('{\n')
self.incr_indent()
variable = 'counter_%d' %indentation
- ret += self.indent_out('uint32_t %s = 0;\n' %(variable));
+ ret += self.indent_out('uint32_t %s = 0;\n' %(variable))
if self.size_fieldname:
ret += self.indent_out('ptvcursor_add_text_with_subtree(cursor, SUBTREE_UNDEFINED_LENGTH, ett_skinny_tree, "%s [ref: %s = %%d, max:%s]", %s);\n' %(self.name, self.size_fieldname, size, self.size_fieldname))
else:
ret += self.indent_out('ptvcursor_add_text_with_subtree(cursor, SUBTREE_UNDEFINED_LENGTH, ett_skinny_tree, "%s [max:%s]");\n' %(self.name, size))
- ret += self.indent_out('for (%s = 0; %s < %s; %s++) {\n' %(variable, variable, size, variable));
+ ret += self.indent_out('for (%s = 0; %s < %s; %s++) {\n' %(variable, variable, size, variable))
if self.basemessage.dynamic == "no" and self.size_fieldname:
self.incr_indent()
ret += self.indent_out('if (%s < %s) {\n' %(variable,self.size_fieldname))
@@ -497,7 +496,7 @@ def xml2obj(src):
elif (self.intsize == 2):
ret += self.indent_out('%s = tvb_get_letohs(ptvcursor_tvbuff(cursor), ptvcursor_current_offset(cursor));\n' %(self.name))
else:
- ret += self.indent_out('%s = tvb_get_guint8(ptvcursor_tvbuff(cursor), ptvcursor_current_offset(cursor));\n' %(self.name))
+ ret += self.indent_out('%s = tvb_get_uint8(ptvcursor_tvbuff(cursor), ptvcursor_current_offset(cursor));\n' %(self.name))
ret += self.indent_out('ptvcursor_add(cursor, hf_skinny_%s, %d, %s);\n' %(self.name, self.intsize, endian))
@@ -755,9 +754,9 @@ def xml2obj(src):
def dissect(self):
ret = ''
if self.make_additional_info == "yes":
- ret += self.indent_out('read_skinny_ipv4or6(cursor, &%s);\n' %(self.name));
- ret += self.indent_out('dissect_skinny_ipv4or6(cursor, hf_skinny_%s_ipv4, hf_skinny_%s_ipv6);\n' %(self.name, self.name));
- return ret;
+ ret += self.indent_out('read_skinny_ipv4or6(cursor, &%s);\n' %(self.name))
+ ret += self.indent_out('dissect_skinny_ipv4or6(cursor, hf_skinny_%s_ipv4, hf_skinny_%s_ipv6);\n' %(self.name, self.name))
+ return ret
class XML(DataNode):
def __init__(self):
@@ -848,7 +847,7 @@ def xml2obj(src):
self.incr_indent()
if debug:
ret += self.indent_out('/* start struct : %s / size: %d */\n' %(self.name, self.intsize))
- ret += self.indent_out('uint32_t %s = 0;\n' %(variable));
+ ret += self.indent_out('uint32_t %s = 0;\n' %(variable))
if self.size_fieldname:
ret += self.indent_out('ptvcursor_add_text_with_subtree(cursor, SUBTREE_UNDEFINED_LENGTH, ett_skinny_tree, "%s [ref:%s = %%d, max:%s]", %s);\n' %(self.name, self.size_fieldname, self.maxsize, self.size_fieldname))
if self.maxsize:
@@ -859,7 +858,7 @@ def xml2obj(src):
else:
ret += self.indent_out('ptvcursor_add_text_with_subtree(cursor, SUBTREE_UNDEFINED_LENGTH, ett_skinny_tree, "%s [max:%s]");\n' %(self.name, size))
- ret += self.indent_out('for (%s = 0; %s < %s; %s++) {\n' %(variable, variable, size, variable));
+ ret += self.indent_out('for (%s = 0; %s < %s; %s++) {\n' %(variable, variable, size, variable))
if self.basemessage.dynamic == "no" and self.size_fieldname:
self.incr_indent()
ret += self.indent_out('if (%s < %s) {\n' %(variable,self.size_fieldname))
@@ -906,7 +905,7 @@ def xml2obj(src):
if self.size_fieldname:
ret += self.indent_out('} else {\n')
self.incr_indent()
- ret += self.indent_out('ptvcursor_advance(cursor, (%s * %s));%s\n' %(self.size_fieldname, self.intsize, ' /* guard kicked in -> skip the rest */' if debug else ''));
+ ret += self.indent_out('ptvcursor_advance(cursor, (%s * %s));%s\n' %(self.size_fieldname, self.intsize, ' /* guard kicked in -> skip the rest */' if debug else ''))
self.decr_indent()
ret += self.indent_out('} /* end struct size guard */\n' if debug else '}\n')
@@ -948,7 +947,6 @@ def xml2obj(src):
def dissect(self):
ret = ''
ifblock = self.indent_out('if')
- skip = 0
#ret += self.indent_out('/* Union : %s / maxsize: %s */\n' %(self.name, self.maxsize))
if (self.fields is not None and len(self.fields)):
diff --git a/tools/pidl/idl.yp b/tools/pidl/idl.yp
index 08f982a9..8889632b 100644
--- a/tools/pidl/idl.yp
+++ b/tools/pidl/idl.yp
@@ -680,12 +680,29 @@ sub parse_file($$)
undef $/;
my $cpp = $ENV{CPP};
my $options = "";
- if (! defined $cpp) {
- if (defined $ENV{CC}) {
- $cpp = "$ENV{CC}";
- $options = "-E";
- } else {
- $cpp = "cpp";
+ if ($^O eq "MSWin32") {
+ $cpp = "cpp";
+ }else{
+ if (! defined $cpp) {
+ if (defined $ENV{CC}) {
+ $cpp = "$ENV{CC}";
+ $options = "-E";
+ } else {
+ #
+ # If cc is Clang-based don't use cpp, as
+ # at least some versions of Clang, cpp
+ # doesn't strip // comments, but cc -E
+ # does.
+ #
+
+ my $cc_version = `cc --version`;
+ if ($cc_version =~ /clang/) {
+ $cpp = "cc";
+ $options = "-E"
+ } else {
+ $cpp = "cpp";
+ }
+ }
}
}
my $includes = join('',map { " -I$_" } @$incdirs);
diff --git a/tools/pidl/lib/Parse/Pidl.pm b/tools/pidl/lib/Parse/Pidl.pm
index 40e36739..e4c39b3d 100644
--- a/tools/pidl/lib/Parse/Pidl.pm
+++ b/tools/pidl/lib/Parse/Pidl.pm
@@ -12,7 +12,7 @@ require Exporter;
@EXPORT_OK = qw(warning error fatal $VERSION);
use strict;
-
+use warnings;
use vars qw ( $VERSION );
$VERSION = '0.02';
diff --git a/tools/pidl/lib/Parse/Pidl/CUtil.pm b/tools/pidl/lib/Parse/Pidl/CUtil.pm
index 9deb6ee1..ccd8fcc3 100644
--- a/tools/pidl/lib/Parse/Pidl/CUtil.pm
+++ b/tools/pidl/lib/Parse/Pidl/CUtil.pm
@@ -11,6 +11,7 @@ use vars qw($VERSION);
$VERSION = '0.01';
use strict;
+use warnings;
sub get_pointer_to($)
{
diff --git a/tools/pidl/lib/Parse/Pidl/Compat.pm b/tools/pidl/lib/Parse/Pidl/Compat.pm
index b8abcb88..062a53b8 100644
--- a/tools/pidl/lib/Parse/Pidl/Compat.pm
+++ b/tools/pidl/lib/Parse/Pidl/Compat.pm
@@ -8,6 +8,7 @@ package Parse::Pidl::Compat;
use Parse::Pidl qw(warning);
use Parse::Pidl::Util qw(has_property);
use strict;
+use warnings;
use vars qw($VERSION);
$VERSION = '0.01';
diff --git a/tools/pidl/lib/Parse/Pidl/Dump.pm b/tools/pidl/lib/Parse/Pidl/Dump.pm
index 4e623db6..5d241b81 100644
--- a/tools/pidl/lib/Parse/Pidl/Dump.pm
+++ b/tools/pidl/lib/Parse/Pidl/Dump.pm
@@ -27,6 +27,7 @@ $VERSION = '0.01';
@EXPORT_OK = qw(DumpType DumpTypedef DumpStruct DumpEnum DumpBitmap DumpUnion DumpFunction);
use strict;
+use warnings;
use Parse::Pidl::Util qw(has_property);
my($res);
diff --git a/tools/pidl/lib/Parse/Pidl/Expr.pm b/tools/pidl/lib/Parse/Pidl/Expr.pm
index 24581d29..967d6876 100644
--- a/tools/pidl/lib/Parse/Pidl/Expr.pm
+++ b/tools/pidl/lib/Parse/Pidl/Expr.pm
@@ -10,6 +10,7 @@
package Parse::Pidl::Expr;
use vars qw ( @ISA );
use strict;
+use warnings;
@ISA= qw ( Parse::Yapp::Driver );
use Parse::Yapp::Driver;
diff --git a/tools/pidl/lib/Parse/Pidl/IDL.pm b/tools/pidl/lib/Parse/Pidl/IDL.pm
index 6927c892..46f9813c 100644
--- a/tools/pidl/lib/Parse/Pidl/IDL.pm
+++ b/tools/pidl/lib/Parse/Pidl/IDL.pm
@@ -10,6 +10,7 @@
package Parse::Pidl::IDL;
use vars qw ( @ISA );
use strict;
+use warnings;
@ISA= qw ( Parse::Yapp::Driver );
use Parse::Yapp::Driver;
@@ -2646,12 +2647,29 @@ sub parse_file($$)
undef $/;
my $cpp = $ENV{CPP};
my $options = "";
- if (! defined $cpp) {
- if (defined $ENV{CC}) {
- $cpp = "$ENV{CC}";
- $options = "-E";
- } else {
- $cpp = "cpp";
+ if ($^O eq "MSWin32") {
+ $cpp = "cpp";
+ }else{
+ if (! defined $cpp) {
+ if (defined $ENV{CC}) {
+ $cpp = "$ENV{CC}";
+ $options = "-E";
+ } else {
+ #
+ # If cc is Clang-based don't use cpp, as
+ # at least some versions of Clang, cpp
+ # doesn't strip // comments, but cc -E
+ # does.
+ #
+
+ my $cc_version = `cc --version`;
+ if ($cc_version =~ /clang/) {
+ $cpp = "cc";
+ $options = "-E"
+ } else {
+ $cpp = "cpp";
+ }
+ }
}
}
my $includes = join('',map { " -I$_" } @$incdirs);
diff --git a/tools/pidl/lib/Parse/Pidl/NDR.pm b/tools/pidl/lib/Parse/Pidl/NDR.pm
index 003156e3..18db6cfe 100644
--- a/tools/pidl/lib/Parse/Pidl/NDR.pm
+++ b/tools/pidl/lib/Parse/Pidl/NDR.pm
@@ -38,6 +38,7 @@ $VERSION = '0.01';
@EXPORT_OK = qw(GetElementLevelTable ParseElement ReturnTypeElement ValidElement align_type mapToScalar ParseType can_contain_deferred is_charset_array);
use strict;
+use warnings;
use Parse::Pidl qw(warning fatal);
use Parse::Pidl::Typelist qw(hasType getType typeIs expandAlias mapScalarType is_fixed_size_scalar);
use Parse::Pidl::Util qw(has_property property_matches);
@@ -57,6 +58,7 @@ my $scalar_alignment = {
'int3264' => 5,
'uint3264' => 5,
'hyper' => 8,
+ 'int64' => 8,
'double' => 8,
'pointer' => 8,
'dlong' => 4,
@@ -64,6 +66,7 @@ my $scalar_alignment = {
'udlongr' => 4,
'DATA_BLOB' => 4,
'string' => 4,
+ 'u16string' => 4,
'string_array' => 4, #???
'time_t' => 4,
'uid_t' => 8,
@@ -80,7 +83,10 @@ my $scalar_alignment = {
'ipv4address' => 4,
'ipv6address' => 4, #16?
'dnsp_name' => 1,
- 'dnsp_string' => 1
+ 'dnsp_string' => 1,
+ 'HRESULT' => 4,
+ 'libndr_flags' => 8,
+ 'ndr_flags_type' => 4,
};
sub GetElementLevelTable($$$)
@@ -115,7 +121,7 @@ sub GetElementLevelTable($$$)
warning($e, "[out] argument `$e->{NAME}' not a pointer") if ($needptrs > $e->{POINTERS});
}
- my $allow_pipe = ($e->{PARENT}->{TYPE} eq "FUNCTION");
+ my $allow_pipe = (($e->{PARENT}->{TYPE} // '') eq "FUNCTION");
my $is_pipe = typeIs($e->{TYPE}, "PIPE");
if ($is_pipe) {
@@ -193,7 +199,7 @@ sub GetElementLevelTable($$$)
$length = $size;
}
- if ($e == $e->{PARENT}->{ELEMENTS}[-1]
+ if ($e == $e->{PARENT}->{ELEMENTS}[-1]
and $e->{PARENT}->{TYPE} ne "FUNCTION") {
$is_surrounding = 1;
}
@@ -252,7 +258,7 @@ sub GetElementLevelTable($$$)
$pt = $pointer_default;
}
- push (@$order, {
+ push (@$order, {
TYPE => "POINTER",
POINTER_TYPE => $pt,
POINTER_INDEX => $pointer_idx,
@@ -260,13 +266,13 @@ sub GetElementLevelTable($$$)
LEVEL => $level
});
- warning($e, "top-level \[out\] pointer `$e->{NAME}' is not a \[ref\] pointer")
+ warning($e, "top-level \[out\] pointer `$e->{NAME}' is not a \[ref\] pointer")
if ($i == 1 and $pt ne "ref" and
- $e->{PARENT}->{TYPE} eq "FUNCTION" and
+ $e->{PARENT}->{TYPE} eq "FUNCTION" and
not has_property($e, "in"));
$pointer_idx++;
-
+
# everything that follows will be deferred
$is_deferred = 1 if ($level ne "TOP");
@@ -283,9 +289,9 @@ sub GetElementLevelTable($$$)
$array_length = $array_size;
$is_varying =0;
}
- }
-
- if (scalar(@size_is) == 0 and has_property($e, "string") and
+ }
+
+ if (scalar(@size_is) == 0 and has_property($e, "string") and
$i == $e->{POINTERS}) {
$is_string = 1;
$is_varying = $is_conformant = has_property($e, "noheader")?0:1;
@@ -307,7 +313,7 @@ sub GetElementLevelTable($$$)
});
$is_deferred = 0;
- }
+ }
}
if ($is_pipe) {
@@ -326,10 +332,10 @@ sub GetElementLevelTable($$$)
if (defined(has_property($e, "subcontext"))) {
my $hdr_size = has_property($e, "subcontext");
my $subsize = has_property($e, "subcontext_size");
- if (not defined($subsize)) {
- $subsize = -1;
+ if (not defined($subsize)) {
+ $subsize = -1;
}
-
+
push (@$order, {
TYPE => "SUBCONTEXT",
HEADER_SIZE => $hdr_size,
@@ -341,7 +347,7 @@ sub GetElementLevelTable($$$)
if (my $switch = has_property($e, "switch_is")) {
push (@$order, {
- TYPE => "SWITCH",
+ TYPE => "SWITCH",
SWITCH_IS => $switch,
IS_DEFERRED => $is_deferred
});
@@ -390,11 +396,11 @@ sub GetTypedefLevelTable($$$$)
}
#####################################################################
-# see if a type contains any deferred data
-sub can_contain_deferred($)
+# see if a type contains any deferred data
+sub can_contain_deferred
{
- sub can_contain_deferred($);
- my ($type) = @_;
+ sub can_contain_deferred;
+ my ($type, @types_visited) = @_;
return 1 unless (hasType($type)); # assume the worst
@@ -402,15 +408,29 @@ sub can_contain_deferred($)
return 0 if (Parse::Pidl::Typelist::is_scalar($type));
- return can_contain_deferred($type->{DATA}) if ($type->{TYPE} eq "TYPEDEF");
+ foreach (@types_visited) {
+ if ($_ == $type) {
+ # we have already encountered this
+ # type, avoid recursion loop here
+ # and return
+ return 0;
+ }
+ }
+
+ return can_contain_deferred($type->{DATA},
+ @types_visited) if ($type->{TYPE} eq "TYPEDEF");
return 0 unless defined($type->{ELEMENTS});
foreach (@{$type->{ELEMENTS}}) {
return 1 if ($_->{POINTERS});
- return 1 if (can_contain_deferred ($_->{TYPE}));
+ push(@types_visited,$type);
+ if (can_contain_deferred ($_->{TYPE},@types_visited)) {
+ pop(@types_visited);
+ return 1;
+ }
+ pop(@types_visited);
}
-
return 0;
}
@@ -419,7 +439,7 @@ sub pointer_type($)
my $e = shift;
return undef unless $e->{POINTERS};
-
+
return "ref" if (has_property($e, "ref"));
return "full" if (has_property($e, "ptr"));
return "sptr" if (has_property($e, "sptr"));
@@ -433,25 +453,25 @@ sub pointer_type($)
#####################################################################
# work out the correct alignment for a structure or union
-sub find_largest_alignment($)
+sub find_largest_alignment
{
- my $s = shift;
+ my ($s, @types_visited) = @_;
my $align = 1;
for my $e (@{$s->{ELEMENTS}}) {
my $a = 1;
-
if ($e->{POINTERS}) {
# this is a hack for NDR64
# the NDR layer translates this into
# an alignment of 4 for NDR and 8 for NDR64
$a = 5;
- } elsif (has_property($e, "subcontext")) {
+ } elsif (has_property($e, "subcontext")) {
$a = 1;
} elsif (has_property($e, "transmit_as")) {
- $a = align_type($e->{PROPERTIES}->{transmit_as});
+ $a = align_type($e->{PROPERTIES}->{transmit_as},
+ @types_visited);
} else {
- $a = align_type($e->{TYPE});
+ $a = align_type($e->{TYPE}, @types_visited);
}
$align = $a if ($align < $a);
@@ -462,37 +482,71 @@ sub find_largest_alignment($)
#####################################################################
# align a type
-sub align_type($)
+sub align_type
{
- sub align_type($);
- my ($e) = @_;
-
+ sub align_type;
+ my ($e, @types_visited) = @_;
if (ref($e) eq "HASH" and $e->{TYPE} eq "SCALAR") {
- return $scalar_alignment->{$e->{NAME}};
+ my $ret = $scalar_alignment->{$e->{NAME}};
+ if (not defined $ret) {
+ warning($e, "no scalar alignment for $e->{NAME}!");
+ return 0;
+ }
+ return $ret;
}
return 0 if ($e eq "EMPTY");
unless (hasType($e)) {
- # it must be an external type - all we can do is guess
+ # it must be an external type - all we can do is guess
# warning($e, "assuming alignment of unknown type '$e' is 4");
return 4;
}
-
my $dt = getType($e);
+ foreach (@types_visited) {
+ if ($_ == $dt) {
+ # Chapt 14 of the DCE 1.1: Remote Procedure Call
+ # specification (available from pubs.opengroup.org)
+ # states:
+ # "The alignment of a structure in the octet stream is
+ # the largest of the alignments of the fields it
+ # contains. These fields may also be constructed types.
+ # The same alignment rules apply recursively to
+ # nested constructed types. "
+ #
+ # in the worst case scenario
+ # struct c1 {
+ # membertypea mema;
+ # membertypeb memb;
+ # struct c1 memc;
+ # }
+ # the nested struct c1 memc when encountered
+ # returns 0 ensuring the alignment will be calculated
+ # based on the other fields
+ return 0;
+ }
+ }
+
+
if ($dt->{TYPE} eq "TYPEDEF") {
- return align_type($dt->{DATA});
+ return align_type($dt->{DATA}, @types_visited);
} elsif ($dt->{TYPE} eq "CONFORMANCE") {
return $dt->{DATA}->{ALIGN};
} elsif ($dt->{TYPE} eq "ENUM") {
- return align_type(Parse::Pidl::Typelist::enum_type_fn($dt));
+ return align_type(Parse::Pidl::Typelist::enum_type_fn($dt),
+ @types_visited);
} elsif ($dt->{TYPE} eq "BITMAP") {
- return align_type(Parse::Pidl::Typelist::bitmap_type_fn($dt));
+ return align_type(Parse::Pidl::Typelist::bitmap_type_fn($dt),
+ @types_visited);
} elsif (($dt->{TYPE} eq "STRUCT") or ($dt->{TYPE} eq "UNION")) {
# Struct/union without body: assume 4
return 4 unless (defined($dt->{ELEMENTS}));
- return find_largest_alignment($dt);
+ my $res;
+ push(@types_visited, $dt);
+ $res = find_largest_alignment($dt, @types_visited);
+ pop(@types_visited);
+ return $res
} elsif (($dt->{TYPE} eq "PIPE")) {
return 5;
}
@@ -539,10 +593,10 @@ sub ParseStruct($$$)
CheckPointerTypes($struct, $pointer_default);
- foreach my $x (@{$struct->{ELEMENTS}})
+ foreach my $x (@{$struct->{ELEMENTS}})
{
my $e = ParseElement($x, $pointer_default, $ms_union);
- if ($x != $struct->{ELEMENTS}[-1] and
+ if ($x != $struct->{ELEMENTS}[-1] and
$e->{LEVELS}[0]->{IS_SURROUNDING}) {
fatal($x, "conformant member not at end of struct");
}
@@ -555,7 +609,7 @@ sub ParseStruct($$$)
$surrounding = $e;
}
- if (defined $e->{TYPE} && $e->{TYPE} eq "string"
+ if (defined $e->{TYPE} && Parse::Pidl::Typelist::is_string_type($e->{TYPE})
&& property_matches($e, "flag", ".*LIBNDR_FLAG_STR_CONFORMANT.*")) {
$surrounding = $struct->{ELEMENTS}[-1];
}
@@ -564,7 +618,7 @@ sub ParseStruct($$$)
if ($struct->{NAME}) {
$align = align_type($struct->{NAME});
}
-
+
return {
TYPE => "STRUCT",
NAME => $struct->{NAME},
@@ -601,7 +655,7 @@ sub ParseUnion($$)
CheckPointerTypes($e, $pointer_default);
- foreach my $x (@{$e->{ELEMENTS}})
+ foreach my $x (@{$e->{ELEMENTS}})
{
my $t;
if ($x->{TYPE} eq "EMPTY") {
@@ -793,7 +847,7 @@ sub ParseFunction($$$$)
if ($d->{RETURN_TYPE} ne "void") {
$rettype = expandAlias($d->{RETURN_TYPE});
}
-
+
return {
NAME => $d->{NAME},
TYPE => "FUNCTION",
@@ -885,7 +939,7 @@ sub ParseInterface($)
$version = "0.0";
- if(defined $idl->{PROPERTIES}->{version}) {
+ if(defined $idl->{PROPERTIES}->{version}) {
my @if_version = split(/\./, $idl->{PROPERTIES}->{version});
if ($if_version[0] == $idl->{PROPERTIES}->{version}) {
$version = $idl->{PROPERTIES}->{version};
@@ -901,9 +955,9 @@ sub ParseInterface($)
@endpoints = split /,/, $idl->{PROPERTIES}->{endpoint};
}
- return {
+ return {
NAME => $idl->{NAME},
- UUID => lc(has_property($idl, "uuid")),
+ UUID => lc(has_property($idl, "uuid") // ''),
VERSION => $version,
TYPE => "INTERFACE",
PROPERTIES => $idl->{PROPERTIES},
@@ -925,7 +979,7 @@ sub Parse($)
return undef unless (defined($idl));
Parse::Pidl::NDR::Validate($idl);
-
+
my @ndr = ();
foreach (@{$idl}) {
@@ -997,10 +1051,10 @@ sub ContainsDeferred($$)
while ($l = GetNextLevel($e,$l))
{
- return 1 if ($l->{IS_DEFERRED});
+ return 1 if ($l->{IS_DEFERRED});
return 1 if ($l->{CONTAINS_DEFERRED});
- }
-
+ }
+
return 0;
}
@@ -1094,6 +1148,7 @@ my %property_list = (
"gensize" => ["TYPEDEF", "STRUCT", "UNION"],
"value" => ["ELEMENT"],
"flag" => ["ELEMENT", "TYPEDEF", "STRUCT", "UNION", "ENUM", "BITMAP", "PIPE"],
+ "max_recursion" => ["ELEMENT"],
# generic
"public" => ["FUNCTION", "TYPEDEF", "STRUCT", "UNION", "ENUM", "BITMAP", "PIPE"],
@@ -1252,7 +1307,7 @@ sub ValidElement($)
has_property($e, "relative") or
has_property($e, "relative_short") or
has_property($e, "ref"))) {
- fatal($e, el_name($e) . " : pointer properties on non-pointer element\n");
+ fatal($e, el_name($e) . " : pointer properties on non-pointer element\n");
}
}
@@ -1298,7 +1353,7 @@ sub ValidUnion($)
ValidProperties($union,"UNION");
- if (has_property($union->{PARENT}, "nodiscriminant") and
+ if (has_property($union->{PARENT}, "nodiscriminant") and
has_property($union->{PARENT}, "switch_type")) {
fatal($union->{PARENT}, $union->{PARENT}->{NAME} . ": switch_type(" . $union->{PARENT}->{PROPERTIES}->{switch_type} . ") on union without discriminant");
}
@@ -1308,12 +1363,12 @@ sub ValidUnion($)
foreach my $e (@{$union->{ELEMENTS}}) {
$e->{PARENT} = $union;
- if (defined($e->{PROPERTIES}->{default}) and
+ if (defined($e->{PROPERTIES}->{default}) and
defined($e->{PROPERTIES}->{case})) {
fatal($e, "Union member $e->{NAME} can not have both default and case properties!");
}
-
- unless (defined ($e->{PROPERTIES}->{default}) or
+
+ unless (defined ($e->{PROPERTIES}->{default}) or
defined ($e->{PROPERTIES}->{case})) {
fatal($e, "Union member $e->{NAME} must have default or case property");
}
@@ -1386,7 +1441,7 @@ sub ValidType($)
{
my ($t) = @_;
- {
+ {
TYPEDEF => \&ValidTypedef,
STRUCT => \&ValidStruct,
UNION => \&ValidUnion,
@@ -1410,29 +1465,29 @@ sub ValidInterface($)
ValidProperties($interface,"INTERFACE");
if (has_property($interface, "pointer_default")) {
- if (not grep (/$interface->{PROPERTIES}->{pointer_default}/,
+ if (not grep (/$interface->{PROPERTIES}->{pointer_default}/,
("ref", "unique", "ptr"))) {
fatal($interface, "Unknown default pointer type `$interface->{PROPERTIES}->{pointer_default}'");
}
}
if (has_property($interface, "object")) {
- if (has_property($interface, "version") &&
+ if (has_property($interface, "version") &&
$interface->{PROPERTIES}->{version} != 0) {
fatal($interface, "Object interfaces must have version 0.0 ($interface->{NAME})");
}
- if (!defined($interface->{BASE}) &&
+ if (!defined($interface->{BASE}) &&
not ($interface->{NAME} eq "IUnknown")) {
fatal($interface, "Object interfaces must all derive from IUnknown ($interface->{NAME})");
}
}
-
+
foreach my $d (@{$data}) {
($d->{TYPE} eq "FUNCTION") && ValidFunction($d);
- ($d->{TYPE} eq "TYPEDEF" or
+ ($d->{TYPE} eq "TYPEDEF" or
$d->{TYPE} eq "STRUCT" or
- $d->{TYPE} eq "UNION" or
+ $d->{TYPE} eq "UNION" or
$d->{TYPE} eq "ENUM" or
$d->{TYPE} eq "BITMAP" or
$d->{TYPE} eq "PIPE") && ValidType($d);
@@ -1447,7 +1502,7 @@ sub Validate($)
my($idl) = shift;
foreach my $x (@{$idl}) {
- ($x->{TYPE} eq "INTERFACE") &&
+ ($x->{TYPE} eq "INTERFACE") &&
ValidInterface($x);
($x->{TYPE} eq "IMPORTLIB") &&
fatal($x, "importlib() not supported");
diff --git a/tools/pidl/lib/Parse/Pidl/ODL.pm b/tools/pidl/lib/Parse/Pidl/ODL.pm
index 14e77fa9..0943f3a8 100644
--- a/tools/pidl/lib/Parse/Pidl/ODL.pm
+++ b/tools/pidl/lib/Parse/Pidl/ODL.pm
@@ -1,5 +1,5 @@
##########################################
-# Converts ODL stuctures to IDL structures
+# Converts ODL structures to IDL structures
# (C) 2004-2005, 2008 Jelmer Vernooij <jelmer@samba.org>
package Parse::Pidl::ODL;
@@ -10,6 +10,7 @@ use Parse::Pidl::Util qw(has_property unmake_str);
use Parse::Pidl::Typelist qw(hasType getType);
use File::Basename;
use strict;
+use warnings;
use vars qw($VERSION);
$VERSION = '0.01';
diff --git a/tools/pidl/lib/Parse/Pidl/Samba3/ClientNDR.pm b/tools/pidl/lib/Parse/Pidl/Samba3/ClientNDR.pm
index 6acf1c5a..816440ef 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba3/ClientNDR.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba3/ClientNDR.pm
@@ -6,12 +6,10 @@
# released under the GNU GPL
package Parse::Pidl::Samba3::ClientNDR;
-
-use Exporter;
-@ISA = qw(Exporter);
-@EXPORT_OK = qw(ParseFunction $res $res_hdr);
+use base Parse::Pidl::Base;
use strict;
+use warnings;
use Parse::Pidl qw(fatal warning error);
use Parse::Pidl::Util qw(has_property ParseExpr genpad);
use Parse::Pidl::NDR qw(ContainsPipe);
@@ -19,13 +17,10 @@ use Parse::Pidl::Typelist qw(mapTypeName);
use Parse::Pidl::Samba4 qw(DeclLong);
use Parse::Pidl::Samba4::Header qw(GenerateFunctionInEnv GenerateFunctionOutEnv);
+
use vars qw($VERSION);
$VERSION = '0.01';
-sub indent($) { my ($self) = @_; $self->{tabs}.="\t"; }
-sub deindent($) { my ($self) = @_; $self->{tabs} = substr($self->{tabs}, 1); }
-sub pidl($$) { my ($self,$txt) = @_; $self->{res} .= $txt ? "$self->{tabs}$txt\n" : "\n"; }
-sub pidl_hdr($$) { my ($self, $txt) = @_; $self->{res_hdr} .= "$txt\n"; }
sub fn_declare($$) { my ($self,$n) = @_; $self->pidl($n); $self->pidl_hdr("$n;"); }
sub new($)
diff --git a/tools/pidl/lib/Parse/Pidl/Samba3/ServerNDR.pm b/tools/pidl/lib/Parse/Pidl/Samba3/ServerNDR.pm
index c87d17a5..2dcc35e7 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba3/ServerNDR.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba3/ServerNDR.pm
@@ -11,6 +11,7 @@ use Exporter;
@EXPORT_OK = qw(DeclLevel);
use strict;
+use warnings;
use Parse::Pidl qw(warning error fatal);
use Parse::Pidl::Typelist qw(mapTypeName scalar_is_reference);
use Parse::Pidl::Util qw(ParseExpr has_property is_constant);
@@ -62,11 +63,11 @@ sub AllocOutVar($$$$$$$)
$l = $nl if ($nl->{TYPE} eq "ARRAY");
} elsif
- # we don't support multi-dimentional arrays yet
+ # we don't support multi-dimensional arrays yet
($l->{TYPE} eq "ARRAY") {
my $nl = GetNextLevel($e, $l);
if ($nl->{TYPE} eq "ARRAY") {
- fatal($e->{ORIGINAL},"multi-dimentional [out] arrays are not supported!");
+ fatal($e->{ORIGINAL},"multi-dimensional [out] arrays are not supported!");
}
} else {
# neither pointer nor array, no need to alloc something.
@@ -103,7 +104,7 @@ sub CallWithStruct($$$$$$)
if (grep(/out/, @{$_->{DIRECTION}})) { $hasout = 1; }
}
- pidl "ZERO_STRUCT(r->out);" if ($hasout);
+ pidl "NDR_ZERO_STRUCT(r->out);" if ($hasout);
foreach (@{$fn->{ELEMENTS}}) {
my @dir = @{$_->{DIRECTION}};
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4.pm b/tools/pidl/lib/Parse/Pidl/Samba4.pm
index b720ab90..6b3f2218 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4.pm
@@ -14,6 +14,7 @@ use Parse::Pidl::NDR qw(GetNextLevel);
use Parse::Pidl::Typelist qw(mapTypeName scalar_is_reference);
use Parse::Pidl qw(fatal error);
use strict;
+use warnings;
use vars qw($VERSION);
$VERSION = '0.01';
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/COM/Header.pm b/tools/pidl/lib/Parse/Pidl/Samba4/COM/Header.pm
index de7d4547..159f4172 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4/COM/Header.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/COM/Header.pm
@@ -10,6 +10,7 @@ use vars qw($VERSION);
$VERSION = '0.01';
use strict;
+use warnings;
sub GetArgumentProtoList($)
{
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/COM/Proxy.pm b/tools/pidl/lib/Parse/Pidl/Samba4/COM/Proxy.pm
index 35e6e3f0..1630cf23 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4/COM/Proxy.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/COM/Proxy.pm
@@ -14,6 +14,7 @@ use vars qw($VERSION);
$VERSION = '0.01';
use strict;
+use warnings;
my($res);
@@ -101,7 +102,7 @@ static $tn dcom_proxy_$interface->{NAME}_$name(struct $interface->{NAME} *d, TAL
return status;
}
- ZERO_STRUCT(r.in.ORPCthis);
+ NDR_ZERO_STRUCT(r.in.ORPCthis);
r.in.ORPCthis.version.MajorVersion = COM_MAJOR_VERSION;
r.in.ORPCthis.version.MinorVersion = COM_MINOR_VERSION;
";
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/COM/Stub.pm b/tools/pidl/lib/Parse/Pidl/Samba4/COM/Stub.pm
index 239f5baa..71980383 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4/COM/Stub.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/COM/Stub.pm
@@ -9,6 +9,7 @@ package Parse::Pidl::Samba4::COM::Stub;
use Parse::Pidl::Util qw(has_property);
use strict;
+use warnings;
use vars qw($VERSION);
$VERSION = '0.01';
@@ -125,8 +126,6 @@ static NTSTATUS $name\__op_ndr_pull(struct dcesrv_call_state *dce_call, TALLOC_C
/* unravel the NDR for the packet */
status = dcerpc_table_$name.calls[opnum].ndr_pull(pull, NDR_IN, *r);
if (!NT_STATUS_IS_OK(status)) {
- dcerpc_log_packet(&dcerpc_table_$name, opnum, NDR_IN,
- &dce_call->pkt.u.request.stub_and_verifier);
dce_call->fault_code = DCERPC_FAULT_NDR;
return NT_STATUS_NET_WRITE_FAULT;
}
@@ -152,8 +151,6 @@ pidl "
}
if (dce_call->fault_code != 0) {
- dcerpc_log_packet(&dcerpc_table_$name, opnum, NDR_IN,
- &dce_call->pkt.u.request.stub_and_verifier);
return NT_STATUS_NET_WRITE_FAULT;
}
@@ -175,8 +172,6 @@ pidl "
}
if (dce_call->fault_code != 0) {
- dcerpc_log_packet(&dcerpc_table_$name, opnum, NDR_IN,
- &dce_call->pkt.u.request.stub_and_verifier);
return NT_STATUS_NET_WRITE_FAULT;
}
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/Header.pm b/tools/pidl/lib/Parse/Pidl/Samba4/Header.pm
index e9b7bee0..a0b002f6 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4/Header.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/Header.pm
@@ -11,6 +11,7 @@ require Exporter;
@EXPORT_OK = qw(GenerateFunctionInEnv GenerateFunctionOutEnv EnvSubstituteValue GenerateStructEnv);
use strict;
+use warnings;
use Parse::Pidl qw(fatal);
use Parse::Pidl::Typelist qw(mapTypeName scalar_is_reference);
use Parse::Pidl::Util qw(has_property is_constant unmake_str ParseExpr);
@@ -142,7 +143,7 @@ sub HeaderEnum($$;$)
my $count = 0;
my $with_val = 0;
my $without_val = 0;
- pidl " { __do_not_use_enum_$name=0x7FFFFFFF}\n";
+ pidl " { __do_not_use_enum_$name=INT_MAX}\n";
foreach my $e (@{$enum->{ELEMENTS}}) {
my $t = "$e";
my $name;
@@ -493,7 +494,7 @@ sub EnvSubstituteValue($$)
# Substitute the value() values in the env
foreach my $e (@{$s->{ELEMENTS}}) {
next unless (defined(my $v = has_property($e, "value")));
-
+
$env->{$e->{NAME}} = ParseExpr($v, $env, $e);
}
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Client.pm b/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Client.pm
index 040cd5a4..84e2bebb 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Client.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Client.pm
@@ -5,10 +5,7 @@
# released under the GNU GPL
package Parse::Pidl::Samba4::NDR::Client;
-
-use Exporter;
-@ISA = qw(Exporter);
-@EXPORT_OK = qw(Parse);
+use parent Parse::Pidl::Base;
use Parse::Pidl qw(fatal warning error);
use Parse::Pidl::Util qw(has_property ParseExpr genpad);
@@ -17,18 +14,14 @@ use Parse::Pidl::Typelist qw(mapTypeName);
use Parse::Pidl::Samba4 qw(choose_header is_intree DeclLong);
use Parse::Pidl::Samba4::Header qw(GenerateFunctionInEnv GenerateFunctionOutEnv);
+
use vars qw($VERSION);
$VERSION = '0.01';
use strict;
+use warnings;
-sub indent($) { my ($self) = @_; $self->{tabs}.="\t"; }
-sub deindent($) { my ($self) = @_; $self->{tabs} = substr($self->{tabs}, 1); }
-sub pidl($$) { my ($self,$txt) = @_; $self->{res} .= $txt ? "$self->{tabs}$txt\n" : "\n"; }
-sub pidl_hdr($$) { my ($self, $txt) = @_; $self->{res_hdr} .= "$txt\n"; }
-sub pidl_both($$) { my ($self, $txt) = @_; $self->{hdr} .= "$txt\n"; $self->{res_hdr} .= "$txt\n"; }
sub fn_declare($$) { my ($self,$n) = @_; $self->pidl($n); $self->pidl_hdr("$n;"); }
-
sub new($)
{
my ($class) = shift;
@@ -496,7 +489,7 @@ sub ParseFunction_Send($$$$)
if (defined($fn->{RETURN_TYPE})) {
$self->pidl("/* Result */");
- $self->pidl("ZERO_STRUCT(state->orig.out.result);");
+ $self->pidl("NDR_ZERO_STRUCT(state->orig.out.result);");
$self->pidl("");
}
@@ -585,7 +578,7 @@ sub ParseFunction_Done($$$$)
}
$self->pidl("/* Reset temporary structure */");
- $self->pidl("ZERO_STRUCT(state->tmp);");
+ $self->pidl("NDR_ZERO_STRUCT(state->tmp);");
$self->pidl("");
$self->pidl("tevent_req_done(req);");
@@ -698,7 +691,7 @@ sub ParseFunction_Sync($$$$)
if (defined($fn->{RETURN_TYPE})) {
$self->pidl("/* Result */");
- $self->pidl("ZERO_STRUCT(r.out.result);");
+ $self->pidl("NDR_ZERO_STRUCT(r.out.result);");
$self->pidl("");
}
@@ -770,8 +763,8 @@ sub ParseFunction($$$)
# TODO: make this fatal at NDR level
if ($e->{LEVELS}[0]->{TYPE} eq "POINTER") {
if ($e->{LEVELS}[1]->{TYPE} eq "DATA" and
- $e->{LEVELS}[1]->{DATA_TYPE} eq "string") {
- $reason = "is a pointer to type 'string'";
+ Parse::Pidl::Typelist::is_string_type($e->{LEVELS}[1]->{DATA_TYPE})) {
+ $reason = "is a pointer to a string type";
} elsif ($e->{LEVELS}[1]->{TYPE} eq "ARRAY" and
$e->{LEVELS}[1]->{IS_ZERO_TERMINATED}) {
next;
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Parser.pm b/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Parser.pm
index cfcd29e2..d7386d5b 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Parser.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Parser.pm
@@ -6,14 +6,22 @@
# released under the GNU GPL
package Parse::Pidl::Samba4::NDR::Parser;
+use parent Parse::Pidl::Base;
require Exporter;
-@ISA = qw(Exporter);
+push @ISA, qw(Exporter);
@EXPORT_OK = qw(check_null_pointer NeededFunction NeededElement NeededType $res NeededInterface TypeFunctionName ParseElementPrint);
use strict;
-use Parse::Pidl::Typelist qw(hasType getType mapTypeName typeHasBody);
-use Parse::Pidl::Util qw(has_property ParseExpr ParseExprExt print_uuid unmake_str);
+use warnings;
+use Parse::Pidl::Typelist qw(hasType getType mapTypeName mapTypeSpecifier typeHasBody);
+use Parse::Pidl::Util qw(has_property
+ ParseExpr
+ ParseExprExt
+ print_uuid
+ unmake_str
+ parse_int
+ parse_range);
use Parse::Pidl::CUtil qw(get_pointer_to get_value_of get_array_element);
use Parse::Pidl::NDR qw(GetPrevLevel GetNextLevel ContainsDeferred ContainsPipe is_charset_array);
use Parse::Pidl::Samba4 qw(is_intree choose_header ArrayDynamicallyAllocated);
@@ -49,7 +57,7 @@ sub append_prefix($$)
$pointers++;
} elsif ($l->{TYPE} eq "ARRAY") {
$arrays++;
- if (($pointers == 0) and
+ if (($pointers == 0) and
(not $l->{IS_FIXED}) and
(not $l->{IS_INLINE})) {
return get_value_of($var_name);
@@ -60,7 +68,7 @@ sub append_prefix($$)
}
}
}
-
+
return $var_name;
}
@@ -76,28 +84,26 @@ sub has_fast_array($$)
my $t = getType($nl->{DATA_TYPE});
- # Only uint8 and string have fast array functions at the moment
- return ($t->{NAME} eq "uint8") or ($t->{NAME} eq "string");
+ # Only uint8 has a fast array function at the moment
+ return ($t->{NAME} eq "uint8");
}
-
-####################################
-# pidl() is our basic output routine
-sub pidl($$)
+sub is_public_struct
{
- my ($self, $d) = @_;
- if ($d) {
- $self->{res} .= $self->{tabs};
- $self->{res} .= $d;
+ my ($d) = @_;
+ if (!has_property($d, "public")) {
+ return 0;
+ }
+ my $t = $d;
+ if ($d->{TYPE} eq "TYPEDEF") {
+ $t = $d->{DATA};
}
- $self->{res} .="\n";
+ return $t->{TYPE} eq "STRUCT";
}
-sub pidl_hdr($$) { my ($self, $d) = @_; $self->{res_hdr} .= "$d\n"; }
-
####################################
-# defer() is like pidl(), but adds to
-# a deferred buffer which is then added to the
+# defer() is like pidl(), but adds to
+# a deferred buffer which is then added to the
# output buffer at the end of the structure/union/function
# This is needed to cope with code that must be pushed back
# to the end of a block of elements
@@ -123,18 +129,6 @@ sub add_deferred($)
$self->{defer_tabs} = "";
}
-sub indent($)
-{
- my ($self) = @_;
- $self->{tabs} .= "\t";
-}
-
-sub deindent($)
-{
- my ($self) = @_;
- $self->{tabs} = substr($self->{tabs}, 0, -1);
-}
-
#####################################################################
# declare a function public or static, depending on its attributes
sub fn_declare($$$$)
@@ -165,7 +159,7 @@ sub start_flags($$$)
if (defined $flags) {
$self->pidl("{");
$self->indent;
- $self->pidl("uint32_t _flags_save_$e->{TYPE} = $ndr->flags;");
+ $self->pidl("libndr_flags _flags_save_$e->{TYPE} = $ndr->flags;");
$self->pidl("ndr_set_flags(&$ndr->flags, $flags);");
}
}
@@ -241,7 +235,7 @@ sub check_fully_dereferenced($$)
last;
}
}
-
+
return($origvar) unless (defined($var));
my $e;
foreach (@{$element->{PARENT}->{ELEMENTS}}) {
@@ -264,7 +258,7 @@ sub check_fully_dereferenced($$)
warning($element->{ORIGINAL}, "Got pointer for `$e->{NAME}', expected fully dereferenced variable") if ($nump > length($ptr));
return ($origvar);
}
-}
+}
sub check_null_pointer($$$$)
{
@@ -285,7 +279,7 @@ sub check_null_pointer($$$$)
last;
}
}
-
+
if (defined($var)) {
my $e;
# lookup ptr in $e
@@ -301,7 +295,7 @@ sub check_null_pointer($$$$)
# See if pointer at pointer level $level
# needs to be checked.
foreach my $l (@{$e->{LEVELS}}) {
- if ($l->{TYPE} eq "POINTER" and
+ if ($l->{TYPE} eq "POINTER" and
$l->{POINTER_INDEX} == length($ptr)) {
# No need to check ref pointers
$check = ($l->{POINTER_TYPE} ne "ref");
@@ -316,7 +310,7 @@ sub check_null_pointer($$$$)
warning($element, "unknown dereferenced expression `$expandedvar'");
$check = 1;
}
-
+
$print_fn->("if ($ptr$expandedvar == NULL) $return") if $check;
}
}
@@ -346,22 +340,29 @@ sub ParseArrayPullGetSize($$$$$$)
my $size;
- if ($l->{IS_CONFORMANT}) {
- $size = "ndr_get_array_size($ndr, " . get_pointer_to($var_name) . ")";
+ my $array_size = "size_$e->{NAME}_$l->{LEVEL_INDEX}";
+
+ if ($l->{IS_CONFORMANT} and (defined($l->{SIZE_IS}) or not $l->{IS_ZERO_TERMINATED})) {
+ $self->pidl("NDR_CHECK(ndr_get_array_size($ndr, (void*)" . get_pointer_to($var_name) . ", &$array_size));");
+
+ } elsif ($l->{IS_CONFORMANT}) {
+ # This will be the last use of the array_size token
+ $self->pidl("NDR_CHECK(ndr_steal_array_size($ndr, (void*)" . get_pointer_to($var_name) . ", &$array_size));");
+
} elsif ($l->{IS_ZERO_TERMINATED} and $l->{SIZE_IS} == 0 and $l->{LENGTH_IS} == 0) { # Noheader arrays
$size = "ndr_get_string_size($ndr, sizeof(*$var_name))";
+ $self->pidl("$array_size = $size;");
+
} else {
$size = ParseExprExt($l->{SIZE_IS}, $env, $e->{ORIGINAL},
check_null_pointer($e, $env, sub { $self->pidl(shift); },
"return ndr_pull_error($ndr, NDR_ERR_INVALID_POINTER, \"NULL Pointer for size_is()\");"),
check_fully_dereferenced($e, $env));
+ $self->pidl("$array_size = $size;");
}
- $self->pidl("size_$e->{NAME}_$l->{LEVEL_INDEX} = $size;");
- my $array_size = "size_$e->{NAME}_$l->{LEVEL_INDEX}";
-
if (my $range = has_property($e, "range")) {
- my ($low, $high) = split(/,/, $range, 2);
+ my ($low, $high) = parse_range($range);
if ($low < 0) {
warning(0, "$low is invalid for the range of an array size");
}
@@ -370,7 +371,8 @@ sub ParseArrayPullGetSize($$$$$$)
} else {
$self->pidl("if ($array_size < $low || $array_size > $high) {");
}
- $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_RANGE, \"value out of range\");");
+ $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_RANGE, \"value (%\"PRIu32\") out of range (%\"PRIu32\" - %\"PRIu32\")\", $array_size, (uint32_t)($low), (uint32_t)($high));");
+
$self->pidl("}");
}
@@ -391,12 +393,16 @@ sub ParseArrayPullGetLength($$$$$$;$)
return $array_size;
}
- my $length = "ndr_get_array_length($ndr, " . get_pointer_to($var_name) .")";
- $self->pidl("length_$e->{NAME}_$l->{LEVEL_INDEX} = $length;");
my $array_length = "length_$e->{NAME}_$l->{LEVEL_INDEX}";
+ if ($l->{IS_VARYING} and (defined($l->{LENGTH_IS}) or not $l->{IS_ZERO_TERMINATED})) {
+ $self->pidl("NDR_CHECK(ndr_get_array_length($ndr, (void*)" . get_pointer_to($var_name) . ", &$array_length));");
+ } else {
+ # This will be the last use of the array_length token
+ $self->pidl("NDR_CHECK(ndr_steal_array_length($ndr, (void*)" . get_pointer_to($var_name) . ", &$array_length));");
+ }
if (my $range = has_property($e, "range")) {
- my ($low, $high) = split(/,/, $range, 2);
+ my ($low, $high) = parse_range($range);
if ($low < 0) {
warning(0, "$low is invalid for the range of an array size");
}
@@ -405,7 +411,7 @@ sub ParseArrayPullGetLength($$$$$$;$)
} else {
$self->pidl("if ($array_length < $low || $array_length > $high) {");
}
- $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_RANGE, \"value out of range\");");
+ $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_RANGE, \"value (%\"PRIu32\") out of range (%\"PRIu32\" - %\"PRIu32\")\", $array_length, (uint32_t)($low), (uint32_t)($high));");
$self->pidl("}");
}
@@ -432,7 +438,7 @@ sub ParseArrayPullHeader($$$$$$)
if ($array_length ne $array_size) {
$self->pidl("if ($array_length > $array_size) {");
$self->indent;
- $self->pidl("return ndr_pull_error($ndr, NDR_ERR_ARRAY_SIZE, \"Bad array size %u should exceed array length %u\", $array_size, $array_length);");
+ $self->pidl("return ndr_pull_error($ndr, NDR_ERR_ARRAY_SIZE, \"Bad array size %\"PRIu32\": should exceed array length %\"PRIu32\"\", $array_size, $array_length);");
$self->deindent;
$self->pidl("}");
}
@@ -444,7 +450,14 @@ sub ParseArrayPullHeader($$$$$$)
check_null_pointer($e, $env, sub { $self->defer(shift); },
"return ndr_pull_error($ndr, NDR_ERR_INVALID_POINTER, \"NULL Pointer for size_is()\");"),
check_fully_dereferenced($e, $env));
- $self->defer("NDR_CHECK(ndr_check_array_size($ndr, (void*)" . get_pointer_to($var_name) . ", $size));");
+ if (ContainsDeferred($e, $l)) {
+ # We will be needing the array_size token in
+ # the NDR_BUFFERS call, so don't steal it now
+ $self->defer("NDR_CHECK(ndr_check_array_size($ndr, (void*)" . get_pointer_to($var_name) . ", $size));");
+ } else {
+ # This will be deferred until after the last ndr_get_array_size()
+ $self->defer("NDR_CHECK(ndr_check_steal_array_size($ndr, (void*)" . get_pointer_to($var_name) . ", $size));");
+ }
$self->defer_deindent;
$self->defer("}");
}
@@ -452,11 +465,12 @@ sub ParseArrayPullHeader($$$$$$)
if ($l->{IS_VARYING} and (defined($l->{LENGTH_IS}) or not $l->{IS_ZERO_TERMINATED})) {
$self->defer("if ($var_name) {");
$self->defer_indent;
- my $length = ParseExprExt($l->{LENGTH_IS}, $env, $e->{ORIGINAL},
+ my $length = ParseExprExt($l->{LENGTH_IS}, $env, $e->{ORIGINAL},
check_null_pointer($e, $env, sub { $self->defer(shift); },
"return ndr_pull_error($ndr, NDR_ERR_INVALID_POINTER, \"NULL Pointer for length_is()\");"),
check_fully_dereferenced($e, $env));
- $self->defer("NDR_CHECK(ndr_check_array_length($ndr, (void*)" . get_pointer_to($var_name) . ", $length));");
+ # This will be deferred until after the last ndr_get_array_length()
+ $self->defer("NDR_CHECK(ndr_check_steal_array_length($ndr, (void*)" . get_pointer_to($var_name) . ", $length));");
$self->defer_deindent;
$self->defer("}");
}
@@ -468,12 +482,12 @@ sub ParseArrayPullHeader($$$$$$)
return $array_length;
}
-sub compression_alg($$)
+sub compression_alg($$$)
{
- my ($e, $l) = @_;
+ my ($e, $l, $env) = @_;
my ($alg, $clen, $dlen) = split(/,/, $l->{COMPRESSION});
- return $alg;
+ return ParseExpr($alg, $env, $e->{ORIGINAL});
}
sub compression_clen($$$)
@@ -496,13 +510,13 @@ sub ParseCompressionPushStart($$$$$)
{
my ($self,$e,$l,$ndr,$env) = @_;
my $comndr = "$ndr\_compressed";
- my $alg = compression_alg($e, $l);
- my $dlen = compression_dlen($e, $l, $env);
+ my $alg = compression_alg($e, $l, $env);
$self->pidl("{");
$self->indent;
$self->pidl("struct ndr_push *$comndr;");
- $self->pidl("NDR_CHECK(ndr_push_compression_start($ndr, &$comndr, $alg, $dlen));");
+ $self->pidl("NDR_CHECK(ndr_push_compression_state_init($ndr, $alg));");
+ $self->pidl("NDR_CHECK(ndr_push_compression_start($ndr, &$comndr));");
return $comndr;
}
@@ -511,10 +525,10 @@ sub ParseCompressionPushEnd($$$$$)
{
my ($self,$e,$l,$ndr,$env) = @_;
my $comndr = "$ndr\_compressed";
- my $alg = compression_alg($e, $l);
- my $dlen = compression_dlen($e, $l, $env);
+ my $alg = compression_alg($e, $l, $env);
- $self->pidl("NDR_CHECK(ndr_push_compression_end($ndr, $comndr, $alg, $dlen));");
+ $self->pidl("NDR_CHECK(ndr_push_compression_end($ndr, $comndr));");
+ $self->pidl("TALLOC_FREE($ndr->cstate);");
$self->deindent;
$self->pidl("}");
}
@@ -523,7 +537,7 @@ sub ParseCompressionPullStart($$$$$)
{
my ($self,$e,$l,$ndr,$env) = @_;
my $comndr = "$ndr\_compressed";
- my $alg = compression_alg($e, $l);
+ my $alg = compression_alg($e, $l, $env);
my $dlen = compression_dlen($e, $l, $env);
my $clen = compression_clen($e, $l, $env);
@@ -539,7 +553,7 @@ sub ParseCompressionPullEnd($$$$$)
{
my ($self,$e,$l,$ndr,$env) = @_;
my $comndr = "$ndr\_compressed";
- my $alg = compression_alg($e, $l);
+ my $alg = compression_alg($e, $l, $env);
my $dlen = compression_dlen($e, $l, $env);
$self->pidl("NDR_CHECK(ndr_pull_compression_end($ndr, $comndr, $alg, $dlen));");
@@ -589,7 +603,8 @@ sub ParseSubcontextPullStart($$$$$)
$self->pidl("{");
$self->indent;
$self->pidl("struct ndr_pull *$subndr;");
- $self->pidl("NDR_CHECK(ndr_pull_subcontext_start($ndr, &$subndr, $l->{HEADER_SIZE}, $subcontext_size));");
+ $self->pidl("ssize_t sub_size = $subcontext_size;");
+ $self->pidl("NDR_CHECK(ndr_pull_subcontext_start($ndr, &$subndr, $l->{HEADER_SIZE}, sub_size));");
if (defined $l->{COMPRESSION}) {
$subndr = $self->ParseCompressionPullStart($e, $l, $subndr, $env);
@@ -608,7 +623,7 @@ sub ParseSubcontextPullEnd($$$$$)
$self->ParseCompressionPullEnd($e, $l, $subndr, $env);
}
- $self->pidl("NDR_CHECK(ndr_pull_subcontext_end($ndr, $subndr, $l->{HEADER_SIZE}, $subcontext_size));");
+ $self->pidl("NDR_CHECK(ndr_pull_subcontext_end($ndr, $subndr, $l->{HEADER_SIZE}, sub_size));");
$self->deindent;
$self->pidl("}");
}
@@ -631,7 +646,7 @@ sub ParseElementPushLevel
} elsif ($l->{TYPE} eq "POINTER") {
$self->ParsePtrPush($e, $l, $ndr, $var_name);
} elsif ($l->{TYPE} eq "ARRAY") {
- my $length = $self->ParseArrayPushHeader($e, $l, $ndr, $var_name, $env);
+ my $length = $self->ParseArrayPushHeader($e, $l, $ndr, $var_name, $env);
my $nl = GetNextLevel($e, $l);
@@ -646,9 +661,7 @@ sub ParseElementPushLevel
} elsif (has_fast_array($e,$l)) {
$self->pidl("NDR_CHECK(ndr_push_array_$nl->{DATA_TYPE}($ndr, $ndr_flags, $var_name, $length));");
return;
- }
- } elsif ($l->{TYPE} eq "SWITCH") {
- $self->ParseSwitchPush($e, $l, $ndr, $var_name, $env);
+ }
} elsif ($l->{TYPE} eq "DATA") {
$self->ParseDataPush($e, $l, $ndr, $var_name, $primitives, $deferred);
} elsif ($l->{TYPE} eq "TYPEDEF") {
@@ -709,6 +722,14 @@ sub ParseElementPushLevel
$self->pidl("}");
}
} elsif ($l->{TYPE} eq "SWITCH") {
+ my $nl = GetNextLevel($e,$l);
+ my $needs_deferred_switch = is_deferred_switch_non_empty($nl);
+
+ # Avoid setting a switch value if it will not be
+ # consumed again in the NDR_BUFFERS pull
+ if ($needs_deferred_switch or !$deferred) {
+ $self->ParseSwitchPush($e, $l, $ndr, $var_name, $env);
+ }
$self->ParseElementPushLevel($e, GetNextLevel($e, $l), $ndr, $var_name, $env, $primitives, $deferred);
}
}
@@ -886,13 +907,13 @@ sub ParseElementPrint($$$$$)
my $length;
if ($l->{IS_CONFORMANT} or $l->{IS_VARYING}) {
- $var_name = get_pointer_to($var_name);
+ $var_name = get_pointer_to($var_name);
}
-
+
if ($l->{IS_ZERO_TERMINATED} and not defined($l->{LENGTH_IS})) {
$length = "ndr_string_length($var_name, sizeof(*$var_name))";
} else {
- $length = ParseExprExt($l->{LENGTH_IS}, $env, $e->{ORIGINAL},
+ $length = ParseExprExt($l->{LENGTH_IS}, $env, $e->{ORIGINAL},
check_null_pointer($e, $env, sub { $self->pidl(shift); }, "return;"), check_fully_dereferenced($e, $env));
}
@@ -906,7 +927,7 @@ sub ParseElementPrint($$$$$)
} else {
my $counter = "cntr_$e->{NAME}_$l->{LEVEL_INDEX}";
- $self->pidl("$ndr->print($ndr, \"\%s: ARRAY(\%d)\", \"$e->{NAME}\", (int)$length);");
+ $self->pidl("$ndr->print($ndr, \"%s: ARRAY(%\"PRIu32\")\", \"$e->{NAME}\", (uint32_t)($length));");
$self->pidl("$ndr->depth++;");
$self->pidl("for ($counter = 0; $counter < ($length); $counter++) {");
$self->indent;
@@ -916,10 +937,10 @@ sub ParseElementPrint($$$$$)
} elsif ($l->{TYPE} eq "DATA") {
$self->ParseDataPrint($e, $l, $ndr, $var_name);
} elsif ($l->{TYPE} eq "SWITCH") {
- my $switch_var = ParseExprExt($l->{SWITCH_IS}, $env, $e->{ORIGINAL},
+ my $switch_var = ParseExprExt($l->{SWITCH_IS}, $env, $e->{ORIGINAL},
check_null_pointer($e, $env, sub { $self->pidl(shift); }, "return;"), check_fully_dereferenced($e, $env));
$self->pidl("ndr_print_set_switch_value($ndr, " . get_pointer_to($var_name) . ", $switch_var);");
- }
+ }
}
foreach my $l (reverse @{$e->{LEVELS}}) {
@@ -956,7 +977,7 @@ sub ParseElementPrint($$$$$)
sub ParseSwitchPull($$$$$$)
{
my($self,$e,$l,$ndr,$var_name,$env) = @_;
- my $switch_var = ParseExprExt($l->{SWITCH_IS}, $env, $e->{ORIGINAL},
+ my $switch_var = ParseExprExt($l->{SWITCH_IS}, $env, $e->{ORIGINAL},
check_null_pointer($e, $env, sub { $self->pidl(shift); },
"return ndr_pull_error($ndr, NDR_ERR_INVALID_POINTER, \"NULL Pointer for switch_is()\");"),
check_fully_dereferenced($e, $env));
@@ -970,7 +991,7 @@ sub ParseSwitchPull($$$$$$)
sub ParseSwitchPush($$$$$$)
{
my($self,$e,$l,$ndr,$var_name,$env) = @_;
- my $switch_var = ParseExprExt($l->{SWITCH_IS}, $env, $e->{ORIGINAL},
+ my $switch_var = ParseExprExt($l->{SWITCH_IS}, $env, $e->{ORIGINAL},
check_null_pointer($e, $env, sub { $self->pidl(shift); },
"return ndr_push_error($ndr, NDR_ERR_INVALID_POINTER, \"NULL Pointer for switch_is()\");"),
check_fully_dereferenced($e, $env));
@@ -993,15 +1014,22 @@ sub ParseDataPull($$$$$$$)
$var_name = get_pointer_to($var_name);
+ if (my $depth = has_property($e, "max_recursion")) {
+ my $d = parse_int($depth);
+ $self->pidl("NDR_RECURSION_CHECK($ndr, $d);");
+ }
$self->pidl("NDR_CHECK(".TypeFunctionName("ndr_pull", $l->{DATA_TYPE})."($ndr, $ndr_flags, $var_name));");
+ if (has_property($e, "max_recursion")) {
+ $self->pidl("NDR_RECURSION_UNWIND($ndr);");
+ }
my $pl = GetPrevLevel($e, $l);
my $range = has_property($e, "range");
- if ($range and $pl->{TYPE} ne "ARRAY") {
+ if ($range and (not $pl or $pl->{TYPE} ne "ARRAY")) {
$var_name = get_value_of($var_name);
my $signed = Parse::Pidl::Typelist::is_signed($l->{DATA_TYPE});
- my ($low, $high) = split(/,/, $range, 2);
+ my ($low, $high) = parse_range($range);
if ($low < 0 and not $signed) {
warning(0, "$low is invalid for the range of an unsigned type");
}
@@ -1010,7 +1038,20 @@ sub ParseDataPull($$$$$$$)
} else {
$self->pidl("if ($var_name < $low || $var_name > $high) {");
}
- $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_RANGE, \"value out of range\");");
+
+ my $data_type = mapTypeName($l->{DATA_TYPE});
+ my $fmt = mapTypeSpecifier($data_type);
+
+ if (!defined($fmt)) {
+ if (getType($l->{DATA_TYPE})->{DATA}->{TYPE} eq "ENUM") {
+ $data_type = "int";
+ $fmt = "d";
+ } else {
+ die("Format ($data_type) not supported");
+ }
+ }
+
+ $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_RANGE, \"value (%$fmt) out of range (%$fmt - %$fmt)\", ($data_type)($var_name), ($data_type)($low), ($data_type)($high));");
$self->pidl("}");
}
} else {
@@ -1044,14 +1085,14 @@ sub CalcNdrFlags($$$)
my $scalars = 0;
my $buffers = 0;
- # Add NDR_SCALARS if this one is deferred
+ # Add NDR_SCALARS if this one is deferred
# and deferreds may be pushed
$scalars = 1 if ($l->{IS_DEFERRED} and $deferred);
- # Add NDR_SCALARS if this one is not deferred and
+ # Add NDR_SCALARS if this one is not deferred and
# primitives may be pushed
$scalars = 1 if (!$l->{IS_DEFERRED} and $primitives);
-
+
# Add NDR_BUFFERS if this one contains deferred stuff
# and deferreds may be pushed
$buffers = 1 if ($l->{CONTAINS_DEFERRED} and $deferred);
@@ -1079,7 +1120,7 @@ sub ParseMemCtxPullFlags($$$$)
my $nl = GetNextLevel($e, $l);
return undef if ($nl->{TYPE} eq "PIPE");
return undef if ($nl->{TYPE} eq "ARRAY");
- return undef if (($nl->{TYPE} eq "DATA") and ($nl->{DATA_TYPE} eq "string"));
+ return undef if (($nl->{TYPE} eq "DATA") and (Parse::Pidl::Typelist::is_string_type($nl->{DATA_TYPE})));
if ($l->{LEVEL} eq "TOP") {
$mem_flags = "LIBNDR_FLAG_REF_ALLOC";
@@ -1134,7 +1175,7 @@ sub ParseElementPullLevel
if (has_property($e, "skip") or has_property($e, "skip_noinit")) {
$self->pidl("/* [skip] '$var_name' */");
if (not has_property($e, "skip_noinit")) {
- $self->pidl("ZERO_STRUCT($var_name);");
+ $self->pidl("NDR_ZERO_STRUCT($var_name);");
}
return;
}
@@ -1174,8 +1215,6 @@ sub ParseElementPullLevel
}
} elsif ($l->{TYPE} eq "POINTER") {
$self->ParsePtrPull($e, $l, $ndr, $var_name);
- } elsif ($l->{TYPE} eq "SWITCH") {
- $self->ParseSwitchPull($e, $l, $ndr, $var_name, $env);
} elsif ($l->{TYPE} eq "DATA") {
$self->ParseDataPull($e, $l, $ndr, $var_name, $primitives, $deferred);
} elsif ($l->{TYPE} eq "TYPEDEF") {
@@ -1217,7 +1256,7 @@ sub ParseElementPullLevel
$self->deindent;
$self->pidl("}");
}
- } elsif ($l->{TYPE} eq "ARRAY" and
+ } elsif ($l->{TYPE} eq "ARRAY" and
not has_fast_array($e,$l) and not is_charset_array($e, $l)) {
my $length = $array_length;
my $counter = "cntr_$e->{NAME}_$l->{LEVEL_INDEX}";
@@ -1247,16 +1286,28 @@ sub ParseElementPullLevel
if ($deferred and ContainsDeferred($e, $l)) {
$self->pidl("for ($counter = 0; $counter < ($length); $counter++) {");
+ $self->defer("for ($counter = 0; $counter < ($length); $counter++) {");
+ $self->defer_indent;
$self->indent;
$self->ParseElementPullLevel($e,GetNextLevel($e,$l), $ndr, $var_name, $env, 0, 1);
$self->deindent;
+ $self->defer_deindent;
$self->pidl("}");
+ $self->defer("}");
}
$self->ParseMemCtxPullEnd($e, $l, $ndr);
} elsif ($l->{TYPE} eq "SWITCH") {
- $self->ParseElementPullLevel($e, GetNextLevel($e,$l), $ndr, $var_name, $env, $primitives, $deferred);
+ my $nl = GetNextLevel($e,$l);
+ my $needs_deferred_switch = is_deferred_switch_non_empty($nl);
+
+ # Avoid setting a switch value if it will not be
+ # consumed again in the NDR_BUFFERS pull
+ if ($needs_deferred_switch or !$deferred) {
+ $self->ParseSwitchPull($e, $l, $ndr, $var_name, $env);
+ }
+ $self->ParseElementPullLevel($e, $nl, $ndr, $var_name, $env, $primitives, $deferred);
}
}
@@ -1307,21 +1358,21 @@ sub ParsePtrPull($$$$$)
my $nl = GetNextLevel($e, $l);
my $next_is_array = ($nl->{TYPE} eq "ARRAY");
- my $next_is_string = (($nl->{TYPE} eq "DATA") and
- ($nl->{DATA_TYPE} eq "string"));
+ my $next_is_string = (($nl->{TYPE} eq "DATA") and
+ (Parse::Pidl::Typelist::is_string_type($nl->{DATA_TYPE})));
if ($l->{POINTER_TYPE} eq "ref" and $l->{LEVEL} eq "TOP") {
if (!$next_is_array and !$next_is_string) {
$self->pidl("if ($ndr->flags & LIBNDR_FLAG_REF_ALLOC) {");
- $self->pidl("\tNDR_PULL_ALLOC($ndr, $var_name);");
+ $self->pidl("\tNDR_PULL_ALLOC($ndr, $var_name);");
$self->pidl("}");
}
return;
} elsif ($l->{POINTER_TYPE} eq "ref" and $l->{LEVEL} eq "EMBEDDED") {
$self->pidl("NDR_CHECK(ndr_pull_ref_ptr($ndr, &_ptr_$e->{NAME}));");
- } elsif (($l->{POINTER_TYPE} eq "unique") or
+ } elsif (($l->{POINTER_TYPE} eq "unique") or
($l->{POINTER_TYPE} eq "relative") or
($l->{POINTER_TYPE} eq "full")) {
$self->pidl("NDR_CHECK(ndr_pull_generic_ptr($ndr, &_ptr_$e->{NAME}));");
@@ -1343,10 +1394,10 @@ sub ParsePtrPull($$$$$)
# allocation, as we forced it to NULL just above, and
# we may not know the declared type anyway.
} else {
- # Don't do this for arrays, they're allocated at the actual level
+ # Don't do this for arrays, they're allocated at the actual level
# of the array
- unless ($next_is_array or $next_is_string) {
- $self->pidl("NDR_PULL_ALLOC($ndr, $var_name);");
+ unless ($next_is_array or $next_is_string) {
+ $self->pidl("NDR_PULL_ALLOC($ndr, $var_name);");
} else {
# FIXME: Yes, this is nasty.
# We allocate an array twice
@@ -1398,10 +1449,10 @@ sub ParseStructPushPrimitives($$$$$)
if (defined($struct->{SURROUNDING_ELEMENT})) {
my $e = $struct->{SURROUNDING_ELEMENT};
- if (defined($e->{LEVELS}[0]) and
+ if (defined($e->{LEVELS}[0]) and
$e->{LEVELS}[0]->{TYPE} eq "ARRAY") {
my $size;
-
+
if ($e->{LEVELS}[0]->{IS_ZERO_TERMINATED}) {
if (has_property($e, "charset")) {
$size = "ndr_charset_length($varname->$e->{NAME}, CH_$e->{PROPERTIES}->{charset})";
@@ -1450,7 +1501,7 @@ sub ParseStructPushDeferred($$$$)
sub ParseStructPush($$$$)
{
my ($self, $struct, $ndr, $varname) = @_;
-
+
return unless defined($struct->{ELEMENTS});
my $env = GenerateStructEnv($struct, $varname);
@@ -1530,7 +1581,7 @@ sub ParseEnumPrint($$$$$)
$self->deindent;
$self->pidl("}");
-
+
$self->pidl("ndr_print_enum($ndr, name, \"$enum->{TYPE}\", val, $varname);");
$self->end_flags($enum, $ndr);
@@ -1539,7 +1590,7 @@ sub ParseEnumPrint($$$$$)
sub DeclEnum($$$$)
{
my ($e,$t,$name,$varname) = @_;
- return "enum $name " .
+ return "enum $name " .
($t eq "pull"?"*":"") . $varname;
}
@@ -1622,7 +1673,7 @@ sub ParseBitmapPrint($$$$$)
sub DeclBitmap($$$$)
{
my ($e,$t,$name,$varname) = @_;
- return mapTypeName(Parse::Pidl::Typelist::bitmap_type_fn($e)) .
+ return mapTypeName(Parse::Pidl::Typelist::bitmap_type_fn($e)) .
($t eq "pull"?" *":" ") . $varname;
}
@@ -1651,7 +1702,7 @@ sub ParseStructPrint($$$$$)
$self->start_flags($struct, $ndr);
$self->pidl("$ndr->depth++;");
-
+
$self->ParseElementPrint($_, $ndr, $env->{$_->{NAME}}, $env)
foreach (@{$struct->{ELEMENTS}});
$self->pidl("$ndr->depth--;");
@@ -1669,7 +1720,7 @@ sub DeclarePtrVariables($$)
foreach my $l (@{$e->{LEVELS}}) {
my $size = 32;
- if ($l->{TYPE} eq "POINTER" and
+ if ($l->{TYPE} eq "POINTER" and
not ($l->{POINTER_TYPE} eq "ref" and $l->{LEVEL} eq "TOP")) {
if ($l->{POINTER_TYPE} eq "relative_short") {
$size = 16;
@@ -1833,13 +1884,16 @@ sub ParseStructNdrSize($$$$)
sub DeclStruct($$$$)
{
my ($e,$t,$name,$varname) = @_;
+ if ($t eq "base") {
+ return "struct $name $varname";
+ }
return ($t ne "pull"?"const ":"") . "struct $name *$varname";
}
sub ArgsStructNdrSize($$$)
{
my ($d, $name, $varname) = @_;
- return "const struct $name *$varname, int flags";
+ return "const struct $name *$varname, libndr_flags flags";
}
$typefamily{STRUCT} = {
@@ -1871,8 +1925,6 @@ sub ParseUnionPushPrimitives($$$$)
my $have_default = 0;
- $self->pidl("uint32_t level = ndr_push_get_switch_value($ndr, $varname);");
-
if (defined($e->{SWITCH_TYPE})) {
if (defined($e->{ALIGN})) {
$self->pidl("NDR_CHECK(ndr_push_union_align($ndr, $e->{ALIGN}));");
@@ -1917,7 +1969,7 @@ sub ParseUnionPushPrimitives($$$$)
}
if (! $have_default) {
$self->pidl("default:");
- $self->pidl("\treturn ndr_push_error($ndr, NDR_ERR_BAD_SWITCH, \"Bad switch value \%u at \%s\", level, __location__);");
+ $self->pidl("\treturn ndr_push_error($ndr, NDR_ERR_BAD_SWITCH, \"Bad switch value %\"PRIu32, level);");
}
$self->deindent;
$self->pidl("}");
@@ -1929,7 +1981,6 @@ sub ParseUnionPushDeferred($$$$)
my $have_default = 0;
- $self->pidl("uint32_t level = ndr_push_get_switch_value($ndr, $varname);");
if (defined($e->{PROPERTIES}{relative_base})) {
# retrieve the current offset as base for relative pointers
# based on the toplevel struct/union
@@ -1953,7 +2004,7 @@ sub ParseUnionPushDeferred($$$$)
}
if (! $have_default) {
$self->pidl("default:");
- $self->pidl("\treturn ndr_push_error($ndr, NDR_ERR_BAD_SWITCH, \"Bad switch value \%u at \%s\", level, __location__);");
+ $self->pidl("\treturn ndr_push_error($ndr, NDR_ERR_BAD_SWITCH, \"Bad switch value %\"PRIu32, level);");
}
$self->deindent;
$self->pidl("}");
@@ -1966,17 +2017,28 @@ sub ParseUnionPush($$$$)
my ($self,$e,$ndr,$varname) = @_;
my $have_default = 0;
+ $self->pidl("uint32_t level;");
$self->start_flags($e, $ndr);
$self->pidl("NDR_PUSH_CHECK_FLAGS(ndr, ndr_flags);");
$self->pidl("if (ndr_flags & NDR_SCALARS) {");
$self->indent;
+ $self->pidl("/* This token is not used again (except perhaps below in the NDR_BUFFERS case) */");
+ $self->pidl("NDR_CHECK(ndr_push_steal_switch_value($ndr, $varname, &level));");
+
$self->ParseUnionPushPrimitives($e, $ndr, $varname);
$self->deindent;
$self->pidl("}");
if (is_deferred_switch_non_empty($e)) {
$self->pidl("if (ndr_flags & NDR_BUFFERS) {");
$self->indent;
+ # In case we had ndr_flags of NDR_SCALERS|NDR_BUFFERS
+ $self->pidl("if (!(ndr_flags & NDR_SCALARS)) {");
+ $self->indent;
+ $self->pidl("/* We didn't get it above, and the token is not needed after this. */");
+ $self->pidl("NDR_CHECK(ndr_push_steal_switch_value($ndr, $varname, &level));");
+ $self->deindent;
+ $self->pidl("}");
$self->ParseUnionPushDeferred($e, $ndr, $varname);
$self->deindent;
$self->pidl("}");
@@ -1998,7 +2060,7 @@ sub ParseUnionPrint($$$$$)
$self->start_flags($e, $ndr);
- $self->pidl("level = ndr_print_get_switch_value($ndr, $varname);");
+ $self->pidl("level = ndr_print_steal_switch_value($ndr, $varname);");
$self->pidl("ndr_print_union($ndr, name, level, \"$name\");");
@@ -2038,9 +2100,17 @@ sub ParseUnionPullPrimitives($$$$$)
$self->pidl("NDR_CHECK(ndr_pull_union_align($ndr, $e->{ALIGN}));");
}
+ my $data_type = mapTypeName($switch_type);
+ my $fmt = mapTypeSpecifier($data_type);
+
+ if (!defined($fmt)) {
+ $data_type = "int";
+ $fmt = "%d";
+ }
+
$self->pidl("NDR_CHECK(ndr_pull_$switch_type($ndr, NDR_SCALARS, &_level));");
- $self->pidl("if (_level != level) {");
- $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_BAD_SWITCH, \"Bad switch value %u for $varname at \%s\", _level, __location__);");
+ $self->pidl("if (_level != level) {");
+ $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_BAD_SWITCH, \"Bad switch value %$fmt for $varname at \%s\", ($data_type)_level, __location__);");
$self->pidl("}");
}
@@ -2058,7 +2128,7 @@ sub ParseUnionPullPrimitives($$$$$)
foreach my $el (@{$e->{ELEMENTS}}) {
if ($el->{CASE} eq "default") {
$have_default = 1;
- }
+ }
$self->pidl("$el->{CASE}: {");
if ($el->{TYPE} ne "EMPTY") {
@@ -2077,7 +2147,7 @@ sub ParseUnionPullPrimitives($$$$$)
}
if (! $have_default) {
$self->pidl("default:");
- $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_BAD_SWITCH, \"Bad switch value \%u at \%s\", level, __location__);");
+ $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_BAD_SWITCH, \"Bad switch value %\"PRIu32\" at %s\", level, __location__);");
}
$self->deindent;
$self->pidl("}");
@@ -2088,21 +2158,21 @@ sub ParseUnionPullDeferred($$$$)
my ($self,$e,$ndr,$varname) = @_;
my $have_default = 0;
- if (defined($e->{PROPERTIES}{relative_base})) {
- # retrieve the current offset as base for relative pointers
- # based on the toplevel struct/union
- $self->pidl("NDR_CHECK(ndr_pull_setup_relative_base_offset2($ndr, $varname));");
- }
$self->pidl("switch (level) {");
$self->indent;
foreach my $el (@{$e->{ELEMENTS}}) {
if ($el->{CASE} eq "default") {
$have_default = 1;
- }
+ }
$self->pidl("$el->{CASE}:");
if ($el->{TYPE} ne "EMPTY") {
$self->indent;
+ if (defined($e->{PROPERTIES}{relative_base})) {
+ # retrieve the current offset as base for relative pointers
+ # based on the toplevel struct/union
+ $self->pidl("NDR_CHECK(ndr_pull_setup_relative_base_offset2($ndr, $varname));");
+ }
$self->ParseElementPull($el, $ndr, {$el->{NAME} => "$varname->$el->{NAME}"}, 0, 1);
$self->deindent;
}
@@ -2111,7 +2181,7 @@ sub ParseUnionPullDeferred($$$$)
}
if (! $have_default) {
$self->pidl("default:");
- $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_BAD_SWITCH, \"Bad switch value \%u at \%s\", level, __location__);");
+ $self->pidl("\treturn ndr_pull_error($ndr, NDR_ERR_BAD_SWITCH, \"Bad switch value %\"PRIu32\" at %s\", level, __location__);");
}
$self->deindent;
$self->pidl("}");
@@ -2149,20 +2219,21 @@ sub ParseUnionPull($$$$)
$self->pidl("NDR_PULL_CHECK_FLAGS(ndr, ndr_flags);");
$self->pidl("if (ndr_flags & NDR_SCALARS) {");
$self->indent;
- if (! $needs_deferred_switch) {
- $self->pidl("/* This token is not used again */");
- $self->pidl("level = ndr_pull_steal_switch_value($ndr, $varname);");
- } else {
- $self->pidl("level = ndr_pull_get_switch_value($ndr, $varname);");
- }
+ $self->pidl("/* This token is not used again (except perhaps below in the NDR_BUFFERS case) */");
+ $self->pidl("NDR_CHECK(ndr_pull_steal_switch_value($ndr, $varname, &level));");
$self->ParseUnionPullPrimitives($e,$ndr,$varname,$switch_type);
$self->deindent;
$self->pidl("}");
if ($needs_deferred_switch) {
$self->pidl("if (ndr_flags & NDR_BUFFERS) {");
$self->indent;
- $self->pidl("/* The token is not needed after this. */");
- $self->pidl("level = ndr_pull_steal_switch_value($ndr, $varname);");
+ # In case we had ndr_flags of NDR_SCALERS|NDR_BUFFERS
+ $self->pidl("if (!(ndr_flags & NDR_SCALARS)) {");
+ $self->indent;
+ $self->pidl("/* We didn't get it above, and the token is not needed after this. */");
+ $self->pidl("NDR_CHECK(ndr_pull_steal_switch_value($ndr, $varname, &level));");
+ $self->deindent;
+ $self->pidl("}");
$self->ParseUnionPullDeferred($e,$ndr,$varname);
$self->deindent;
$self->pidl("}");
@@ -2175,13 +2246,16 @@ sub ParseUnionPull($$$$)
sub DeclUnion($$$$)
{
my ($e,$t,$name,$varname) = @_;
+ if ($t eq "base") {
+ return "union $name $varname";
+ }
return ($t ne "pull"?"const ":"") . "union $name *$varname";
}
sub ArgsUnionNdrSize($$)
{
my ($d,$name) = @_;
- return "const union $name *r, uint32_t level, int flags";
+ return "const union $name *r, uint32_t level, libndr_flags flags";
}
$typefamily{UNION} = {
@@ -2192,7 +2266,7 @@ $typefamily{UNION} = {
SIZE_FN_ARGS => \&ArgsUnionNdrSize,
SIZE_FN_BODY => \&ParseUnionNdrSize,
};
-
+
#####################################################################
# parse a typedef - push side
sub ParseTypedefPush($$$$)
@@ -2240,7 +2314,7 @@ sub ParseTypedefNdrSize($$$$)
sub DeclTypedef($$$$)
{
my ($e, $t, $name, $varname) = @_;
-
+
return $typefamily{$e->{DATA}->{TYPE}}->{DECL}->($e->{DATA}, $t, $name, $varname);
}
@@ -2273,7 +2347,7 @@ sub ParsePipePushChunk($$)
my $args = $typefamily{$struct->{TYPE}}->{DECL}->($struct, "push", $name, $varname);
- $self->fn_declare("push", $struct, "enum ndr_err_code ndr_push_$name(struct ndr_push *$ndr, int ndr_flags, $args)") or return;
+ $self->fn_declare("push", $struct, "enum ndr_err_code ndr_push_$name(struct ndr_push *$ndr, ndr_flags_type ndr_flags, $args)") or return;
return if has_property($t, "nopush");
@@ -2306,7 +2380,7 @@ sub ParsePipePullChunk($$)
my $args = $typefamily{$struct->{TYPE}}->{DECL}->($struct, "pull", $name, $varname);
- $self->fn_declare("pull", $struct, "enum ndr_err_code ndr_pull_$name(struct ndr_pull *$ndr, int ndr_flags, $args)") or return;
+ $self->fn_declare("pull", $struct, "enum ndr_err_code ndr_pull_$name(struct ndr_pull *$ndr, ndr_flags_type ndr_flags, $args)") or return;
return if has_property($struct, "nopull");
@@ -2359,11 +2433,11 @@ sub ParseFunctionPrint($$)
my($self, $fn) = @_;
my $ndr = "ndr";
- $self->pidl_hdr("void ndr_print_$fn->{NAME}(struct ndr_print *$ndr, const char *name, int flags, const struct $fn->{NAME} *r);");
+ $self->pidl_hdr("void ndr_print_$fn->{NAME}(struct ndr_print *$ndr, const char *name, ndr_flags_type flags, const struct $fn->{NAME} *r);");
return if has_property($fn, "noprint");
- $self->pidl("_PUBLIC_ void ndr_print_$fn->{NAME}(struct ndr_print *$ndr, const char *name, int flags, const struct $fn->{NAME} *r)");
+ $self->pidl("_PUBLIC_ void ndr_print_$fn->{NAME}(struct ndr_print *$ndr, const char *name, ndr_flags_type flags, const struct $fn->{NAME} *r)");
$self->pidl("{");
$self->indent;
@@ -2394,7 +2468,7 @@ sub ParseFunctionPrint($$)
$self->pidl("$ndr->depth--;");
$self->deindent;
$self->pidl("}");
-
+
$self->pidl("if (flags & NDR_OUT) {");
$self->indent;
$self->pidl("ndr_print_struct($ndr, \"out\", \"$fn->{NAME}\");");
@@ -2412,7 +2486,7 @@ sub ParseFunctionPrint($$)
$self->pidl("$ndr->depth--;");
$self->deindent;
$self->pidl("}");
-
+
$self->pidl("$ndr->depth--;");
$self->deindent;
$self->pidl("}");
@@ -2422,18 +2496,18 @@ sub ParseFunctionPrint($$)
#####################################################################
# parse a function
sub ParseFunctionPush($$)
-{
+{
my($self, $fn) = @_;
my $ndr = "ndr";
- $self->fn_declare("push", $fn, "enum ndr_err_code ndr_push_$fn->{NAME}(struct ndr_push *$ndr, int flags, const struct $fn->{NAME} *r)") or return;
+ $self->fn_declare("push", $fn, "enum ndr_err_code ndr_push_$fn->{NAME}(struct ndr_push *$ndr, ndr_flags_type flags, const struct $fn->{NAME} *r)") or return;
return if has_property($fn, "nopush");
$self->pidl("{");
$self->indent;
- foreach my $e (@{$fn->{ELEMENTS}}) {
+ foreach my $e (@{$fn->{ELEMENTS}}) {
$self->DeclareArrayVariables($e);
}
@@ -2482,7 +2556,7 @@ sub ParseFunctionPush($$)
if ($fn->{RETURN_TYPE}) {
$self->pidl("NDR_CHECK(ndr_push_$fn->{RETURN_TYPE}($ndr, NDR_SCALARS, r->out.result));");
}
-
+
$self->deindent;
$self->pidl("}");
$self->pidl("return NDR_ERR_SUCCESS;");
@@ -2496,8 +2570,8 @@ sub AllocateArrayLevel($$$$$$)
my ($self,$e,$l,$ndr,$var,$size) = @_;
my $pl = GetPrevLevel($e, $l);
- if (defined($pl) and
- $pl->{TYPE} eq "POINTER" and
+ if (defined($pl) and
+ $pl->{TYPE} eq "POINTER" and
$pl->{POINTER_TYPE} eq "ref"
and not $l->{IS_ZERO_TERMINATED}) {
$self->pidl("if ($ndr->flags & LIBNDR_FLAG_REF_ALLOC) {");
@@ -2516,18 +2590,18 @@ sub AllocateArrayLevel($$$$$$)
#####################################################################
# parse a function
sub ParseFunctionPull($$)
-{
+{
my($self,$fn) = @_;
my $ndr = "ndr";
# pull function args
- $self->fn_declare("pull", $fn, "enum ndr_err_code ndr_pull_$fn->{NAME}(struct ndr_pull *$ndr, int flags, struct $fn->{NAME} *r)") or return;
+ $self->fn_declare("pull", $fn, "enum ndr_err_code ndr_pull_$fn->{NAME}(struct ndr_pull *$ndr, ndr_flags_type flags, struct $fn->{NAME} *r)") or return;
$self->pidl("{");
$self->indent;
# declare any internal pointers we need
- foreach my $e (@{$fn->{ELEMENTS}}) {
+ foreach my $e (@{$fn->{ELEMENTS}}) {
$self->DeclarePtrVariables($e);
$self->DeclareArrayVariables($e, "pull");
}
@@ -2551,7 +2625,7 @@ sub ParseFunctionPull($$)
# out to be too tricky (tridge)
foreach my $e (@{$fn->{ELEMENTS}}) {
next unless grep(/out/, @{$e->{DIRECTION}});
- $self->pidl("ZERO_STRUCT(r->out);");
+ $self->pidl("NDR_ZERO_STRUCT(r->out);");
$self->pidl("");
last;
}
@@ -2568,12 +2642,12 @@ sub ParseFunctionPull($$)
foreach my $e (@{$fn->{ELEMENTS}}) {
next unless (grep(/out/, @{$e->{DIRECTION}}));
- next unless ($e->{LEVELS}[0]->{TYPE} eq "POINTER" and
+ next unless ($e->{LEVELS}[0]->{TYPE} eq "POINTER" and
$e->{LEVELS}[0]->{POINTER_TYPE} eq "ref");
- next if (($e->{LEVELS}[1]->{TYPE} eq "DATA") and
- ($e->{LEVELS}[1]->{DATA_TYPE} eq "string"));
+ next if (($e->{LEVELS}[1]->{TYPE} eq "DATA") and
+ (Parse::Pidl::Typelist::is_string_type($e->{LEVELS}[1]->{DATA_TYPE})));
next if ($e->{LEVELS}[1]->{TYPE} eq "PIPE");
- next if (($e->{LEVELS}[1]->{TYPE} eq "ARRAY")
+ next if (($e->{LEVELS}[1]->{TYPE} eq "ARRAY")
and $e->{LEVELS}[1]->{IS_ZERO_TERMINATED});
if ($e->{LEVELS}[1]->{TYPE} eq "ARRAY") {
@@ -2596,11 +2670,11 @@ sub ParseFunctionPull($$)
}
} else {
$self->pidl("NDR_PULL_ALLOC($ndr, r->out.$e->{NAME});");
-
+
if (grep(/in/, @{$e->{DIRECTION}})) {
$self->pidl("*r->out.$e->{NAME} = *r->in.$e->{NAME};");
} else {
- $self->pidl("ZERO_STRUCTP(r->out.$e->{NAME});");
+ $self->pidl("NDR_ZERO_STRUCTP(r->out.$e->{NAME});");
}
}
}
@@ -2608,10 +2682,35 @@ sub ParseFunctionPull($$)
$self->add_deferred();
$self->deindent;
$self->pidl("}");
-
+
$self->pidl("if (flags & NDR_OUT) {");
$self->indent;
+ $self->pidl("#ifdef FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION");
+
+ # This is for fuzzers of ndr_pull where the out elements refer to
+ # in elements in size_is or length_is.
+ #
+ # Not actually very harmful but also not useful outside a fuzzer
+ foreach my $e (@{$fn->{ELEMENTS}}) {
+ next unless (grep(/in/, @{$e->{DIRECTION}}));
+ next unless ($e->{LEVELS}[0]->{TYPE} eq "POINTER" and
+ $e->{LEVELS}[0]->{POINTER_TYPE} eq "ref");
+ next if (($e->{LEVELS}[1]->{TYPE} eq "DATA") and
+ (Parse::Pidl::Typelist::is_string_type($e->{LEVELS}[1]->{DATA_TYPE})));
+ next if ($e->{LEVELS}[1]->{TYPE} eq "PIPE");
+ next if ($e->{LEVELS}[1]->{TYPE} eq "ARRAY");
+
+ $self->pidl("if (r->in.$e->{NAME} == NULL) {");
+ $self->indent;
+ $self->pidl("NDR_PULL_ALLOC($ndr, r->in.$e->{NAME});");
+ $self->pidl("NDR_ZERO_STRUCTP(r->in.$e->{NAME});");
+ $self->deindent;
+ $self->pidl("}");
+ }
+
+ $self->pidl("#endif /* FUZZING_BUILD_MODE_UNSAFE_FOR_PRODUCTION */");
+
$env = GenerateFunctionOutEnv($fn);
foreach my $e (@{$fn->{ELEMENTS}}) {
next unless grep(/out/, @{$e->{DIRECTION}});
@@ -2676,7 +2775,7 @@ sub ParseGeneratePipeArray($$$)
$self->deindent;
$self->pidl("},");
}
- $self->pidl("{ NULL, NULL, 0, NULL, NULL, NULL }");
+ $self->pidl("{ .name = NULL }");
$self->deindent;
$self->pidl("};");
$self->pidl("");
@@ -2752,27 +2851,57 @@ sub FunctionCallEntry($$)
return 1;
}
+sub StructEntry($$)
+{
+ my ($self, $d) = @_;
+ my $type_decl = $typefamily{$d->{TYPE}}->{DECL}->($d, "base", $d->{NAME}, "");
+
+ $self->pidl("\t{");
+ $self->pidl("\t\t.name = \"$d->{NAME}\",");
+ $self->pidl("\t\t.struct_size = sizeof($type_decl),");
+ $self->pidl("\t\t.ndr_push = (ndr_push_flags_fn_t) ndr_push_$d->{NAME},");
+ $self->pidl("\t\t.ndr_pull = (ndr_pull_flags_fn_t) ndr_pull_$d->{NAME},");
+ $self->pidl("\t\t.ndr_print = (ndr_print_function_t) ndr_print_flags_$d->{NAME},");
+ $self->pidl("\t},");
+ return 1;
+}
+
#####################################################################
# produce a function call table
sub FunctionTable($$)
{
my($self,$interface) = @_;
my $count = 0;
+ my $count_public_structs = 0;
my $uname = uc $interface->{NAME};
- return if ($#{$interface->{FUNCTIONS}}+1 == 0);
- return unless defined ($interface->{PROPERTIES}->{uuid});
+ foreach my $d (@{$interface->{TYPES}}) {
+ next unless (is_public_struct($d));
+ $count_public_structs += 1;
+ }
+ return if ($#{$interface->{FUNCTIONS}}+1 == 0 and
+ $count_public_structs == 0);
foreach my $d (@{$interface->{INHERITED_FUNCTIONS}},@{$interface->{FUNCTIONS}}) {
$self->FunctionCallPipes($d);
}
+ $self->pidl("static const struct ndr_interface_public_struct $interface->{NAME}\_public_structs[] = {");
+
+ foreach my $d (@{$interface->{TYPES}}) {
+ next unless (is_public_struct($d));
+ $self->StructEntry($d);
+ }
+ $self->pidl("\t{ .name = NULL }");
+ $self->pidl("};");
+ $self->pidl("");
+
$self->pidl("static const struct ndr_interface_call $interface->{NAME}\_calls[] = {");
foreach my $d (@{$interface->{INHERITED_FUNCTIONS}},@{$interface->{FUNCTIONS}}) {
$count += $self->FunctionCallEntry($d);
}
- $self->pidl("\t{ NULL, 0, NULL, NULL, NULL }");
+ $self->pidl("\t{ .name = NULL }");
$self->pidl("};");
$self->pidl("");
@@ -2781,7 +2910,7 @@ sub FunctionTable($$)
$self->pidl("\t$ep, ");
}
my $endpoint_count = $#{$interface->{ENDPOINTS}}+1;
-
+
$self->pidl("};");
$self->pidl("");
@@ -2795,18 +2924,22 @@ sub FunctionTable($$)
$interface->{PROPERTIES}->{authservice} = "\"host\"";
}
- $self->AuthServiceStruct($interface->{NAME},
+ $self->AuthServiceStruct($interface->{NAME},
$interface->{PROPERTIES}->{authservice});
$self->pidl("\nconst struct ndr_interface_table ndr_table_$interface->{NAME} = {");
$self->pidl("\t.name\t\t= \"$interface->{NAME}\",");
- $self->pidl("\t.syntax_id\t= {");
- $self->pidl("\t\t" . print_uuid($interface->{UUID}) .",");
- $self->pidl("\t\tNDR_$uname\_VERSION");
- $self->pidl("\t},");
- $self->pidl("\t.helpstring\t= NDR_$uname\_HELPSTRING,");
+ if (defined $interface->{PROPERTIES}->{uuid}) {
+ $self->pidl("\t.syntax_id\t= {");
+ $self->pidl("\t\t" . print_uuid($interface->{UUID}) .",");
+ $self->pidl("\t\tNDR_$uname\_VERSION");
+ $self->pidl("\t},");
+ $self->pidl("\t.helpstring\t= NDR_$uname\_HELPSTRING,");
+ }
$self->pidl("\t.num_calls\t= $count,");
$self->pidl("\t.calls\t\t= $interface->{NAME}\_calls,");
+ $self->pidl("\t.num_public_structs\t= $count_public_structs,");
+ $self->pidl("\t.public_structs\t\t= $interface->{NAME}\_public_structs,");
$self->pidl("\t.endpoints\t= &$interface->{NAME}\_endpoints,");
$self->pidl("\t.authservices\t= &$interface->{NAME}\_authservices");
$self->pidl("};");
@@ -2840,7 +2973,7 @@ sub HeaderInclude
#####################################################################
# generate prototypes and defines for the interface definitions
-# FIXME: these prototypes are for the DCE/RPC client functions, not the
+# FIXME: these prototypes are for the DCE/RPC client functions, not the
# NDR parser and so do not belong here, technically speaking
sub HeaderInterface($$$)
{
@@ -2862,7 +2995,7 @@ sub HeaderInterface($$$)
if (defined $interface->{PROPERTIES}->{uuid}) {
my $name = uc $interface->{NAME};
- $self->pidl_hdr("#define NDR_$name\_UUID " .
+ $self->pidl_hdr("#define NDR_$name\_UUID " .
Parse::Pidl::Util::make_str(lc($interface->{UUID})));
$self->pidl_hdr("#define NDR_$name\_VERSION $interface->{VERSION}");
@@ -2871,7 +3004,15 @@ sub HeaderInterface($$$)
if(!defined $interface->{PROPERTIES}->{helpstring}) { $interface->{PROPERTIES}->{helpstring} = "NULL"; }
$self->pidl_hdr("#define NDR_$name\_HELPSTRING $interface->{PROPERTIES}->{helpstring}");
+ }
+ my $count_public_structs = 0;
+ foreach my $d (@{$interface->{TYPES}}) {
+ next unless (has_property($d, "public"));
+ $count_public_structs += 1;
+ }
+ if ($#{$interface->{FUNCTIONS}}+1 > 0 or
+ $count_public_structs > 0) {
$self->pidl_hdr("extern const struct ndr_interface_table ndr_table_$interface->{NAME};");
}
@@ -2879,12 +3020,12 @@ sub HeaderInterface($$$)
next if has_property($_, "noopnum");
next if grep(/^$_->{NAME}$/,@{$interface->{INHERITED_FUNCTIONS}});
my $u_name = uc $_->{NAME};
-
+
my $val = sprintf("0x%02x", $count);
if (defined($interface->{BASE})) {
$val .= " + NDR_" . uc $interface->{BASE} . "_CALL_COUNT";
}
-
+
$self->pidl_hdr("#define NDR_$u_name ($val)");
$self->pidl_hdr("");
@@ -2919,7 +3060,7 @@ sub ParseTypePushFunction($$$)
my $args = $typefamily{$e->{TYPE}}->{DECL}->($e, "push", $e->{NAME}, $varname);
- $self->fn_declare("push", $e, "enum ndr_err_code ".TypeFunctionName("ndr_push", $e)."(struct ndr_push *$ndr, int ndr_flags, $args)") or return;
+ $self->fn_declare("push", $e, "enum ndr_err_code ".TypeFunctionName("ndr_push", $e)."(struct ndr_push *$ndr, ndr_flags_type ndr_flags, $args)") or return;
$self->pidl("{");
$self->indent;
@@ -2948,7 +3089,7 @@ sub ParseTypePullFunction($$)
my $args = $typefamily{$e->{TYPE}}->{DECL}->($e, "pull", $e->{NAME}, $varname);
- $self->fn_declare("pull", $e, "enum ndr_err_code ".TypeFunctionName("ndr_pull", $e)."(struct ndr_pull *$ndr, int ndr_flags, $args)") or return;
+ $self->fn_declare("pull", $e, "enum ndr_err_code ".TypeFunctionName("ndr_pull", $e)."(struct ndr_pull *$ndr, ndr_flags_type ndr_flags, $args)") or return;
$self->pidl("{");
$self->indent;
@@ -2975,6 +3116,18 @@ sub ParseTypePrintFunction($$$)
$self->pidl_hdr("void ".TypeFunctionName("ndr_print", $e)."(struct ndr_print *ndr, const char *name, $args);");
+ if (is_public_struct($e)) {
+ $self->pidl("static void ".TypeFunctionName("ndr_print_flags", $e).
+ "(struct ndr_print *$ndr, const char *name, ndr_flags_type unused, $args)"
+ );
+ $self->pidl("{");
+ $self->indent;
+ $self->pidl(TypeFunctionName("ndr_print", $e)."($ndr, name, $varname);");
+ $self->deindent;
+ $self->pidl("}");
+ $self->pidl("");
+ }
+
return if (has_property($e, "noprint"));
$self->pidl("_PUBLIC_ void ".TypeFunctionName("ndr_print", $e)."(struct ndr_print *$ndr, const char *name, $args)");
@@ -3040,8 +3193,8 @@ sub ParseInterface($$$)
($needed->{TypeFunctionName("ndr_print", $d)}) && $self->ParseTypePrintFunction($d, "r");
# Make sure we don't generate a function twice...
- $needed->{TypeFunctionName("ndr_push", $d)} =
- $needed->{TypeFunctionName("ndr_pull", $d)} =
+ $needed->{TypeFunctionName("ndr_push", $d)} =
+ $needed->{TypeFunctionName("ndr_pull", $d)} =
$needed->{TypeFunctionName("ndr_print", $d)} = 0;
($needed->{"ndr_size_$d->{NAME}"}) && $self->ParseTypeNdrSize($d);
@@ -3054,7 +3207,16 @@ sub ParseInterface($$$)
($needed->{"ndr_print_$d->{NAME}"}) && $self->ParseFunctionPrint($d);
}
+ # Allow compilation of generated files where replacement functions
+ # for structures declared nopull/nopush have not been provided.
+ #
+ # This makes sense when only the print functions are used
+ #
+ # Otherwise the ndr_table XXX will reference these
+
+ $self->pidl("#ifndef SKIP_NDR_TABLE_$interface->{NAME}");
$self->FunctionTable($interface);
+ $self->pidl("#endif /* SKIP_NDR_TABLE_$interface->{NAME} */");
$self->pidl_hdr("#endif /* _HEADER_NDR_$interface->{NAME} */");
}
@@ -3116,7 +3278,7 @@ sub NeededElement($$$)
return if ($e->{TYPE} eq "EMPTY");
- return if (ref($e->{TYPE}) eq "HASH" and
+ return if (ref($e->{TYPE}) eq "HASH" and
not defined($e->{TYPE}->{NAME}));
my ($t, $rt);
@@ -3180,7 +3342,7 @@ sub NeededType($$$)
return unless defined($t->{ELEMENTS});
for my $e (@{$t->{ELEMENTS}}) {
$e->{PARENT} = $t;
- if (has_property($e, "compression")) {
+ if (has_property($e, "compression")) {
$needed->{"compression"} = 1;
}
NeededElement($e, $req, $needed);
@@ -3198,7 +3360,7 @@ sub NeededInterface($$)
foreach (reverse @{$interface->{TYPES}}) {
if (has_property($_, "public")) {
- $needed->{TypeFunctionName("ndr_pull", $_)} = $needed->{TypeFunctionName("ndr_push", $_)} =
+ $needed->{TypeFunctionName("ndr_pull", $_)} = $needed->{TypeFunctionName("ndr_push", $_)} =
$needed->{TypeFunctionName("ndr_print", $_)} = 1;
}
@@ -3215,7 +3377,7 @@ sub TypeFunctionName($$)
{
my ($prefix, $t) = @_;
- return "$prefix\_$t->{NAME}" if (ref($t) eq "HASH" and
+ return "$prefix\_$t->{NAME}" if (ref($t) eq "HASH" and
$t->{TYPE} eq "TYPEDEF");
return "$prefix\_$t->{TYPE}_$t->{NAME}" if (ref($t) eq "HASH");
return "$prefix\_$t";
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Server.pm b/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Server.pm
index ad36f000..73359987 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Server.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/NDR/Server.pm
@@ -7,6 +7,7 @@
package Parse::Pidl::Samba4::NDR::Server;
use strict;
+use warnings;
use Parse::Pidl::Util;
use vars qw($VERSION);
@@ -81,10 +82,10 @@ sub Boilerplate_Iface($)
my $if_version = $interface->{VERSION};
pidl "
-static NTSTATUS $name\__op_bind(struct dcesrv_call_state *dce_call, const struct dcesrv_interface *iface, uint32_t if_version)
+static NTSTATUS $name\__op_bind(struct dcesrv_connection_context *context, const struct dcesrv_interface *iface)
{
#ifdef DCESRV_INTERFACE_$uname\_BIND
- return DCESRV_INTERFACE_$uname\_BIND(dce_call,iface);
+ return DCESRV_INTERFACE_$uname\_BIND(context,iface);
#else
return NT_STATUS_OK;
#endif
@@ -120,9 +121,6 @@ static NTSTATUS $name\__op_ndr_pull(struct dcesrv_call_state *dce_call, TALLOC_C
/* unravel the NDR for the packet */
ndr_err = ndr_table_$name.calls[opnum].ndr_pull(pull, NDR_IN, *r);
if (!NDR_ERR_CODE_IS_SUCCESS(ndr_err)) {
- dcerpc_log_packet(dce_call->conn->packet_log_dir,
- &ndr_table_$name, opnum, NDR_IN,
- &dce_call->pkt.u.request.stub_and_verifier);
dce_call->fault_code = DCERPC_FAULT_NDR;
return NT_STATUS_NET_WRITE_FAULT;
}
@@ -145,9 +143,6 @@ pidl "
}
if (dce_call->fault_code != 0) {
- dcerpc_log_packet(dce_call->conn->packet_log_dir,
- &ndr_table_$name, opnum, NDR_IN,
- &dce_call->pkt.u.request.stub_and_verifier);
return NT_STATUS_NET_WRITE_FAULT;
}
@@ -169,9 +164,6 @@ pidl "
}
if (dce_call->fault_code != 0) {
- dcerpc_log_packet(dce_call->conn->packet_log_dir,
- &ndr_table_$name, opnum, NDR_IN,
- &dce_call->pkt.u.request.stub_and_verifier);
return NT_STATUS_NET_WRITE_FAULT;
}
@@ -201,6 +193,7 @@ static const struct dcesrv_interface dcesrv\_$name\_interface = {
.dispatch = $name\__op_dispatch,
.reply = $name\__op_reply,
.ndr_push = $name\__op_ndr_push,
+ .local = NULL,
#ifdef DCESRV_INTERFACE_$uname\_FLAGS
.flags = DCESRV_INTERFACE_$uname\_FLAGS
#else
@@ -223,12 +216,22 @@ sub Boilerplate_Ep_Server($)
static NTSTATUS $name\__op_init_server(struct dcesrv_context *dce_ctx, const struct dcesrv_endpoint_server *ep_server)
{
int i;
+#ifdef DCESRV_INTERFACE_$uname\_NCACN_NP_SECONDARY_ENDPOINT
+ const char *ncacn_np_secondary_endpoint =
+ DCESRV_INTERFACE_$uname\_NCACN_NP_SECONDARY_ENDPOINT;
+#else
+ const char *ncacn_np_secondary_endpoint = NULL;
+#endif
for (i=0;i<ndr_table_$name.endpoints->count;i++) {
NTSTATUS ret;
const char *name = ndr_table_$name.endpoints->names[i];
- ret = dcesrv_interface_register(dce_ctx, name, &dcesrv_$name\_interface, NULL);
+ ret = dcesrv_interface_register(dce_ctx,
+ name,
+ ncacn_np_secondary_endpoint,
+ &dcesrv_$name\_interface,
+ NULL);
if (!NT_STATUS_IS_OK(ret)) {
DEBUG(1,(\"$name\_op_init_server: failed to register endpoint \'%s\'\\n\",name));
return ret;
@@ -238,6 +241,11 @@ static NTSTATUS $name\__op_init_server(struct dcesrv_context *dce_ctx, const str
return NT_STATUS_OK;
}
+static NTSTATUS $name\__op_shutdown_server(struct dcesrv_context *dce_ctx, const struct dcesrv_endpoint_server *ep_server)
+{
+ return NT_STATUS_OK;
+}
+
static bool $name\__op_interface_by_uuid(struct dcesrv_interface *iface, const struct GUID *uuid, uint32_t if_version)
{
if (dcesrv_$name\_interface.syntax_id.if_version == if_version &&
@@ -266,12 +274,20 @@ NTSTATUS dcerpc_server_$name\_init(TALLOC_CTX *ctx)
/* fill in our name */
.name = \"$name\",
+ /* Initialization flag */
+ .initialized = false,
+
/* fill in all the operations */
#ifdef DCESRV_INTERFACE_$uname\_INIT_SERVER
.init_server = DCESRV_INTERFACE_$uname\_INIT_SERVER,
#else
.init_server = $name\__op_init_server,
#endif
+#ifdef DCESRV_INTERFACE_$uname\_SHUTDOWN_SERVER
+ .shutdown_server = DCESRV_INTERFACE_$uname\_SHUTDOWN_SERVER,
+#else
+ .shutdown_server = $name\__op_shutdown_server,
+#endif
.interface_by_uuid = $name\__op_interface_by_uuid,
.interface_by_name = $name\__op_interface_by_name
};
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/NDR/ServerCompat.pm b/tools/pidl/lib/Parse/Pidl/Samba4/NDR/ServerCompat.pm
new file mode 100644
index 00000000..aaa10ffd
--- /dev/null
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/NDR/ServerCompat.pm
@@ -0,0 +1,624 @@
+###################################################
+# server boilerplate generator
+# Copyright tridge@samba.org 2003
+# Copyright metze@samba.org 2004
+# Copyright scabrero@samba.org 2019
+# released under the GNU GPL
+
+package Parse::Pidl::Samba4::NDR::ServerCompat;
+
+use Exporter;
+@ISA = qw(Exporter);
+@EXPORT_OK = qw(Parse);
+
+use Parse::Pidl::Util qw(print_uuid has_property ParseExpr);
+use Parse::Pidl::Typelist qw(mapTypeName);
+use Parse::Pidl qw(error fatal);
+use Parse::Pidl::NDR qw(ContainsPipe GetNextLevel);
+use Parse::Pidl::Samba4 qw(ElementStars);
+use Parse::Pidl::Samba4::Header qw(GenerateFunctionOutEnv);
+
+use vars qw($VERSION);
+$VERSION = '1.0';
+
+use strict;
+
+sub indent($) { my ($self) = @_; $self->{tabs}.="\t"; }
+sub deindent($) { my ($self) = @_; $self->{tabs} = substr($self->{tabs}, 1); }
+sub pidl($$) { my ($self,$txt) = @_; $self->{res} .= $txt ? "$self->{tabs}$txt\n" : "\n"; }
+sub pidlnoindent($$) { my ($self,$txt) = @_; $self->{res} .= $txt ? "$txt\n" : "\n"; }
+sub pidl_hdr($$) { my ($self, $txt) = @_; $self->{res_hdr} .= "$txt\n"; }
+sub pidl_both($$) { my ($self, $txt) = @_; $self->{hdr} .= "$txt\n"; $self->{res_hdr} .= "$txt\n"; }
+
+sub new($)
+{
+ my ($class) = shift;
+ my $self = { res => "", res_hdr => "", tabs => "" };
+ bless($self, $class);
+}
+
+sub decl_level($$)
+{
+ my ($self, $e, $l) = @_;
+ my $res = "";
+
+ if (has_property($e, "charset")) {
+ $res .= "const char";
+ } else {
+ $res .= mapTypeName($e->{TYPE});
+ }
+
+ my $stars = ElementStars($e, $l);
+
+ $res .= " ".$stars unless ($stars eq "");
+
+ return $res;
+}
+
+sub alloc_out_var($$$$$)
+{
+ my ($self, $e, $mem_ctx, $name, $env, $alloc_error_block) = @_;
+
+ my $l = $e->{LEVELS}[0];
+
+ # we skip pointer to arrays
+ if ($l->{TYPE} eq "POINTER") {
+ my $nl = GetNextLevel($e, $l);
+ $l = $nl if ($nl->{TYPE} eq "ARRAY");
+ } elsif
+
+ # we don't support multi-dimensional arrays yet
+ ($l->{TYPE} eq "ARRAY") {
+ my $nl = GetNextLevel($e, $l);
+ if ($nl->{TYPE} eq "ARRAY") {
+ fatal($e->{ORIGINAL},"multi-dimensional [out] arrays are not supported!");
+ }
+ } else {
+ # neither pointer nor array, no need to alloc something.
+ return;
+ }
+
+ if ($l->{TYPE} eq "ARRAY") {
+ unless(defined($l->{SIZE_IS})) {
+ error($e->{ORIGINAL}, "No size known for array `$e->{NAME}'");
+ $self->pidl("#error No size known for array `$e->{NAME}'");
+ } else {
+ my $size = ParseExpr($l->{SIZE_IS}, $env, $e);
+ $self->pidl("$name = talloc_zero_array($mem_ctx, " . $self->decl_level($e, 1) . ", $size);");
+ }
+ } else {
+ $self->pidl("$name = talloc_zero($mem_ctx, " . $self->decl_level($e, 1) . ");");
+ }
+
+ $self->pidl("if ($name == NULL) {");
+ $self->indent();
+ foreach (@{$alloc_error_block}) {
+ $self->pidl($_);
+ }
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+}
+
+sub gen_fn_out($$)
+{
+ my ($self, $fn, $alloc_error_block) = @_;
+
+ my $hasout = 0;
+ foreach (@{$fn->{ELEMENTS}}) {
+ if (grep(/out/, @{$_->{DIRECTION}})) {
+ $hasout = 1;
+ }
+ }
+
+ if ($hasout) {
+ $self->pidl("NDR_ZERO_STRUCT(r2->out);");
+ }
+
+ foreach (@{$fn->{ELEMENTS}}) {
+ my @dir = @{$_->{DIRECTION}};
+ if (grep(/in/, @dir) and grep(/out/, @dir)) {
+ $self->pidl("r2->out.$_->{NAME} = r2->in.$_->{NAME};");
+ }
+ }
+
+ foreach (@{$fn->{ELEMENTS}}) {
+ next if ContainsPipe($_, $_->{LEVELS}[0]);
+
+ my @dir = @{$_->{DIRECTION}};
+
+ if (grep(/in/, @dir) and grep(/out/, @dir)) {
+ # noop
+ } elsif (grep(/out/, @dir) and not has_property($_, "represent_as")) {
+ my $env = GenerateFunctionOutEnv($fn, "r2->");
+ $self->alloc_out_var($_, "r2", "r2->out.$_->{NAME}", $env, $alloc_error_block);
+ }
+
+ }
+}
+
+#####################################################
+# generate the switch statement for function dispatch
+sub gen_dispatch_switch($)
+{
+ my ($self, $interface) = @_;
+
+ my @alloc_error_block = ("status = NT_STATUS_NO_MEMORY;",
+ "p->fault_state = DCERPC_FAULT_CANT_PERFORM;",
+ "goto fail;");
+ foreach my $fn (@{$interface->{FUNCTIONS}}) {
+ next if not defined($fn->{OPNUM});
+
+ my $fname = $fn->{NAME};
+ my $ufname = uc($fname);
+
+ $self->pidl("case $fn->{OPNUM}: { /* $fn->{NAME} */");
+ $self->indent();
+ $self->pidl("struct $fname *r2 = (struct $fname *)r;");
+ $self->pidl("if (DEBUGLEVEL >= 10) {");
+ $self->indent();
+ $self->pidl("NDR_PRINT_FUNCTION_DEBUG($fname, NDR_IN, r2);");
+ $self->deindent();
+ $self->pidl("}");
+
+ $self->gen_fn_out($fn, \@alloc_error_block);
+
+ $self->pidl_hdr("struct $fname;");
+
+ if ($fn->{RETURN_TYPE} && $fn->{RETURN_TYPE} ne "void") {
+ $self->pidl_hdr(mapTypeName($fn->{RETURN_TYPE}) . " _$fname(struct pipes_struct *p, struct $fname *r);");
+ $self->pidl("r2->out.result = _$fname(p, r2);");
+ } else {
+ $self->pidl_hdr("void _$fname(struct pipes_struct *p, struct $fname *r);");
+ $self->pidl("_$fname(p, r2);");
+ }
+
+ $self->pidl("break;");
+ $self->deindent();
+ $self->pidl("}");
+ }
+}
+
+#####################################################
+# generate the switch statement for function reply
+sub gen_reply_switch($)
+{
+ my ($self, $interface) = @_;
+
+ foreach my $fn (@{$interface->{FUNCTIONS}}) {
+ next if not defined($fn->{OPNUM});
+
+ $self->pidl("case $fn->{OPNUM}: { /* $fn->{NAME} */");
+ $self->indent();
+ $self->pidl("struct $fn->{NAME} *r2 = (struct $fn->{NAME} *)r;");
+ $self->pidl("if (dce_call->state_flags & DCESRV_CALL_STATE_FLAG_ASYNC) {");
+ $self->indent();
+ $self->pidl("DEBUG(5,(\"function $fn->{NAME} replied async\\n\"));");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("if (DEBUGLEVEL >= 10 && dce_call->fault_code == 0) {");
+ $self->indent();
+ $self->pidl("NDR_PRINT_FUNCTION_DEBUG($fn->{NAME}, NDR_OUT | NDR_SET_VALUES, r2);");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("if (dce_call->fault_code != 0) {");
+ $self->indent();
+ $self->pidl("DBG_WARNING(\"dcerpc_fault %s in $fn->{NAME}\\n\", dcerpc_errstr(mem_ctx, dce_call->fault_code));");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("break;");
+ $self->deindent();
+ $self->pidl("}");
+ }
+}
+
+#####################################################################
+# produce boilerplate code for an interface
+sub boilerplate_iface($)
+{
+ my ($self, $interface) = @_;
+
+ my $name = $interface->{NAME};
+ my $uname = uc $name;
+ my $uuid = lc($interface->{UUID});
+ my $if_version = $interface->{VERSION};
+
+ $self->pidl("static NTSTATUS $name\__op_bind(struct dcesrv_connection_context *context, const struct dcesrv_interface *iface)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidlnoindent("#ifdef DCESRV_INTERFACE_$uname\_BIND");
+ $self->pidl("return DCESRV_INTERFACE_$uname\_BIND(context,iface);");
+ $self->pidlnoindent("#else");
+ $self->pidl("return NT_STATUS_OK;");
+ $self->deindent();
+ $self->pidl("#endif");
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("static void $name\__op_unbind(struct dcesrv_connection_context *context, const struct dcesrv_interface *iface)");
+ $self->pidl("{");
+ $self->pidlnoindent("#ifdef DCESRV_INTERFACE_$uname\_UNBIND");
+ $self->indent();
+ $self->pidl("DCESRV_INTERFACE_$uname\_UNBIND(context, iface);");
+ $self->pidlnoindent("#else");
+ $self->pidl("return;");
+ $self->pidlnoindent("#endif");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl_hdr("NTSTATUS $name\__op_ndr_pull(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, struct ndr_pull *pull, void **r);");
+ $self->pidl("NTSTATUS $name\__op_ndr_pull(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, struct ndr_pull *pull, void **r)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("enum ndr_err_code ndr_err;");
+ $self->pidl("uint16_t opnum = dce_call->pkt.u.request.opnum;");
+ $self->pidl("");
+ $self->pidl("dce_call->fault_code = 0;");
+ $self->pidl("");
+ $self->pidl("if (opnum >= ndr_table_$name.num_calls) {");
+ $self->indent();
+ $self->pidl("dce_call->fault_code = DCERPC_FAULT_OP_RNG_ERROR;");
+ $self->pidl("return NT_STATUS_NET_WRITE_FAULT;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+ $self->pidl("*r = talloc_named(mem_ctx, ndr_table_$name.calls[opnum].struct_size, \"struct %s\", ndr_table_$name.calls[opnum].name);");
+ $self->pidl("NT_STATUS_HAVE_NO_MEMORY(*r);");
+ $self->pidl("");
+ $self->pidl("/* unravel the NDR for the packet */");
+ $self->pidl("ndr_err = ndr_table_$name.calls[opnum].ndr_pull(pull, NDR_IN, *r);");
+ $self->pidl("if (!NDR_ERR_CODE_IS_SUCCESS(ndr_err)) {");
+ $self->indent();
+ $self->pidl("dce_call->fault_code = DCERPC_FAULT_NDR;");
+ $self->pidl("return NT_STATUS_NET_WRITE_FAULT;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+ $self->pidl("return NT_STATUS_OK;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("static NTSTATUS $name\__op_dispatch_internal(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, void *r, enum s3compat_rpc_dispatch dispatch)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("uint16_t opnum = dce_call->pkt.u.request.opnum;");
+ $self->pidl("struct pipes_struct *p = NULL;");
+ $self->pidl("NTSTATUS status = NT_STATUS_OK;");
+ $self->pidl("bool impersonated = false;");
+ $self->pidl("");
+ $self->pidl("/* Retrieve pipes struct */");
+ $self->pidl("p = dcesrv_get_pipes_struct(dce_call->conn);");
+ $self->pidl("p->dce_call = dce_call;");
+ $self->pidl("p->mem_ctx = mem_ctx;");
+ $self->pidl("/* Reset pipes struct fault state */");
+ $self->pidl("p->fault_state = 0;");
+ $self->pidl("");
+
+ $self->pidl("/* Impersonate */");
+ $self->pidl("if (dispatch == S3COMPAT_RPC_DISPATCH_EXTERNAL) {");
+ $self->indent();
+ $self->pidl("impersonated = become_authenticated_pipe_user(dce_call->auth_state->session_info);");
+ $self->pidl("if (!impersonated) {");
+ $self->indent();
+ $self->pidl("dce_call->fault_code = DCERPC_FAULT_ACCESS_DENIED;");
+ $self->pidl("status = NT_STATUS_NET_WRITE_FAULT;");
+ $self->pidl("goto fail;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("switch (opnum) {");
+ $self->gen_dispatch_switch($interface);
+ $self->pidl("default:");
+ $self->indent();
+ $self->pidl("dce_call->fault_code = DCERPC_FAULT_OP_RNG_ERROR;");
+ $self->pidl("break;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidlnoindent("fail:");
+ $self->pidl("/* Unimpersonate */");
+ $self->pidl("if (impersonated) {");
+ $self->indent();
+ $self->pidl("unbecome_authenticated_pipe_user();");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("p->dce_call = NULL;");
+ $self->pidl("p->mem_ctx = NULL;");
+ $self->pidl("/* Check pipes struct fault state */");
+ $self->pidl("if (p->fault_state != 0) {");
+ $self->indent();
+ $self->pidl("dce_call->fault_code = p->fault_state;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("if (dce_call->fault_code != 0) {");
+ $self->indent();
+ $self->pidl("status = NT_STATUS_NET_WRITE_FAULT;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("return status;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl_hdr("NTSTATUS $name\__op_dispatch(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, void *r);");
+ $self->pidl("NTSTATUS $name\__op_dispatch(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, void *r)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("return $name\__op_dispatch_internal(dce_call, mem_ctx, r, S3COMPAT_RPC_DISPATCH_EXTERNAL);");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl_hdr("NTSTATUS $name\__op_reply(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, void *r);");
+ $self->pidl("NTSTATUS $name\__op_reply(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, void *r)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("uint16_t opnum = dce_call->pkt.u.request.opnum;");
+ $self->pidl("");
+ $self->pidl("switch (opnum) {");
+ $self->gen_reply_switch($interface);
+ $self->pidl("default:");
+ $self->indent();
+ $self->pidl("dce_call->fault_code = DCERPC_FAULT_OP_RNG_ERROR;");
+ $self->pidl("break;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("if (dce_call->fault_code != 0) {");
+ $self->indent();
+ $self->pidl("return NT_STATUS_NET_WRITE_FAULT;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+ $self->pidl("return NT_STATUS_OK;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl_hdr("NTSTATUS $name\__op_ndr_push(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, struct ndr_push *push, const void *r);");
+ $self->pidl("NTSTATUS $name\__op_ndr_push(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, struct ndr_push *push, const void *r)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("enum ndr_err_code ndr_err;");
+ $self->pidl("uint16_t opnum = dce_call->pkt.u.request.opnum;");
+ $self->pidl("");
+ $self->pidl("ndr_err = ndr_table_$name.calls[opnum].ndr_push(push, NDR_OUT, r);");
+ $self->pidl("if (!NDR_ERR_CODE_IS_SUCCESS(ndr_err)) {");
+ $self->indent();
+ $self->pidl("dce_call->fault_code = DCERPC_FAULT_NDR;");
+ $self->pidl("return NT_STATUS_NET_WRITE_FAULT;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+ $self->pidl("return NT_STATUS_OK;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ ##############################
+ #### LOCAL DISPATCH ####
+ ##############################
+ $self->pidl_hdr("NTSTATUS $name\__op_local(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, void *r);");
+ $self->pidl("NTSTATUS $name\__op_local(struct dcesrv_call_state *dce_call, TALLOC_CTX *mem_ctx, void *r)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("return $name\__op_dispatch_internal(dce_call, mem_ctx, r, S3COMPAT_RPC_DISPATCH_INTERNAL);");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("static const struct dcesrv_interface dcesrv\_$name\_interface = {");
+ $self->indent();
+ $self->pidl(".name = \"$name\",");
+ $self->pidl(".syntax_id = {".print_uuid($uuid).",$if_version},");
+ $self->pidl(".bind = $name\__op_bind,");
+ $self->pidl(".unbind = $name\__op_unbind,");
+ $self->pidl(".ndr_pull = $name\__op_ndr_pull,");
+ $self->pidl(".dispatch = $name\__op_dispatch,");
+ $self->pidl(".reply = $name\__op_reply,");
+ $self->pidl(".ndr_push = $name\__op_ndr_push,");
+ $self->pidl(".local = $name\__op_local,");
+ $self->pidlnoindent("#ifdef DCESRV_INTERFACE_$uname\_FLAGS");
+ $self->pidl(".flags = DCESRV_INTERFACE_$uname\_FLAGS");
+ $self->pidlnoindent("#else");
+ $self->pidl(".flags = 0");
+ $self->pidlnoindent("#endif");
+ $self->deindent();
+ $self->pidl("};");
+ $self->pidl("");
+}
+
+#####################################################################
+# produce boilerplate code for an endpoint server
+sub boilerplate_ep_server($)
+{
+ my ($self, $interface) = @_;
+ my $name = $interface->{NAME};
+ my $uname = uc $name;
+
+ $self->pidl("static NTSTATUS $name\__op_init_server(struct dcesrv_context *dce_ctx, const struct dcesrv_endpoint_server *ep_server)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("uint32_t i;");
+ $self->pidl("NTSTATUS ret;");
+ $self->pidl("");
+ $self->pidlnoindent("#ifdef DCESRV_INTERFACE_$uname\_NCACN_NP_SECONDARY_ENDPOINT");
+ $self->pidl("const char *ncacn_np_secondary_endpoint = DCESRV_INTERFACE_$uname\_NCACN_NP_SECONDARY_ENDPOINT;");
+ $self->pidlnoindent("#else");
+ $self->pidl("const char *ncacn_np_secondary_endpoint = NULL;");
+ $self->pidlnoindent("#endif");
+ $self->pidl("");
+ $self->pidl("for (i=0;i<ndr_table_$name.endpoints->count;i++) {");
+ $self->indent();
+ $self->pidl("const char *name = ndr_table_$name.endpoints->names[i];");
+ $self->pidl("");
+ $self->pidl("ret = dcesrv_interface_register(dce_ctx, name, ncacn_np_secondary_endpoint, &dcesrv_$name\_interface, NULL);");
+ $self->pidl("if (!NT_STATUS_IS_OK(ret)) {");
+ $self->indent();
+ $self->pidl("DBG_ERR(\"Failed to register endpoint \'%s\'\\n\",name);");
+ $self->pidl("return ret;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+ $self->pidl("return NT_STATUS_OK;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("static NTSTATUS $name\__op_shutdown_server(struct dcesrv_context *dce_ctx, const struct dcesrv_endpoint_server *ep_server)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("return NT_STATUS_OK;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("static bool $name\__op_interface_by_uuid(struct dcesrv_interface *iface, const struct GUID *uuid, uint32_t if_version)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("if (dcesrv_$name\_interface.syntax_id.if_version == if_version && GUID_equal(\&dcesrv\_$name\_interface.syntax_id.uuid, uuid)) {");
+ $self->indent();
+ $self->pidl("memcpy(iface,&dcesrv\_$name\_interface, sizeof(*iface));");
+ $self->pidl("return true;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+ $self->pidl("return false;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("static bool $name\__op_interface_by_name(struct dcesrv_interface *iface, const char *name)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("if (strcmp(dcesrv_$name\_interface.name, name)==0) {");
+ $self->indent();
+ $self->pidl("memcpy(iface, &dcesrv_$name\_interface, sizeof(*iface));");
+ $self->pidl("return true;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+ $self->pidl("return false;");
+ $self->deindent();
+ $self->pidl("}");
+ $self->pidl("");
+
+ $self->pidl("static const struct dcesrv_endpoint_server $name\_ep_server = {");
+ $self->indent();
+ $self->pidl("/* fill in our name */");
+ $self->pidl(".name = \"$name\",");
+ $self->pidl("");
+ $self->pidl("/* Initialization flag */");
+ $self->pidl(".initialized = false,");
+ $self->pidl("");
+ $self->pidl("/* fill in all the operations */");
+ $self->pidlnoindent("#ifdef DCESRV_INTERFACE_$uname\_INIT_SERVER");
+ $self->pidl(".init_server = DCESRV_INTERFACE_$uname\_INIT_SERVER,");
+ $self->pidlnoindent("#else");
+ $self->pidl(".init_server = $name\__op_init_server,");
+ $self->pidlnoindent("#endif");
+ $self->pidlnoindent("#ifdef DCESRV_INTERFACE_$uname\_SHUTDOWN_SERVER");
+ $self->pidl(".shutdown_server = DCESRV_INTERFACE_$uname\_SHUTDOWN_SERVER,");
+ $self->pidlnoindent("#else");
+ $self->pidl(".shutdown_server = $name\__op_shutdown_server,");
+ $self->pidlnoindent("#endif");
+ $self->pidl(".interface_by_uuid = $name\__op_interface_by_uuid,");
+ $self->pidl(".interface_by_name = $name\__op_interface_by_name");
+ $self->deindent();
+ $self->pidl("};");
+ $self->pidl("");
+
+ $self->pidl("const struct dcesrv_endpoint_server *$name\_get_ep_server(void)");
+ $self->pidl("{");
+ $self->indent();
+ $self->pidl("return &$name\_ep_server;");
+ $self->deindent();
+ $self->pidl("}");
+}
+
+#####################################################################
+# dcerpc server boilerplate from a parsed IDL structure
+sub parse_interface($)
+{
+ my ($self, $interface) = @_;
+ my $count = 0;
+ my $uif = uc($interface->{NAME});
+
+
+ $self->pidl_hdr("#ifndef __NDR_${uif}_SCOMPAT_H__");
+ $self->pidl_hdr("#define __NDR_${uif}_SCOMPAT_H__");
+ $self->pidl_hdr("");
+ $self->pidl_hdr("struct pipes_struct;");
+ $self->pidl_hdr("struct dcesrv_endpoint_server;");
+ $self->pidl_hdr("struct dcesrv_call_state;");
+ $self->pidl_hdr("");
+ $self->pidl_hdr("const struct dcesrv_endpoint_server *$interface->{NAME}\_get_ep_server(void);");
+ $self->pidl_hdr("");
+
+ if (!defined $interface->{PROPERTIES}->{uuid}) {
+ $self->pidl_hdr("#endif /* __NDR_${uif}_SCOMPAT_H__ */");
+ return;
+ }
+
+ if (!defined $interface->{PROPERTIES}->{version}) {
+ $interface->{PROPERTIES}->{version} = "0.0";
+ }
+
+ foreach my $fn (@{$interface->{FUNCTIONS}}) {
+ if (defined($fn->{OPNUM})) { $count++; }
+ }
+
+ if ($count == 0) {
+ $self->pidl_hdr("#endif /* __NDR_${uif}_SCOMPAT_H__ */");
+ return;
+ }
+
+ $self->pidl("/* $interface->{NAME} - dcerpc server boilerplate generated by pidl */");
+ $self->boilerplate_iface($interface);
+ $self->boilerplate_ep_server($interface);
+
+ $self->pidl_hdr("#endif /* __NDR_${uif}_SCOMPAT_H__ */");
+}
+
+sub Parse($$)
+{
+ my ($self, $ndr, $h_scompat, $header) = @_;
+
+ $self->pidl("/* s3 compat server functions auto-generated by pidl */");
+ $self->pidl("#include \"$header\"");
+ $self->pidl("#include \"$h_scompat\"");
+
+ $self->pidl("#include <librpc/rpc/dcesrv_core.h>");
+ $self->pidl("#include <rpc_server/rpc_config.h>");
+ $self->pidl("#include <rpc_server/rpc_server.h>");
+ $self->pidl("#include <util/debug.h>");
+ $self->pidl("");
+ $self->pidl("enum s3compat_rpc_dispatch {");
+ $self->indent();
+ $self->pidl("S3COMPAT_RPC_DISPATCH_EXTERNAL = 0x00000001,");
+ $self->pidl("S3COMPAT_RPC_DISPATCH_INTERNAL = 0x00000002,");
+ $self->deindent();
+ $self->pidl("};");
+ $self->pidl("");
+
+ foreach my $x (@{$ndr}) {
+ $self->parse_interface($x) if ($x->{TYPE} eq "INTERFACE" and not defined($x->{PROPERTIES}{object}));
+ }
+
+ return ($self->{res}, $self->{res_hdr});
+}
+
+1;
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/Python.pm b/tools/pidl/lib/Parse/Pidl/Samba4/Python.pm
index f418ac48..d7ccf830 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4/Python.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/Python.pm
@@ -4,11 +4,10 @@
# released under the GNU GPL
package Parse::Pidl::Samba4::Python;
-
-use Exporter;
-@ISA = qw(Exporter);
+use parent Parse::Pidl::Base;
use strict;
+use warnings;
use Parse::Pidl qw(warning fatal error);
use Parse::Pidl::Typelist qw(hasType resolveType getType mapTypeName expandAlias bitmap_type_fn enum_type_fn);
use Parse::Pidl::Util qw(has_property ParseExpr unmake_str);
@@ -17,6 +16,7 @@ use Parse::Pidl::CUtil qw(get_value_of get_pointer_to);
use Parse::Pidl::Samba4 qw(ArrayDynamicallyAllocated);
use Parse::Pidl::Samba4::Header qw(GenerateFunctionInEnv GenerateFunctionOutEnv EnvSubstituteValue GenerateStructEnv);
+
use vars qw($VERSION);
$VERSION = '0.01';
@@ -34,41 +34,11 @@ sub new($) {
bless($self, $class);
}
-sub pidl_hdr ($$)
-{
- my $self = shift;
- $self->{res_hdr} .= shift;
-}
-
-sub pidl($$)
-{
- my ($self, $d) = @_;
- if ($d) {
- if ((!($d =~ /^#/))) {
- $self->{res} .= $self->{tabs};
- }
- $self->{res} .= $d;
- }
- $self->{res} .= "\n";
-}
-
-sub indent($)
-{
- my ($self) = @_;
- $self->{tabs} .= "\t";
-}
-
-sub deindent($)
-{
- my ($self) = @_;
- $self->{tabs} = substr($self->{tabs}, 0, -1);
-}
-
sub PrettifyTypeName($$)
{
my ($name, $basename) = @_;
- $basename =~ s/^.*\.([^.]+)$/\1/;
+ $basename =~ s/^.*\.([^.]+)$/$1/;
$name =~ s/^$basename\_//;
@@ -83,7 +53,7 @@ sub Import
foreach (@imports) {
$_ = unmake_str($_);
s/\.idl$//;
- $self->pidl_hdr("#include \"librpc/gen_ndr/$_\.h\"\n");
+ $self->pidl_hdr("#include \"librpc/gen_ndr/$_\.h\"");
$self->register_module_import("samba.dcerpc.$_");
}
}
@@ -199,8 +169,16 @@ sub PythonElementGetSet($$$$$$) {
$self->pidl("static PyObject *py_$name\_get_$e->{NAME}(PyObject *obj, void *closure)");
$self->pidl("{");
$self->indent;
- $self->pidl("$cname *object = ($cname *)pytalloc_get_ptr(obj);");
+ $self->pidl("$cname *object = pytalloc_get_ptr(obj);");
$self->pidl("PyObject *py_$e->{NAME};");
+ my $l = $e->{LEVELS}[0];
+ if ($l->{TYPE} eq "POINTER") {
+ $self->pidl("if ($varname == NULL) {");
+ $self->indent;
+ $self->pidl("Py_RETURN_NONE;");
+ $self->deindent;
+ $self->pidl("}");
+ }
$self->ConvertObjectToPython("pytalloc_get_mem_ctx(obj)", $env, $e, $varname, "py_$e->{NAME}", "return NULL;");
$self->pidl("return py_$e->{NAME};");
$self->deindent;
@@ -210,9 +188,8 @@ sub PythonElementGetSet($$$$$$) {
$self->pidl("static int py_$name\_set_$e->{NAME}(PyObject *py_obj, PyObject *value, void *closure)");
$self->pidl("{");
$self->indent;
- $self->pidl("$cname *object = ($cname *)pytalloc_get_ptr(py_obj);");
+ $self->pidl("$cname *object = pytalloc_get_ptr(py_obj);");
my $mem_ctx = "pytalloc_get_mem_ctx(py_obj)";
- my $l = $e->{LEVELS}[0];
my $nl = GetNextLevel($e, $l);
if ($l->{TYPE} eq "POINTER" and
not ($nl->{TYPE} eq "ARRAY" and ($nl->{IS_FIXED} or is_charset_array($e, $nl))) and
@@ -279,10 +256,10 @@ sub PythonStruct($$$$$$)
# If the struct is not public there ndr_pull/ndr_push functions will
# be static so not callable from here
if (has_property($d, "public")) {
- $self->pidl("static PyObject *py_$name\_ndr_pack(PyObject *py_obj)");
+ $self->pidl("static PyObject *py_$name\_ndr_pack(PyObject *py_obj, PyObject *Py_UNUSED(ignored))");
$self->pidl("{");
$self->indent;
- $self->pidl("$cname *object = ($cname *)pytalloc_get_ptr(py_obj);");
+ $self->pidl("$cname *object = pytalloc_get_ptr(py_obj);");
$self->pidl("PyObject *ret = NULL;");
$self->pidl("DATA_BLOB blob;");
$self->pidl("enum ndr_err_code err;");
@@ -312,8 +289,8 @@ sub PythonStruct($$$$$$)
$self->pidl("static PyObject *py_$name\_ndr_unpack(PyObject *py_obj, PyObject *args, PyObject *kwargs)");
$self->pidl("{");
$self->indent;
- $self->pidl("$cname *object = ($cname *)pytalloc_get_ptr(py_obj);");
- $self->pidl("DATA_BLOB blob;");
+ $self->pidl("$cname *object = pytalloc_get_ptr(py_obj);");
+ $self->pidl("DATA_BLOB blob = {.data = NULL, .length = 0};");
$self->pidl("Py_ssize_t blob_length = 0;");
$self->pidl("enum ndr_err_code err;");
$self->pidl("const char * const kwnames[] = { \"data_blob\", \"allow_remaining\", NULL };");
@@ -359,15 +336,15 @@ sub PythonStruct($$$$$$)
$self->pidl("}");
$self->pidl("");
- $self->pidl("static PyObject *py_$name\_ndr_print(PyObject *py_obj)");
+ $self->pidl("static PyObject *py_$name\_ndr_print(PyObject *py_obj, PyObject *Py_UNUSED(ignored))");
$self->pidl("{");
$self->indent;
- $self->pidl("$cname *object = ($cname *)pytalloc_get_ptr(py_obj);");
+ $self->pidl("$cname *object = pytalloc_get_ptr(py_obj);");
$self->pidl("PyObject *ret;");
$self->pidl("char *retstr;");
$self->pidl("");
$self->pidl("retstr = ndr_print_struct_string(pytalloc_get_mem_ctx(py_obj), (ndr_print_fn_t)ndr_print_$name, \"$name\", object);");
- $self->pidl("ret = PyStr_FromString(retstr);");
+ $self->pidl("ret = PyUnicode_FromString(retstr);");
$self->pidl("talloc_free(retstr);");
$self->pidl("");
$self->pidl("return ret;");
@@ -379,7 +356,7 @@ sub PythonStruct($$$$$$)
$self->pidl("static PyMethodDef $py_methods\[] = {");
$self->indent;
$self->pidl("{ \"__ndr_pack__\", (PyCFunction)py_$name\_ndr_pack, METH_NOARGS, \"S.ndr_pack(object) -> blob\\nNDR pack\" },");
- $self->pidl("{ \"__ndr_unpack__\", (PyCFunction)py_$name\_ndr_unpack, METH_VARARGS|METH_KEYWORDS, \"S.ndr_unpack(class, blob, allow_remaining=False) -> None\\nNDR unpack\" },");
+ $self->pidl("{ \"__ndr_unpack__\", PY_DISCARD_FUNC_SIG(PyCFunction,py_$name\_ndr_unpack), METH_VARARGS|METH_KEYWORDS, \"S.ndr_unpack(class, blob, allow_remaining=False) -> None\\nNDR unpack\" },");
$self->pidl("{ \"__ndr_print__\", (PyCFunction)py_$name\_ndr_print, METH_NOARGS, \"S.ndr_print(object) -> None\\nNDR print\" },");
$self->pidl("{ NULL, NULL, 0, NULL }");
$self->deindent;
@@ -387,7 +364,7 @@ sub PythonStruct($$$$$$)
$self->pidl("");
}
- $self->pidl_hdr("static PyTypeObject $name\_Type;\n");
+ $self->pidl_hdr("static PyTypeObject $name\_Type;");
$self->pidl("");
my $docstring = $self->DocString($d, $name);
my $typeobject = "$name\_Type";
@@ -491,7 +468,62 @@ sub PythonFunctionStruct($$$$)
$self->pidl("static PyObject *py_$name\_new(PyTypeObject *type, PyObject *args, PyObject *kwargs)");
$self->pidl("{");
$self->indent;
- $self->pidl("return pytalloc_new($cname, type);");
+
+ # This creates a new, zeroed C structure and python object.
+ # These may not be valid or sensible values, but this is as
+ # well as we can do.
+
+ $self->pidl("PyObject *self = pytalloc_new($cname, type);");
+
+ # If there are any children that are ref pointers, we need to
+ # allocate something for them to point to just as the pull
+ # routine will when parsing the structure from NDR.
+ #
+ # We then make those pointers point to zeroed memory
+ #
+ # A ref pointer is a pointer in the C structure but a scalar
+ # on the wire. It is for a remote function like:
+ #
+ # int foo(int *i)
+ #
+ # This may be called with the pointer by reference eg foo(&i)
+ #
+ # That is why this only goes as far as the next level; deeply
+ # nested pointer chains will end in a NULL.
+
+ my @ref_elements;
+ foreach my $e (@{$fn->{ELEMENTS}}) {
+ if (has_property($e, "ref") && ! has_property($e, "charset")) {
+ if (!has_property($e, 'in') && !has_property($e, 'out')) {
+ die "ref pointer that is not in or out";
+ }
+ push @ref_elements, $e;
+ }
+ }
+ if (@ref_elements) {
+ $self->pidl("$cname *_self = ($cname *)pytalloc_get_ptr(self);");
+ $self->pidl("TALLOC_CTX *mem_ctx = pytalloc_get_mem_ctx(self);");
+ foreach my $e (@ref_elements) {
+ my $ename = $e->{NAME};
+ my $t = mapTypeName($e->{TYPE});
+ my $p = $e->{ORIGINAL}->{POINTERS} // 1;
+ if ($p > 1) {
+ $self->pidl("/* a pointer to a NULL pointer */");
+ $t .= ' ' . '*' x ($p - 1);
+ }
+
+ # We checked in the loop above that each ref
+ # pointer is in or out (or both)
+ if (has_property($e, 'in')) {
+ $self->pidl("_self->in.$ename = talloc_zero(mem_ctx, $t);");
+ }
+
+ if (has_property($e, 'out')) {
+ $self->pidl("_self->out.$ename = talloc_zero(mem_ctx, $t);");
+ }
+ }
+ }
+ $self->pidl("return self;");
$self->deindent;
$self->pidl("}");
$self->pidl("");
@@ -499,19 +531,19 @@ sub PythonFunctionStruct($$$$)
my $py_methods = "NULL";
my $ndr_call = "const struct ndr_interface_call *call = NULL;";
- my $object_ptr = "$cname *object = ($cname *)pytalloc_get_ptr(py_obj);";
+ my $object_ptr = "$cname *object = pytalloc_get_ptr(py_obj);";
- $self->pidl("static PyObject *py_$name\_ndr_opnum(PyTypeObject *type)");
+ $self->pidl("static PyObject *py_$name\_ndr_opnum(PyTypeObject *type, PyObject *Py_UNUSED(ignored))");
$self->pidl("{");
$self->indent;
$self->pidl("");
$self->pidl("");
- $self->pidl("return PyInt_FromLong($fn->{OPNUM});");
+ $self->pidl("return PyLong_FromLong($fn->{OPNUM});");
$self->deindent;
$self->pidl("}");
$self->pidl("");
- $self->pidl("static PyObject *py_$name\_ndr_pack(PyObject *py_obj, int ndr_inout_flags, uint32_t ndr_push_flags)");
+ $self->pidl("static PyObject *py_$name\_ndr_pack(PyObject *py_obj, ndr_flags_type ndr_inout_flags, libndr_flags ndr_push_flags)");
$self->pidl("{");
$self->indent;
$self->pidl("$ndr_call");
@@ -562,7 +594,7 @@ sub PythonFunctionStruct($$$$)
$self->pidl("const char * const kwnames[] = { \"bigendian\", \"ndr64\", NULL };");
$self->pidl("PyObject *bigendian_obj = NULL;");
$self->pidl("PyObject *ndr64_obj = NULL;");
- $self->pidl("uint32_t ndr_push_flags = 0;");
+ $self->pidl("libndr_flags ndr_push_flags = 0;");
$self->pidl("");
$self->pidl("if (!PyArg_ParseTupleAndKeywords(args, kwargs, \"|OO:__ndr_pack_in__\",");
$self->indent;
@@ -597,7 +629,7 @@ sub PythonFunctionStruct($$$$)
$self->pidl("const char * const kwnames[] = { \"bigendian\", \"ndr64\", NULL };");
$self->pidl("PyObject *bigendian_obj = NULL;");
$self->pidl("PyObject *ndr64_obj = NULL;");
- $self->pidl("uint32_t ndr_push_flags = 0;");
+ $self->pidl("libndr_flags ndr_push_flags = 0;");
$self->pidl("");
$self->pidl("if (!PyArg_ParseTupleAndKeywords(args, kwargs, \"|OO:__ndr_pack_out__\",");
$self->indent;
@@ -626,7 +658,7 @@ sub PythonFunctionStruct($$$$)
$self->pidl("}");
$self->pidl("");
- $self->pidl("static PyObject *py_$name\_ndr_unpack(PyObject *py_obj, const DATA_BLOB *blob, int ndr_inout_flags, uint32_t ndr_pull_flags, bool allow_remaining)");
+ $self->pidl("static PyObject *py_$name\_ndr_unpack(PyObject *py_obj, const DATA_BLOB *blob, ndr_flags_type ndr_inout_flags, libndr_flags ndr_pull_flags, bool allow_remaining)");
$self->pidl("{");
$self->indent;
$self->pidl("$ndr_call");
@@ -703,7 +735,7 @@ sub PythonFunctionStruct($$$$)
$self->pidl("const char * const kwnames[] = { \"data_blob\", \"bigendian\", \"ndr64\", \"allow_remaining\", NULL };");
$self->pidl("PyObject *bigendian_obj = NULL;");
$self->pidl("PyObject *ndr64_obj = NULL;");
- $self->pidl("uint32_t ndr_pull_flags = LIBNDR_FLAG_REF_ALLOC;");
+ $self->pidl("libndr_flags ndr_pull_flags = LIBNDR_FLAG_REF_ALLOC;");
$self->pidl("PyObject *allow_remaining_obj = NULL;");
$self->pidl("bool allow_remaining = false;");
$self->pidl("");
@@ -751,7 +783,7 @@ sub PythonFunctionStruct($$$$)
$self->pidl("const char * const kwnames[] = { \"data_blob\", \"bigendian\", \"ndr64\", \"allow_remaining\", NULL };");
$self->pidl("PyObject *bigendian_obj = NULL;");
$self->pidl("PyObject *ndr64_obj = NULL;");
- $self->pidl("uint32_t ndr_pull_flags = LIBNDR_FLAG_REF_ALLOC;");
+ $self->pidl("libndr_flags ndr_pull_flags = LIBNDR_FLAG_REF_ALLOC;");
$self->pidl("PyObject *allow_remaining_obj = NULL;");
$self->pidl("bool allow_remaining = false;");
$self->pidl("");
@@ -791,7 +823,7 @@ sub PythonFunctionStruct($$$$)
$self->pidl("}");
$self->pidl("");
- $self->pidl("static PyObject *py_$name\_ndr_print(PyObject *py_obj, const char *name, int ndr_inout_flags)");
+ $self->pidl("static PyObject *py_$name\_ndr_print(PyObject *py_obj, const char *name, ndr_flags_type ndr_inout_flags)");
$self->pidl("{");
$self->indent;
$self->pidl("$ndr_call");
@@ -809,7 +841,7 @@ sub PythonFunctionStruct($$$$)
$self->pidl("call = &ndr_table_$iface\.calls[$fn->{OPNUM}];");
$self->pidl("");
$self->pidl("retstr = ndr_print_function_string(pytalloc_get_mem_ctx(py_obj), call->ndr_print, name, ndr_inout_flags, object);");
- $self->pidl("ret = PyStr_FromString(retstr);");
+ $self->pidl("ret = PyUnicode_FromString(retstr);");
$self->pidl("TALLOC_FREE(retstr);");
$self->pidl("");
$self->pidl("return ret;");
@@ -817,7 +849,7 @@ sub PythonFunctionStruct($$$$)
$self->pidl("}");
$self->pidl("");
- $self->pidl("static PyObject *py_$name\_ndr_print_in(PyObject *py_obj)");
+ $self->pidl("static PyObject *py_$name\_ndr_print_in(PyObject *py_obj, PyObject *Py_UNUSED(ignored))");
$self->pidl("{");
$self->indent;
$self->pidl("return py_$name\_ndr_print(py_obj, \"$name\_in\", NDR_IN);");
@@ -825,7 +857,7 @@ sub PythonFunctionStruct($$$$)
$self->pidl("}");
$self->pidl("");
- $self->pidl("static PyObject *py_$name\_ndr_print_out(PyObject *py_obj)");
+ $self->pidl("static PyObject *py_$name\_ndr_print_out(PyObject *py_obj, PyObject *Py_UNUSED(ignored))");
$self->pidl("{");
$self->indent;
$self->pidl("return py_$name\_ndr_print(py_obj, \"$name\_out\", NDR_OUT);");
@@ -840,19 +872,19 @@ sub PythonFunctionStruct($$$$)
$self->indent;
$self->pidl("\"$modulename.$prettyname.opnum() -> ".sprintf("%d (0x%02x)", $fn->{OPNUM}, $fn->{OPNUM})." \" },");
$self->deindent;
- $self->pidl("{ \"__ndr_pack_in__\", (PyCFunction)py_$name\_ndr_pack_in, METH_VARARGS|METH_KEYWORDS,");
+ $self->pidl("{ \"__ndr_pack_in__\", PY_DISCARD_FUNC_SIG(PyCFunction,py_$name\_ndr_pack_in), METH_VARARGS|METH_KEYWORDS,");
$self->indent;
$self->pidl("\"S.ndr_pack_in(object, bigendian=False, ndr64=False) -> blob\\nNDR pack input\" },");
$self->deindent;
- $self->pidl("{ \"__ndr_pack_out__\", (PyCFunction)py_$name\_ndr_pack_out, METH_VARARGS|METH_KEYWORDS,");
+ $self->pidl("{ \"__ndr_pack_out__\", PY_DISCARD_FUNC_SIG(PyCFunction,py_$name\_ndr_pack_out), METH_VARARGS|METH_KEYWORDS,");
$self->indent;
$self->pidl("\"S.ndr_pack_out(object, bigendian=False, ndr64=False) -> blob\\nNDR pack output\" },");
$self->deindent;
- $self->pidl("{ \"__ndr_unpack_in__\", (PyCFunction)py_$name\_ndr_unpack_in, METH_VARARGS|METH_KEYWORDS,");
+ $self->pidl("{ \"__ndr_unpack_in__\", PY_DISCARD_FUNC_SIG(PyCFunction,py_$name\_ndr_unpack_in), METH_VARARGS|METH_KEYWORDS,");
$self->indent;
$self->pidl("\"S.ndr_unpack_in(class, blob, bigendian=False, ndr64=False, allow_remaining=False) -> None\\nNDR unpack input\" },");
$self->deindent;
- $self->pidl("{ \"__ndr_unpack_out__\", (PyCFunction)py_$name\_ndr_unpack_out, METH_VARARGS|METH_KEYWORDS,");
+ $self->pidl("{ \"__ndr_unpack_out__\", PY_DISCARD_FUNC_SIG(PyCFunction,py_$name\_ndr_unpack_out), METH_VARARGS|METH_KEYWORDS,");
$self->indent;
$self->pidl("\"S.ndr_unpack_out(class, blob, bigendian=False, ndr64=False, allow_remaining=False) -> None\\nNDR unpack output\" },");
$self->deindent;
@@ -863,7 +895,7 @@ sub PythonFunctionStruct($$$$)
$self->pidl("};");
$self->pidl("");
- $self->pidl_hdr("static PyTypeObject $name\_Type;\n");
+ $self->pidl_hdr("static PyTypeObject $name\_Type;");
$self->pidl("");
my $docstring = $self->DocString($fn, $name);
my $typeobject = "$name\_Type";
@@ -1235,7 +1267,7 @@ sub PythonType($$$$)
$self->pidl("PyObject *in = NULL;");
$self->pidl("$typename *out = NULL;");
$self->pidl("");
- $self->pidl("if (!PyArg_ParseTupleAndKeywords(args, kwargs, \"OiO:import\",");
+ $self->pidl("if (!PyArg_ParseTupleAndKeywords(args, kwargs, \"OiO:export\",");
$self->indent;
$self->pidl("discard_const_p(char *, kwnames),");
$self->pidl("&mem_ctx_obj,");
@@ -1269,12 +1301,12 @@ sub PythonType($$$$)
$py_methods = "py_$d->{NAME}_methods";
$self->pidl("static PyMethodDef $py_methods\[] = {");
$self->indent;
- $self->pidl("{ \"__import__\", (PyCFunction)py_$d->{NAME}\_import,");
+ $self->pidl("{ \"__import__\", PY_DISCARD_FUNC_SIG(PyCFunction,py_$d->{NAME}\_import),");
$self->indent;
$self->pidl("METH_VARARGS|METH_KEYWORDS|METH_CLASS,");
$self->pidl("\"T.__import__(mem_ctx, level, in) => ret.\" },");
$self->deindent;
- $self->pidl("{ \"__export__\", (PyCFunction)py_$d->{NAME}\_export,");
+ $self->pidl("{ \"__export__\", PY_DISCARD_FUNC_SIG(PyCFunction,py_$d->{NAME}\_export),");
$self->indent;
$self->pidl("METH_VARARGS|METH_KEYWORDS|METH_CLASS,");
$self->pidl("\"T.__export__(mem_ctx, level, in) => ret.\" },");
@@ -1294,7 +1326,7 @@ sub PythonType($$$$)
$self->pidl("");
$self->pidl("");
- $self->pidl_hdr("static PyTypeObject $typeobject;\n");
+ $self->pidl_hdr("static PyTypeObject $typeobject;");
$self->pidl("static PyTypeObject $typeobject = {");
$self->indent;
$self->pidl("PyVarObject_HEAD_INIT(NULL, 0)");
@@ -1349,7 +1381,7 @@ sub Interface($$$)
}
if (defined $interface->{PROPERTIES}->{uuid}) {
- $self->pidl_hdr("static PyTypeObject $interface->{NAME}_InterfaceType;\n");
+ $self->pidl_hdr("static PyTypeObject $interface->{NAME}_InterfaceType;");
$self->pidl("");
my @fns = ();
@@ -1387,7 +1419,7 @@ sub Interface($$$)
my ($infn, $outfn, $callfn, $prettyname, $docstring, $opnum) = @$d;
$self->pidl("{ \"$prettyname\", $docstring, (py_dcerpc_call_fn)$callfn, (py_data_pack_fn)$infn, (py_data_unpack_fn)$outfn, $opnum, &ndr_table_$interface->{NAME} },");
}
- $self->pidl("{ NULL }");
+ $self->pidl("{0}");
$self->deindent;
$self->pidl("};");
$self->pidl("");
@@ -1446,9 +1478,9 @@ sub Interface($$$)
$self->pidl("");
- my $signature = "\"$interface->{NAME}_abstract_syntax()\\n\"";
+ $signature = "\"$interface->{NAME}_abstract_syntax()\\n\"";
- my $docstring = $self->DocString($interface, $interface->{NAME}."_syntax");
+ $docstring = $self->DocString($interface, $interface->{NAME}."_syntax");
if ($docstring) {
$docstring = "$signature$docstring";
@@ -1482,7 +1514,7 @@ sub Interface($$$)
""]);
}
- $self->pidl_hdr("\n");
+ $self->pidl_hdr("");
}
sub register_module_method($$$$$)
@@ -1635,7 +1667,6 @@ sub ConvertStringFromPythonData($$$$$)
$self->pidl("unicode = PyUnicode_AsEncodedString($py_var, \"utf-8\", \"ignore\");");
$self->pidl("if (unicode == NULL) {");
$self->indent;
- $self->pidl("PyErr_NoMemory();");
$self->pidl("$fail");
$self->deindent;
$self->pidl("}");
@@ -1669,6 +1700,27 @@ sub ConvertStringFromPythonData($$$$$)
$self->pidl("}");
}
+sub ConvertU16StringFromPythonData($$$$$)
+{
+ my ($self, $mem_ctx, $py_var, $target, $fail) = @_;
+
+ $self->pidl("{");
+ $self->indent;
+ $self->pidl("unsigned char *str = NULL;");
+ $self->pidl("");
+ $self->pidl("str = PyUtf16String_FromBytes(");
+ $self->pidl(" $mem_ctx, $py_var);");
+ $self->pidl("if (str == NULL) {");
+ $self->indent;
+ $self->pidl("$fail");
+ $self->deindent;
+ $self->pidl("}");
+ $self->pidl("");
+ $self->pidl("$target = str;");
+ $self->deindent;
+ $self->pidl("}");
+}
+
sub ConvertObjectFromPythonData($$$$$$;$$)
{
my ($self, $mem_ctx, $cvar, $ctype, $target, $fail, $location, $switch) = @_;
@@ -1715,21 +1767,8 @@ sub ConvertObjectFromPythonData($$$$$$;$$)
$self->pidl("}");
$self->pidl("if (test_var > uint_max) {");
$self->indent;
- $self->pidl("PyErr_Format(PyExc_OverflowError, \"Expected type %s or %s within range 0 - %llu, got %llu\",\\");
- $self->pidl(" PyInt_Type.tp_name, PyLong_Type.tp_name, uint_max, test_var);");
- $self->pidl($fail);
- $self->deindent;
- $self->pidl("}");
- $self->pidl("$target = test_var;");
- $self->deindent;
- $self->pidl("} else if (PyInt_Check($cvar)) {");
- $self->indent;
- $self->pidl("long test_var;");
- $self->pidl("test_var = PyInt_AsLong($cvar);");
- $self->pidl("if (test_var < 0 || test_var > uint_max) {");
- $self->indent;
- $self->pidl("PyErr_Format(PyExc_OverflowError, \"Expected type %s or %s within range 0 - %llu, got %ld\",\\");
- $self->pidl(" PyInt_Type.tp_name, PyLong_Type.tp_name, uint_max, test_var);");
+ $self->pidl("PyErr_Format(PyExc_OverflowError, \"Expected type %s within range 0 - %llu, got %llu\",");
+ $self->pidl(" PyLong_Type.tp_name, uint_max, test_var);");
$self->pidl($fail);
$self->deindent;
$self->pidl("}");
@@ -1737,8 +1776,8 @@ sub ConvertObjectFromPythonData($$$$$$;$$)
$self->deindent;
$self->pidl("} else {");
$self->indent;
- $self->pidl("PyErr_Format(PyExc_TypeError, \"Expected type %s or %s\",\\");
- $self->pidl(" PyInt_Type.tp_name, PyLong_Type.tp_name);");
+ $self->pidl("PyErr_Format(PyExc_TypeError, \"Expected type %s\",");
+ $self->pidl(" PyLong_Type.tp_name);");
$self->pidl($fail);
$self->deindent;
$self->pidl("}");
@@ -1767,21 +1806,8 @@ sub ConvertObjectFromPythonData($$$$$$;$$)
$self->pidl("}");
$self->pidl("if (test_var < int_min || test_var > int_max) {");
$self->indent;
- $self->pidl("PyErr_Format(PyExc_OverflowError, \"Expected type %s or %s within range %lld - %lld, got %lld\",\\");
- $self->pidl(" PyInt_Type.tp_name, PyLong_Type.tp_name, int_min, int_max, test_var);");
- $self->pidl($fail);
- $self->deindent;
- $self->pidl("}");
- $self->pidl("$target = test_var;");
- $self->deindent;
- $self->pidl("} else if (PyInt_Check($cvar)) {");
- $self->indent;
- $self->pidl("long test_var;");
- $self->pidl("test_var = PyInt_AsLong($cvar);");
- $self->pidl("if (test_var < int_min || test_var > int_max) {");
- $self->indent;
- $self->pidl("PyErr_Format(PyExc_OverflowError, \"Expected type %s or %s within range %lld - %lld, got %ld\",\\");
- $self->pidl(" PyInt_Type.tp_name, PyLong_Type.tp_name, int_min, int_max, test_var);");
+ $self->pidl("PyErr_Format(PyExc_OverflowError, \"Expected type %s within range %lld - %lld, got %lld\",");
+ $self->pidl(" PyLong_Type.tp_name, int_min, int_max, test_var);");
$self->pidl($fail);
$self->deindent;
$self->pidl("}");
@@ -1789,8 +1815,8 @@ sub ConvertObjectFromPythonData($$$$$$;$$)
$self->deindent;
$self->pidl("} else {");
$self->indent;
- $self->pidl("PyErr_Format(PyExc_TypeError, \"Expected type %s or %s\",\\");
- $self->pidl(" PyInt_Type.tp_name, PyLong_Type.tp_name);");
+ $self->pidl("PyErr_Format(PyExc_TypeError, \"Expected type %s\",");
+ $self->pidl(" PyLong_Type.tp_name);");
$self->pidl($fail);
$self->deindent;
$self->pidl("}");
@@ -1803,7 +1829,8 @@ sub ConvertObjectFromPythonData($$$$$$;$$)
my $ctype_name = $self->use_type_variable($ctype);
unless (defined ($ctype_name)) {
error($location, "Unable to determine origin of type `" . mapTypeName($ctype) . "'");
- $self->pidl("PyErr_SetString(PyExc_TypeError, \"Can not convert C Type " . mapTypeName($ctype) . " from Python\");");
+ $self->pidl("PyErr_SetString(PyExc_TypeError, \"Cannot convert Python object to NDR $target\");");
+ $self->pidl("$fail");
return;
}
$self->pidl("PY_CHECK_TYPE($ctype_name, $cvar, $fail);");
@@ -1849,18 +1876,24 @@ sub ConvertObjectFromPythonData($$$$$$;$$)
return;
}
+ if ($actual_ctype->{TYPE} eq "SCALAR" and
+ $actual_ctype->{NAME} eq "u16string") {
+ $self->ConvertU16StringFromPythonData($mem_ctx, $cvar, $target, $fail);
+ return;
+ }
+
if ($actual_ctype->{TYPE} eq "SCALAR" and $actual_ctype->{NAME} eq "NTSTATUS") {
- $self->pidl("$target = NT_STATUS(PyInt_AsLong($cvar));");
+ $self->pidl("$target = NT_STATUS(PyLong_AsLong($cvar));");
return;
}
if ($actual_ctype->{TYPE} eq "SCALAR" and $actual_ctype->{NAME} eq "WERROR") {
- $self->pidl("$target = W_ERROR(PyInt_AsLong($cvar));");
+ $self->pidl("$target = W_ERROR(PyLong_AsLong($cvar));");
return;
}
if ($actual_ctype->{TYPE} eq "SCALAR" and $actual_ctype->{NAME} eq "HRESULT") {
- $self->pidl("$target = HRES_ERROR(PyInt_AsLong($cvar));");
+ $self->pidl("$target = HRES_ERROR(PyLong_AsLong($cvar));");
return;
}
@@ -1893,8 +1926,7 @@ sub ConvertObjectFromPythonLevel($$$$$$$$$)
if ($recurse == 0) {
$self->pidl("if ($py_var == NULL) {");
$self->indent;
- $self->pidl("PyErr_Format(PyExc_AttributeError, \"Cannot delete NDR object: " .
- mapTypeName($var_name) . "\");");
+ $self->pidl("PyErr_Format(PyExc_AttributeError, \"Cannot delete NDR object: $var_name\");");
$self->pidl($fail);
$self->deindent;
$self->pidl("}");
@@ -1902,6 +1934,9 @@ sub ConvertObjectFromPythonLevel($$$$$$$$$)
$recurse = $recurse + 1;
if ($l->{TYPE} eq "POINTER") {
+ my $need_deindent = 0;
+ my $need_deref = 0;
+
if ($l->{POINTER_TYPE} ne "ref") {
$self->pidl("if ($py_var == Py_None) {");
$self->indent;
@@ -1909,10 +1944,13 @@ sub ConvertObjectFromPythonLevel($$$$$$$$$)
$self->deindent;
$self->pidl("} else {");
$self->indent;
+ $need_deindent = 1;
+ if ($nl->{TYPE} eq "POINTER") {
+ $need_deref = 1;
+ }
}
- # if we want to handle more than one level of pointer in python interfaces
- # then this is where we would need to allocate it
- if ($l->{POINTER_TYPE} eq "ref") {
+
+ if ($l->{POINTER_TYPE} eq "ref" or $need_deref == 1) {
$self->pidl("$var_name = talloc_ptrtype($mem_ctx, $var_name);");
$self->pidl("if ($var_name == NULL) {");
$self->indent;
@@ -1932,11 +1970,20 @@ sub ConvertObjectFromPythonLevel($$$$$$$$$)
} else {
$self->pidl("$var_name = NULL;");
}
+ if ($need_deref == 1) {
+ my $ndr_pointer_typename = $self->import_type_variable("samba.dcerpc.base", "ndr_pointer");
+ $self->pidl("$py_var = py_dcerpc_ndr_pointer_deref($ndr_pointer_typename, $py_var);");
+ $self->pidl("if ($py_var == NULL) {");
+ $self->indent;
+ $self->pidl($fail);
+ $self->deindent;
+ $self->pidl("}");
+ }
unless ($nl->{TYPE} eq "DATA" and Parse::Pidl::Typelist::scalar_is_reference($nl->{DATA_TYPE})) {
$var_name = get_value_of($var_name);
}
$self->ConvertObjectFromPythonLevel($env, $mem_ctx, $py_var, $e, $nl, $var_name, $fail, $recurse);
- if ($l->{POINTER_TYPE} ne "ref") {
+ if ($need_deindent == 1) {
$self->deindent;
$self->pidl("}");
}
@@ -1955,7 +2002,7 @@ sub ConvertObjectFromPythonLevel($$$$$$$$$)
$self->pidl("int $counter;");
if (ArrayDynamicallyAllocated($e, $l)) {
$self->pidl("$var_name = talloc_array_ptrtype($mem_ctx, $var_name, PyList_GET_SIZE($py_var));");
- $self->pidl("if (!$var_name) { $fail; }");
+ $self->pidl("if (!$var_name) { $fail }");
$self->pidl("talloc_set_name_const($var_name, \"ARRAY: $var_name\");");
} else {
$self->pidl("if (ARRAY_SIZE($var_name) != PyList_GET_SIZE($py_var)) {");
@@ -1967,7 +2014,11 @@ sub ConvertObjectFromPythonLevel($$$$$$$$$)
}
$self->pidl("for ($counter = 0; $counter < PyList_GET_SIZE($py_var); $counter++) {");
$self->indent;
- $self->ConvertObjectFromPythonLevel($env, $var_name, "PyList_GET_ITEM($py_var, $counter)", $e, $nl, $var_name."[$counter]", $fail, 0);
+ if (ArrayDynamicallyAllocated($e, $l)) {
+ $self->ConvertObjectFromPythonLevel($env, $var_name, "PyList_GET_ITEM($py_var, $counter)", $e, $nl, "($var_name)"."[$counter]", $fail, 0);
+ } else {
+ $self->ConvertObjectFromPythonLevel($env, $mem_ctx, "PyList_GET_ITEM($py_var, $counter)", $e, $nl, "($var_name)"."[$counter]", $fail, 0);
+ }
$self->deindent;
$self->pidl("}");
$self->deindent;
@@ -2015,15 +2066,15 @@ sub ConvertScalarToPython($$$$)
$ctypename = expandAlias($ctypename);
if ($ctypename =~ /^(int64|dlong)$/) {
- return "ndr_PyLong_FromLongLong($cvar)";
+ return "PyLong_FromLongLong($cvar)";
}
if ($ctypename =~ /^(uint64|hyper|NTTIME_hyper|NTTIME|NTTIME_1sec|udlong|udlongr|uid_t|gid_t)$/) {
- return "ndr_PyLong_FromUnsignedLongLong($cvar)";
+ return "PyLong_FromUnsignedLongLong($cvar)";
}
if ($ctypename =~ /^(char|int|int8|int16|int32|time_t)$/) {
- return "PyInt_FromLong($cvar)";
+ return "PyLong_FromLong($cvar)";
}
# Needed to ensure unsigned values in a 32 or 16 bit enum is
@@ -2031,11 +2082,11 @@ sub ConvertScalarToPython($$$$)
# possibly 64 bit unsigned long. (enums are signed in C,
# unsigned in NDR)
if ($ctypename =~ /^(uint32|uint3264)$/) {
- return "ndr_PyLong_FromUnsignedLongLong((uint32_t)$cvar)";
+ return "PyLong_FromUnsignedLongLong((uint32_t)($cvar))";
}
if ($ctypename =~ /^(uint|uint8|uint16|uint1632)$/) {
- return "PyInt_FromLong((uint16_t)$cvar)";
+ return "PyLong_FromLong((uint16_t)($cvar))";
}
if ($ctypename eq "DATA_BLOB") {
@@ -2062,6 +2113,10 @@ sub ConvertScalarToPython($$$$)
return "PyString_FromStringOrNULL($cvar)";
}
+ if ($ctypename eq "u16string") {
+ return "PyBytes_FromUtf16StringOrNULL($cvar)";
+ }
+
# Not yet supported
if ($ctypename eq "string_array") {
return "pytalloc_GenericObject_reference_ex($mem_ctx, $cvar)";
@@ -2138,6 +2193,10 @@ sub ConvertObjectToPythonLevel($$$$$$$)
}
if ($l->{TYPE} eq "POINTER") {
+ my $need_wrap = 0;
+ if ($l->{POINTER_TYPE} ne "ref" and $nl->{TYPE} eq "POINTER") {
+ $need_wrap = 1;
+ }
if ($l->{POINTER_TYPE} ne "ref") {
if ($recurse == 0) {
$self->pidl("if ($var_name == NULL) {");
@@ -2164,6 +2223,19 @@ sub ConvertObjectToPythonLevel($$$$$$$)
$self->deindent;
$self->pidl("}");
}
+ if ($need_wrap) {
+ my $py_var_wrap = undef;
+ $need_wrap = 1;
+ $self->pidl("{");
+ $self->indent;
+ $py_var_wrap = "py_$e->{NAME}_level_$l->{LEVEL_INDEX}";
+ $self->pidl("PyObject *$py_var_wrap = $py_var;");
+ my $ndr_pointer_typename = $self->import_type_variable("samba.dcerpc.base", "ndr_pointer");
+ $self->pidl("$py_var = py_dcerpc_ndr_pointer_wrap($ndr_pointer_typename, $py_var_wrap);");
+ $self->pidl("Py_XDECREF($py_var_wrap);");
+ $self->deindent;
+ $self->pidl("}");
+ }
} elsif ($l->{TYPE} eq "ARRAY") {
if ($pl && $pl->{TYPE} eq "POINTER") {
$var_name = get_pointer_to($var_name);
@@ -2199,7 +2271,11 @@ sub ConvertObjectToPythonLevel($$$$$$$)
$self->indent;
my $member_var = "py_$e->{NAME}_$l->{LEVEL_INDEX}";
$self->pidl("PyObject *$member_var;");
- $self->ConvertObjectToPythonLevel($var_name, $env, $e, $nl, $var_name."[$counter]", $member_var, $fail, $recurse);
+ if (ArrayDynamicallyAllocated($e, $l)) {
+ $self->ConvertObjectToPythonLevel($var_name, $env, $e, $nl, "($var_name)"."[$counter]", $member_var, $fail, $recurse);
+ } else {
+ $self->ConvertObjectToPythonLevel($mem_ctx, $env, $e, $nl, "($var_name)"."[$counter]", $member_var, $fail, $recurse);
+ }
$self->pidl("PyList_SetItem($py_var, $counter, $member_var);");
$self->deindent;
$self->pidl("}");
@@ -2219,6 +2295,10 @@ sub ConvertObjectToPythonLevel($$$$$$$)
}
my $conv = $self->ConvertObjectToPythonData($mem_ctx, $l->{DATA_TYPE}, $var_name, $e->{ORIGINAL});
$self->pidl("$py_var = $conv;");
+ if ($conv eq "NULL") {
+ $self->pidl("PyErr_SetString(PyExc_NotImplementedError, \"Cannot convert NDR $var_name to Python\");");
+ $self->pidl("$fail");
+ }
} elsif ($l->{TYPE} eq "SUBCONTEXT") {
$self->ConvertObjectToPythonLevel($mem_ctx, $env, $e, $nl, $var_name, $py_var, $fail, $recurse);
} else {
@@ -2240,19 +2320,37 @@ sub Parse($$$$$)
$self->{BASENAME} = $basename;
+ my $ndr_hdr_include = "";
+ if (defined($ndr_hdr)) {
+ $ndr_hdr_include = "#include \"$ndr_hdr\"";
+ }
$self->pidl_hdr("
/* Python wrapper functions auto-generated by pidl */
#define PY_SSIZE_T_CLEAN 1 /* We use Py_ssize_t for PyArg_ParseTupleAndKeywords */
-#include <Python.h>
+#include \"lib/replace/system/python.h\"
#include \"python/py3compat.h\"
#include \"includes.h\"
+#include \"python/modules.h\"
#include <pytalloc.h>
#include \"librpc/rpc/pyrpc.h\"
#include \"librpc/rpc/pyrpc_util.h\"
#include \"$hdr\"
-#include \"$ndr_hdr\"
+$ndr_hdr_include
/*
+ * Suppress compiler warnings if the generated code does not call these
+ * functions
+ */
+#ifndef _MAYBE_UNUSED_
+#ifdef __has_attribute
+#if __has_attribute(unused)
+#define _MAYBE_UNUSED_ __attribute__ ((unused))
+#else
+#define _MAYBE_UNUSED_
+#endif
+#endif
+#endif
+/*
* These functions are here to ensure they can be optimized out by
* the compiler based on the constant input values
*/
@@ -2273,7 +2371,7 @@ static inline unsigned long long ndr_sizeof2uintmax(size_t var_size)
return 0;
}
-static inline long long ndr_sizeof2intmax(size_t var_size)
+static inline _MAYBE_UNUSED_ long long ndr_sizeof2intmax(size_t var_size)
{
switch (var_size) {
case 8:
@@ -2288,25 +2386,6 @@ static inline long long ndr_sizeof2intmax(size_t var_size)
return 0;
}
-
-static inline PyObject *ndr_PyLong_FromLongLong(long long v)
-{
- if (v > LONG_MAX || v < LONG_MIN) {
- return PyLong_FromLongLong(v);
- } else {
- return PyInt_FromLong(v);
- }
-}
-
-static inline PyObject *ndr_PyLong_FromUnsignedLongLong(unsigned long long v)
-{
- if (v > LONG_MAX) {
- return PyLong_FromUnsignedLongLong(v);
- } else {
- return PyInt_FromLong(v);
- }
-}
-
");
foreach my $x (@$ndr) {
@@ -2340,9 +2419,9 @@ static inline PyObject *ndr_PyLong_FromUnsignedLongLong(unsigned long long v)
$self->pidl("MODULE_INIT_FUNC($basename)");
$self->pidl("{");
$self->indent;
- $self->pidl("PyObject *m;");
+ $self->pidl("PyObject *m = NULL;");
foreach my $h (@{$self->{module_imports}}) {
- $self->pidl("PyObject *$h->{'key'};");
+ $self->pidl("PyObject *$h->{'key'} = NULL;");
}
$self->pidl("");
@@ -2351,20 +2430,20 @@ static inline PyObject *ndr_PyLong_FromUnsignedLongLong(unsigned long long v)
my $module_path = $h->{'val'};
$self->pidl("$var_name = PyImport_ImportModule(\"$module_path\");");
$self->pidl("if ($var_name == NULL)");
- $self->pidl("\treturn NULL;");
+ $self->pidl("\tgoto out;");
$self->pidl("");
}
foreach my $h (@{$self->{type_imports}}) {
my $type_var = "$h->{'key'}\_Type";
my $module_path = $h->{'val'};
- $self->pidl_hdr("static PyTypeObject *$type_var;\n");
+ $self->pidl_hdr("static PyTypeObject *$type_var;");
my $pretty_name = PrettifyTypeName($h->{'key'}, $module_path);
my $module_var = "dep_$module_path";
$module_var =~ s/\./_/g;
$self->pidl("$type_var = (PyTypeObject *)PyObject_GetAttrString($module_var, \"$pretty_name\");");
$self->pidl("if ($type_var == NULL)");
- $self->pidl("\treturn NULL;");
+ $self->pidl("\tgoto out;");
$self->pidl("");
}
@@ -2372,7 +2451,7 @@ static inline PyObject *ndr_PyLong_FromUnsignedLongLong(unsigned long long v)
foreach (@{$self->{ready_types}}) {
$self->pidl("if (PyType_Ready($_) < 0)");
- $self->pidl("\treturn NULL;");
+ $self->pidl("\tgoto out;");
}
$self->pidl($_) foreach (@{$self->{postreadycode}});
@@ -2388,16 +2467,16 @@ static inline PyObject *ndr_PyLong_FromUnsignedLongLong(unsigned long long v)
$self->pidl("m = PyModule_Create(&moduledef);");
$self->pidl("if (m == NULL)");
- $self->pidl("\treturn NULL;");
+ $self->pidl("\tgoto out;");
$self->pidl("");
foreach my $h (@{$self->{constants}}) {
my $pretty_name = PrettifyTypeName($h->{'key'}, $basename);
my $py_obj;
my ($ctype, $cvar) = @{$h->{'val'}};
if ($cvar =~ /^[0-9]+$/ or $cvar =~ /^0x[0-9a-fA-F]+$/) {
- $py_obj = "ndr_PyLong_FromUnsignedLongLong($cvar)";
+ $py_obj = "PyLong_FromUnsignedLongLong($cvar)";
} elsif ($cvar =~ /^".*"$/) {
- $py_obj = "PyStr_FromString($cvar)";
+ $py_obj = "PyUnicode_FromString($cvar)";
} else {
$py_obj = $self->ConvertObjectToPythonData("NULL", expandAlias($ctype), $cvar, undef);
}
@@ -2414,7 +2493,11 @@ static inline PyObject *ndr_PyLong_FromUnsignedLongLong(unsigned long long v)
$self->pidl("#ifdef PY_MOD_".uc($basename)."_PATCH");
$self->pidl("PY_MOD_".uc($basename)."_PATCH(m);");
$self->pidl("#endif");
-
+ $self->pidl("out:");
+ foreach my $h (@{$self->{module_imports}}) {
+ my $mod_var = $h->{'key'};
+ $self->pidl("Py_XDECREF($mod_var);");
+ }
$self->pidl("return m;");
$self->pidl("");
$self->deindent;
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/TDR.pm b/tools/pidl/lib/Parse/Pidl/Samba4/TDR.pm
index c0749304..c3282879 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4/TDR.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/TDR.pm
@@ -9,25 +9,20 @@ use Parse::Pidl::Util qw(has_property ParseExpr is_constant);
use Parse::Pidl::Samba4 qw(is_intree choose_header);
use Parse::Pidl::Typelist qw(mapTypeName);
-use Exporter;
-@ISA = qw(Exporter);
-@EXPORT_OK = qw(ParserType $ret $ret_hdr);
+use base Parse::Pidl::Base;
use vars qw($VERSION);
$VERSION = '0.01';
use strict;
+use warnings;
sub new($) {
my ($class) = shift;
- my $self = { ret => "", ret_hdr => "", tabs => "" };
+ my $self = { res => "", res_hdr => "", tabs => "" };
bless($self, $class);
}
-sub indent($) { my $self = shift; $self->{tabs}.="\t"; }
-sub deindent($) { my $self = shift; $self->{tabs} = substr($self->{tabs}, 1); }
-sub pidl($$) { my $self = shift; $self->{ret} .= $self->{tabs}.(shift)."\n"; }
-sub pidl_hdr($$) { my $self = shift; $self->{ret_hdr} .= (shift)."\n"; }
sub typearg($) {
my $t = shift;
return(", const char *name") if ($t eq "print");
@@ -277,7 +272,7 @@ sub Parser($$$$)
$self->pidl_hdr("");
foreach (@$idl) { $self->ParserInterface($_) if ($_->{TYPE} eq "INTERFACE"); }
- return ($self->{ret_hdr}, $self->{ret});
+ return ($self->{res_hdr}, $self->{res});
}
1;
diff --git a/tools/pidl/lib/Parse/Pidl/Samba4/Template.pm b/tools/pidl/lib/Parse/Pidl/Samba4/Template.pm
index 175bb124..870d2388 100644
--- a/tools/pidl/lib/Parse/Pidl/Samba4/Template.pm
+++ b/tools/pidl/lib/Parse/Pidl/Samba4/Template.pm
@@ -11,6 +11,7 @@ $VERSION = '0.01';
use Parse::Pidl::Util qw(genpad);
use strict;
+use warnings;
my($res);
diff --git a/tools/pidl/lib/Parse/Pidl/Typelist.pm b/tools/pidl/lib/Parse/Pidl/Typelist.pm
index 774554f0..2a98a16b 100644
--- a/tools/pidl/lib/Parse/Pidl/Typelist.pm
+++ b/tools/pidl/lib/Parse/Pidl/Typelist.pm
@@ -7,27 +7,30 @@ package Parse::Pidl::Typelist;
require Exporter;
@ISA = qw(Exporter);
-@EXPORT_OK = qw(hasType getType resolveType mapTypeName scalar_is_reference expandAlias
+@EXPORT_OK = qw(hasType getType resolveType mapTypeName mapTypeSpecifier scalar_is_reference expandAlias
mapScalarType addType typeIs is_signed is_scalar enum_type_fn
bitmap_type_fn mapType typeHasBody is_fixed_size_scalar
+ is_string_type
);
use vars qw($VERSION);
$VERSION = '0.01';
use Parse::Pidl::Util qw(has_property);
use strict;
+use warnings;
my %types = ();
my @reference_scalars = (
"string", "string_array", "nbt_string", "dns_string",
"wrepl_nbt_name", "dnsp_name", "dnsp_string",
- "ipv4address", "ipv6address"
+ "ipv4address", "ipv6address", "u16string"
);
my @non_fixed_size_scalars = (
"string", "string_array", "nbt_string", "dns_string",
- "wrepl_nbt_name", "dnsp_name", "dnsp_string"
+ "wrepl_nbt_name", "dnsp_name", "dnsp_string",
+ "u16string"
);
# a list of known scalar types
@@ -45,6 +48,7 @@ my %scalars = (
"int3264" => "int32_t",
"uint3264" => "uint32_t",
"hyper" => "uint64_t",
+ "int64" => "int64_t",
"dlong" => "int64_t",
"udlong" => "uint64_t",
"udlongr" => "uint64_t",
@@ -52,6 +56,7 @@ my %scalars = (
"pointer" => "void*",
"DATA_BLOB" => "DATA_BLOB",
"string" => "const char *",
+ "u16string" => "const unsigned char *",
"string_array" => "const char **",
"time_t" => "time_t",
"uid_t" => "uid_t",
@@ -70,6 +75,8 @@ my %scalars = (
"ipv6address" => "const char *",
"dnsp_name" => "const char *",
"dnsp_string" => "const char *",
+ "libndr_flags" => "libndr_flags",
+ "ndr_flags_type"=> "ndr_flags_type",
);
my %aliases = (
@@ -87,6 +94,18 @@ my %aliases = (
"mode_t" => "uint32",
);
+my %format_specifiers = (
+ "char" => "c",
+ "int8_t", => "\"PRId8\"",
+ "int16_t", => "\"PRId16\"",
+ "int32_t", => "\"PRId32\"",
+ "int64_t", => "\"PRId64\"",
+ "uint8_t", => "\"PRIu8\"",
+ "uint16_t", => "\"PRIu16\"",
+ "uint32_t", => "\"PRIu32\"",
+ "uint64_t", => "\"PRIu64\""
+);
+
sub expandAlias($)
{
my $name = shift;
@@ -119,8 +138,18 @@ sub resolveType($)
my ($ctype) = @_;
if (not hasType($ctype)) {
- # assume struct typedef
- return { TYPE => "TYPEDEF", NAME => $ctype, DATA => { TYPE => "STRUCT" } };
+ if (! ref $ctype) {
+ # it looks like a name.
+ # assume struct typedef
+ return { TYPE => "TYPEDEF", NAME => $ctype, DATA => { TYPE => "STRUCT" } };
+ }
+ if ($ctype->{NAME} && ($ctype->{TYPE} eq "STRUCT")) {
+ return {
+ TYPE => "TYPEDEF",
+ NAME => $ctype->{NAME},
+ DATA => $ctype
+ };
+ }
} else {
return getType($ctype);
}
@@ -158,7 +187,7 @@ sub hasType($)
my $t = shift;
if (ref($t) eq "HASH") {
return 1 if (not defined($t->{NAME}));
- return 1 if (defined($types{$t->{NAME}}) and
+ return 1 if (defined($types{$t->{NAME}}) and
$types{$t->{NAME}}->{TYPE} eq $t->{TYPE});
return 0;
}
@@ -184,13 +213,13 @@ sub is_scalar($)
sub is_scalar($);
my $type = shift;
- return 1 if (ref($type) eq "HASH" and
- ($type->{TYPE} eq "SCALAR" or $type->{TYPE} eq "ENUM" or
+ return 1 if (ref($type) eq "HASH" and
+ ($type->{TYPE} eq "SCALAR" or $type->{TYPE} eq "ENUM" or
$type->{TYPE} eq "BITMAP"));
if (my $dt = getType($type)) {
return is_scalar($dt->{DATA}) if ($dt->{TYPE} eq "TYPEDEF");
- return 1 if ($dt->{TYPE} eq "SCALAR" or $dt->{TYPE} eq "ENUM" or
+ return 1 if ($dt->{TYPE} eq "SCALAR" or $dt->{TYPE} eq "ENUM" or
$dt->{TYPE} eq "BITMAP");
}
@@ -214,6 +243,13 @@ sub scalar_is_reference($)
return 0;
}
+sub is_string_type
+{
+ my ($t) = @_;
+
+ return ($t eq "string" or $t eq "u16string");
+}
+
sub RegisterScalars()
{
foreach (keys %scalars) {
@@ -314,6 +350,14 @@ sub mapTypeName($)
}
+sub mapTypeSpecifier($)
+{
+ my $t = shift;
+ return undef unless defined($t);
+
+ return $format_specifiers{$t};
+}
+
sub LoadIdl($;$)
{
my $idl = shift;
diff --git a/tools/pidl/lib/Parse/Pidl/Util.pm b/tools/pidl/lib/Parse/Pidl/Util.pm
index 83e23937..7a6039ba 100644
--- a/tools/pidl/lib/Parse/Pidl/Util.pm
+++ b/tools/pidl/lib/Parse/Pidl/Util.pm
@@ -6,11 +6,12 @@ package Parse::Pidl::Util;
require Exporter;
@ISA = qw(Exporter);
-@EXPORT = qw(has_property property_matches ParseExpr ParseExprExt is_constant make_str unmake_str print_uuid MyDumper genpad);
+@EXPORT = qw(has_property property_matches ParseExpr ParseExprExt is_constant make_str unmake_str print_uuid MyDumper genpad parse_int parse_range);
use vars qw($VERSION);
$VERSION = '0.01';
use strict;
+use warnings;
use Parse::Pidl::Expr;
use Parse::Pidl qw(error);
@@ -190,6 +191,41 @@ sub genpad($)
return "\t"x($nt)." "x($ns);
}
+=item B<parse_int>
+
+Try to convert hex and octal strings to numbers. If a string doesn't
+look hexish or octish it will be left as is. If the unconverted string
+is actually a decimal number, Perl is likely to handle it correctly.
+
+=cut
+
+sub parse_int {
+ my $s = shift;
+ if ($s =~ /^0[xX][0-9A-Fa-f]+$/) {
+ return hex $s;
+ }
+ if ($s =~ /^0[0-7]+$/) {
+ return oct $s;
+ }
+ return $s;
+}
+
+=item B<parse_range>
+
+Read a range specification that might contain hex or octal numbers,
+and work out what those numbers are.
+
+=cut
+
+sub parse_range {
+ my $range = shift;
+ my ($low, $high) = split(/,/, $range, 2);
+ $low = parse_int($low);
+ $high = parse_int($high);
+ return ($low, $high);
+}
+
+
=back
=cut
diff --git a/tools/pidl/lib/Parse/Pidl/Wireshark/Conformance.pm b/tools/pidl/lib/Parse/Pidl/Wireshark/Conformance.pm
index 01a8c473..7f7ef184 100644
--- a/tools/pidl/lib/Parse/Pidl/Wireshark/Conformance.pm
+++ b/tools/pidl/lib/Parse/Pidl/Wireshark/Conformance.pm
@@ -115,6 +115,7 @@ $VERSION = '0.01';
@EXPORT_OK = qw(ReadConformance ReadConformanceFH valid_ft_type valid_base_type);
use strict;
+use warnings;
use Parse::Pidl qw(fatal warning error);
use Parse::Pidl::Util qw(has_property);
diff --git a/tools/pidl/lib/Parse/Pidl/Wireshark/NDR.pm b/tools/pidl/lib/Parse/Pidl/Wireshark/NDR.pm
index ada8dd66..6f672330 100644
--- a/tools/pidl/lib/Parse/Pidl/Wireshark/NDR.pm
+++ b/tools/pidl/lib/Parse/Pidl/Wireshark/NDR.pm
@@ -21,8 +21,9 @@ use Exporter;
@EXPORT_OK = qw(field2name %res PrintIdl StripPrefixes RegisterInterfaceHandoff register_hf_field CheckUsed ProcessImport ProcessInclude find_type DumpEttList DumpEttDeclaration DumpHfList DumpHfDeclaration DumpFunctionTable register_type register_ett);
use strict;
+use warnings;
use Parse::Pidl qw(error warning);
-use Parse::Pidl::Typelist qw(getType);
+use Parse::Pidl::Typelist qw(getType mapScalarType);
use Parse::Pidl::Util qw(has_property property_matches make_str);
use Parse::Pidl::NDR qw(ContainsString GetNextLevel);
use Parse::Pidl::Dump qw(DumpType DumpFunction);
@@ -41,6 +42,26 @@ my %ptrtype_mappings = (
"ptr" => "NDR_POINTER_PTR"
);
+my %variable_scalars = (
+ "int1632" => "int32_t",
+ "uint1632" => "uint32_t",
+ "int3264" => "int64_t",
+ "uint3264" => "uint64_t",
+);
+
+# map from an IDL type to a C header type, using the on-the-wire length.
+# Produces different results than mapScalarType in Parse::Pidl::Typelist
+# for the types that have different wire lengths in NDR and NDR64 (i.e.,
+# includes the padding for uint1632 and uint3264, unlike that function.)
+sub mapWireScalarType($)
+{
+ my ($name) = shift;
+
+ return $variable_scalars{$name} if defined($variable_scalars{$name});
+
+ return mapScalarType($name);
+}
+
sub StripPrefixes($$)
{
my ($s, $prefixes) = @_;
@@ -151,7 +172,7 @@ sub Enum($$$$)
}
$self->pidl_hdr("extern const value_string $valsstring\[];");
- $self->pidl_hdr("int $dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, guint8 *drep _U_, int hf_index _U_, g$e->{BASE_TYPE} *param _U_);");
+ $self->pidl_hdr("int $dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_, int hf_index _U_, " . mapWireScalarType($e->{BASE_TYPE}) ." *param _U_);");
$self->pidl_def("const value_string ".$valsstring."[] = {");
foreach (@{$e->{ELEMENTS}}) {
@@ -164,10 +185,10 @@ sub Enum($$$$)
$self->pidl_fn_start($dissectorname);
$self->pidl_code("int");
- $self->pidl_code("$dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, guint8 *drep _U_, int hf_index _U_, g$e->{BASE_TYPE} *param _U_)");
+ $self->pidl_code("$dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_, int hf_index _U_, " . mapWireScalarType($e->{BASE_TYPE}) . " *param _U_)");
$self->pidl_code("{");
$self->indent;
- $self->pidl_code("g$e->{BASE_TYPE} parameter=0;");
+ $self->pidl_code(mapWireScalarType($e->{BASE_TYPE}) . " parameter=0;");
$self->pidl_code("if (param) {");
$self->indent;
$self->pidl_code("parameter = *param;");
@@ -186,7 +207,9 @@ sub Enum($$$$)
my $enum_size = $e->{BASE_TYPE};
$enum_size =~ s/uint//g;
- $self->register_type($name, "offset = $dissectorname(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);", "FT_UINT$enum_size", "BASE_DEC", "0", "VALS($valsstring)", $enum_size / 8);
+ my $ws_base = "BASE_DEC";
+ $ws_base = "BASE_HEX" if (property_matches($e, "flag", ".*LIBNDR_PRINT_ARRAY_HEX.*"));
+ $self->register_type($name, "offset = $dissectorname(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);", "FT_UINT$enum_size", $ws_base, "0", "VALS($valsstring)", $enum_size / 8);
}
sub Pipe($$$$)
@@ -205,11 +228,11 @@ sub Bitmap($$$$)
$self->register_ett("ett_$ifname\_$name");
- $self->pidl_hdr("int $dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, guint8 *drep _U_, int hf_index _U_, guint32 param _U_);");
+ $self->pidl_hdr("int $dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_, int hf_index _U_, uint32_t param _U_);");
$self->pidl_fn_start($dissectorname);
$self->pidl_code("int");
- $self->pidl_code("$dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *parent_tree _U_, dcerpc_info* di _U_, guint8 *drep _U_, int hf_index _U_, guint32 param _U_)");
+ $self->pidl_code("$dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *parent_tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_, int hf_index _U_, uint32_t param _U_)");
$self->pidl_code("{");
$self->indent;
foreach (@{$e->{ELEMENTS}}) {
@@ -236,7 +259,7 @@ sub Bitmap($$$$)
$self->pidl_code("};");
}
- $self->pidl_code("g$e->{BASE_TYPE} flags;");
+ $self->pidl_code(mapWireScalarType($e->{BASE_TYPE}) . " flags;");
if ($e->{ALIGN} > 1) {
$self->pidl_code("ALIGN_TO_$e->{ALIGN}_BYTES;");
}
@@ -335,9 +358,10 @@ sub ElementLevel($$$$$$$$)
$self->pidl_code("offset = dissect_ndr_u" . $type . "array(tvb, offset, pinfo, tree, di, drep, $myname\_);");
} else {
my $nl = GetNextLevel($e,$l);
+ my $nl_ctype = mapScalarType($nl->{DATA_TYPE});
$self->pidl_code("char *data;");
$self->pidl_code("");
- $self->pidl_code("offset = dissect_ndr_$type" . "string(tvb, offset, pinfo, tree, di, drep, sizeof(g$nl->{DATA_TYPE}), $hf, FALSE, &data);");
+ $self->pidl_code("offset = dissect_ndr_$type" . "string(tvb, offset, pinfo, tree, di, drep, sizeof($nl_ctype), $hf, false, &data);");
$self->pidl_code("proto_item_append_text(tree, \": %s\", data);");
}
}
@@ -350,10 +374,10 @@ sub ElementLevel($$$$$$$$)
if (property_matches($e, "flag", ".*LIBNDR_FLAG_STR_SIZE4.*") and property_matches($e, "flag", ".*LIBNDR_FLAG_STR_LEN4.*")) {
$self->pidl_code("char *data;\n");
- $self->pidl_code("offset = dissect_ndr_cvstring(tvb, offset, pinfo, tree, di, drep, $bs, $hf, FALSE, &data);");
+ $self->pidl_code("offset = dissect_ndr_cvstring(tvb, offset, pinfo, tree, di, drep, $bs, $hf, false, &data);");
$self->pidl_code("proto_item_append_text(tree, \": %s\", data);");
} elsif (property_matches($e, "flag", ".*LIBNDR_FLAG_STR_SIZE4.*")) {
- $self->pidl_code("offset = dissect_ndr_vstring(tvb, offset, pinfo, tree, di, drep, $bs, $hf, FALSE, NULL);");
+ $self->pidl_code("offset = dissect_ndr_vstring(tvb, offset, pinfo, tree, di, drep, $bs, $hf, false, NULL);");
} elsif (property_matches($e, "flag", ".*STR_NULLTERM.*")) {
if ($bs == 2) {
$self->pidl_code("offset = dissect_null_term_wstring(tvb, offset, pinfo, tree, drep, $hf , 0);")
@@ -405,7 +429,7 @@ sub ElementLevel($$$$$$$$)
my $hf2 = $self->register_hf_field($hf."_", "Subcontext length", "$ifname.$pn.$_->{NAME}subcontext", "FT_UINT$num_bits", "BASE_HEX", "NULL", 0, "");
$num_bits = 3264 if ($num_bits == 32);
$self->{hf_used}->{$hf2} = 1;
- $self->pidl_code("guint$num_bits size;");
+ $self->pidl_code("uint${num_bits}_t size;");
$self->pidl_code("int conformant = di->conformant_run;");
$self->pidl_code("tvbuff_t *subtvb;");
$self->pidl_code("");
@@ -413,14 +437,14 @@ sub ElementLevel($$$$$$$$)
# and conformant run skips the dissections of scalars ...
$self->pidl_code("if (!conformant) {");
$self->indent;
- $self->pidl_code("guint32 saved_flags = di->call_data->flags;");
+ $self->pidl_code("uint32_t saved_flags = di->call_data->flags;");
$self->pidl_code("offset = dissect_ndr_uint$num_bits(tvb, offset, pinfo, tree, di, drep, $hf2, &size);");
# This is a subcontext, there is normally no such thing as
# 64 bit NDR is subcontext so we clear the flag so that we can
# continue to dissect handmarshalled stuff with pidl
$self->pidl_code("di->call_data->flags &= ~DCERPC_IS_NDR64;");
- $self->pidl_code("subtvb = tvb_new_subset_length_caplen(tvb, offset, (const gint)size, -1);");
+ $self->pidl_code("subtvb = tvb_new_subset_length_caplen(tvb, offset, (const int)size, -1);");
if ($param ne 0) {
$self->pidl_code("$myname\_(subtvb, 0, pinfo, tree, di, drep, $param);");
} else {
@@ -477,7 +501,7 @@ sub Element($$$$$$)
if (not defined($switch_raw_type)) {
die("Unknown type[$type]\n");
}
- my $switch_type = "g${switch_raw_type}";
+ my $switch_type = mapWireScalarType(${switch_raw_type});
if ($name ne "") {
$moreparam = ", $switch_type *".$name;
@@ -552,10 +576,10 @@ sub Element($$$$$$)
}
next if ($_->{TYPE} eq "SWITCH");
next if (defined($self->{conformance}->{noemit}->{"$dissectorname$add"}));
- $self->pidl_def("static int $dissectorname$add(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, guint8 *drep _U_$moreparam);");
+ $self->pidl_def("static int $dissectorname$add(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_$moreparam);");
$self->pidl_fn_start("$dissectorname$add");
$self->pidl_code("static int");
- $self->pidl_code("$dissectorname$add(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, guint8 *drep _U_$moreparam)");
+ $self->pidl_code("$dissectorname$add(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_$moreparam)");
$self->pidl_code("{");
$self->indent;
@@ -583,7 +607,7 @@ sub Function($$$)
my %dissectornames;
foreach (@{$fn->{ELEMENTS}}) {
- $dissectornames{$_->{NAME}} = $self->Element($_, $fn->{NAME}, $ifname, undef, undef) if not defined($dissectornames{$_->{NAME}});
+ $dissectornames{$_->{NAME}} = $self->Element($_, $fn->{NAME}, $ifname, undef, ()) if not defined($dissectornames{$_->{NAME}});
}
my $fn_name = $_->{NAME};
@@ -592,18 +616,18 @@ sub Function($$$)
$self->PrintIdl(DumpFunction($fn->{ORIGINAL}));
$self->pidl_fn_start("$ifname\_dissect\_$fn_name\_response");
$self->pidl_code("static int");
- $self->pidl_code("$ifname\_dissect\_${fn_name}_response(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, guint8 *drep _U_)");
+ $self->pidl_code("$ifname\_dissect\_${fn_name}_response(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_)");
$self->pidl_code("{");
$self->indent;
if ( not defined($fn->{RETURN_TYPE})) {
} elsif ($fn->{RETURN_TYPE} eq "NTSTATUS" or $fn->{RETURN_TYPE} eq "WERROR" or $fn->{RETURN_TYPE} eq "HRESULT")
{
- $self->pidl_code("guint32 status;\n");
+ $self->pidl_code("uint32_t status;\n");
} elsif (my $type = getType($fn->{RETURN_TYPE})) {
if ($type->{DATA}->{TYPE} eq "ENUM") {
- $self->pidl_code("g".Parse::Pidl::Typelist::enum_type_fn($type->{DATA}) . " status;\n");
+ $self->pidl_code(Parse::Pidl::Typelist::enum_type_fn($type->{DATA}) . "_t status;\n");
} elsif ($type->{DATA}->{TYPE} eq "SCALAR") {
- $self->pidl_code("g$fn->{RETURN_TYPE} status;\n");
+ $self->pidl_code(mapWireScalarType($fn->{RETURN_TYPE}) . " status;\n");
} else {
error($fn, "return type `$fn->{RETURN_TYPE}' not yet supported");
}
@@ -624,22 +648,21 @@ sub Function($$$)
} elsif ($fn->{RETURN_TYPE} eq "NTSTATUS") {
$self->pidl_code("offset = dissect_ntstatus(tvb, offset, pinfo, tree, di, drep, hf\_$ifname\_status, &status);\n");
$self->pidl_code("if (status != 0)");
- $self->pidl_code("\tcol_append_fstr(pinfo->cinfo, COL_INFO, \", Error: %s\", val_to_str(status, NT_errors, \"Unknown NT status 0x%08x\"));\n");
+ $self->pidl_code("\tcol_append_fstr(pinfo->cinfo, COL_INFO, \", Error: %s\", val_to_str_ext(status, &NT_errors_ext, \"Unknown NT status 0x%08x\"));\n");
$return_types{$ifname}->{"status"} = ["NTSTATUS", "NT Error"];
} elsif ($fn->{RETURN_TYPE} eq "WERROR") {
$self->pidl_code("offset = dissect_ndr_uint32(tvb, offset, pinfo, tree, di, drep, hf\_$ifname\_werror, &status);\n");
$self->pidl_code("if (status != 0)");
- $self->pidl_code("\tcol_append_fstr(pinfo->cinfo, COL_INFO, \", Error: %s\", val_to_str(status, WERR_errors, \"Unknown DOS error 0x%08x\"));\n");
+ $self->pidl_code("\tcol_append_fstr(pinfo->cinfo, COL_INFO, \", Error: %s\", val_to_str_ext(status, &WERR_errors_ext, \"Unknown DOS error 0x%08x\"));\n");
$return_types{$ifname}->{"werror"} = ["WERROR", "Windows Error"];
} elsif ($fn->{RETURN_TYPE} eq "HRESULT") {
$self->pidl_code("offset = dissect_ndr_uint32(tvb, offset, pinfo, tree, di, drep, hf\_$ifname\_hresult, &status);\n");
$self->pidl_code("if (status != 0)");
- $self->pidl_code("\tcol_append_fstr(pinfo->cinfo, COL_INFO, \", Error: %s\", val_to_str(status, HRES_errors, \"Unknown HRES error 0x%08x\"));\n");
+ $self->pidl_code("\tcol_append_fstr(pinfo->cinfo, COL_INFO, \", Error: %s\", val_to_str_ext(status, &HRES_errors_ext, \"Unknown HRES error 0x%08x\"));\n");
$return_types{$ifname}->{"hresult"} = ["HRESULT", "HRES Windows Error"];
} elsif (my $type = getType($fn->{RETURN_TYPE})) {
if ($type->{DATA}->{TYPE} eq "ENUM") {
- my $return_type = "g".Parse::Pidl::Typelist::enum_type_fn($type->{DATA});
my $return_dissect = "dissect_ndr_" .Parse::Pidl::Typelist::enum_type_fn($type->{DATA});
$self->pidl_code("offset = $return_dissect(tvb, offset, pinfo, tree, di, drep, hf\_$ifname\_$fn->{RETURN_TYPE}_status, &status);");
@@ -661,7 +684,7 @@ sub Function($$$)
$self->pidl_fn_start("$ifname\_dissect\_$fn_name\_request");
$self->pidl_code("static int");
- $self->pidl_code("$ifname\_dissect\_${fn_name}_request(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, guint8 *drep _U_)");
+ $self->pidl_code("$ifname\_dissect\_${fn_name}_request(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_)");
$self->pidl_code("{");
$self->indent;
$self->pidl_code("di->dcerpc_procedure_name=\"${fn_name}\";");
@@ -709,7 +732,7 @@ sub Struct($$$$)
if (not defined($switch_raw_type)) {
die("Unknown type[$_->{TYPE}]\n");
}
- my $switch_type = "g${switch_raw_type}";
+ my $switch_type = mapWireScalarType(${switch_raw_type});
if ($switch_type ne "") {
push @$vars, "$switch_type $v = 0;";
@@ -733,11 +756,11 @@ sub Struct($$$$)
$doalign = 0;
}
- $self->pidl_hdr("int $dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *parent_tree _U_, dcerpc_info* di _U_, guint8 *drep _U_, int hf_index _U_, guint32 param _U_);");
+ $self->pidl_hdr("int $dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *parent_tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_, int hf_index _U_, uint32_t param _U_);");
$self->pidl_fn_start($dissectorname);
$self->pidl_code("int");
- $self->pidl_code("$dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *parent_tree _U_, dcerpc_info* di _U_, guint8 *drep _U_, int hf_index _U_, guint32 param _U_)");
+ $self->pidl_code("$dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *parent_tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_, int hf_index _U_, uint32_t param _U_)");
$self->pidl_code("{");
$self->indent;
$self->pidl_code($_) foreach (@$vars);
@@ -746,7 +769,7 @@ sub Struct($$$$)
$self->pidl_code("proto_tree *tree = NULL;");
}
if (defined($doalign) and $doalign == 0) {
- $self->pidl_code("gboolean oldalign = di->no_align;");
+ $self->pidl_code("bool oldalign = di->no_align;");
}
$self->pidl_code("int old_offset;");
$self->pidl_code("");
@@ -756,7 +779,7 @@ sub Struct($$$$)
$self->pidl_code("ALIGN_TO_$e->{ALIGN}_BYTES;");
}
if ($doalign == 0) {
- $self->pidl_code("di->no_align = TRUE;");
+ $self->pidl_code("di->no_align = true;");
}
$self->pidl_code("");
}
@@ -813,7 +836,7 @@ sub Union($$$$)
foreach (@{$e->{ELEMENTS}}) {
$res.="\n\t\t$_->{CASE}:\n";
if ($_->{TYPE} ne "EMPTY") {
- $res.="\t\t\t".$self->Element($_, $name, $ifname, undef, undef)."\n";
+ $res.="\t\t\t".$self->Element($_, $name, $ifname, undef, ())."\n";
}
$res.="\t\tbreak;\n";
}
@@ -822,20 +845,20 @@ sub Union($$$$)
my $switch_dissect = undef;
my $switch_raw_type = SwitchType($e, $e->{SWITCH_TYPE});
if (defined($switch_raw_type)) {
- $switch_type = "g${switch_raw_type}";
+ $switch_type = mapWireScalarType(${switch_raw_type});
$switch_dissect = "dissect_ndr_${switch_raw_type}";
}
$self->pidl_fn_start($dissectorname);
$self->pidl_code("static int");
- $self->pidl_code("$dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *parent_tree _U_, dcerpc_info* di _U_, guint8 *drep _U_, int hf_index _U_, guint32 param _U_)");
+ $self->pidl_code("$dissectorname(tvbuff_t *tvb _U_, int offset _U_, packet_info *pinfo _U_, proto_tree *parent_tree _U_, dcerpc_info* di _U_, uint8_t *drep _U_, int hf_index _U_, uint32_t param _U_)");
$self->pidl_code("{");
$self->indent;
$self->pidl_code("proto_item *item = NULL;");
$self->pidl_code("proto_tree *tree = NULL;");
$self->pidl_code("int old_offset;");
if (!defined $switch_type) {
- $self->pidl_code("guint32 level = param;");
+ $self->pidl_code("uint32_t level = param;");
} else {
$self->pidl_code("$switch_type level;");
}
@@ -1009,7 +1032,7 @@ sub ProcessInterface($$)
$self->pidl_hdr("#define $define");
$self->pidl_hdr("");
- $self->pidl_def("static gint proto_dcerpc_$x->{NAME} = -1;");
+ $self->pidl_def("static int proto_dcerpc_$x->{NAME};");
$self->register_ett("ett_dcerpc_$x->{NAME}");
$self->register_hf_field("hf_$x->{NAME}_opnum", "Operation", "$x->{NAME}.opnum", "FT_UINT16", "BASE_DEC", "NULL", 0, "");
@@ -1034,7 +1057,7 @@ sub ProcessInterface($$)
my $maj = 0x0000FFFF & $x->{VERSION};
$maj =~ s/\.(.*)$//g;
- $self->pidl_def("static guint16 ver_dcerpc_$x->{NAME} = $maj;");
+ $self->pidl_def("static uint16_t ver_dcerpc_$x->{NAME} = $maj;");
$self->pidl_def("");
}
@@ -1047,7 +1070,7 @@ sub ProcessInterface($$)
my ($type, $desc) = @{$return_types{$x->{NAME}}->{$_}};
my $dt = $self->find_type($type);
$dt or die("Unable to find information about return type `$type'");
- $self->register_hf_field("hf_$x->{NAME}_$_", $desc, "$x->{NAME}.$_", $dt->{FT_TYPE}, "BASE_HEX", $dt->{VALSSTRING}, 0, "");
+ $self->register_hf_field("hf_$x->{NAME}_$_", $desc, "$x->{NAME}.$_", $dt->{FT_TYPE}, $dt->{BASE_TYPE}, $dt->{VALSSTRING}, 0, "");
$self->{hf_used}->{"hf_$x->{NAME}_$_"} = 1;
}
@@ -1105,6 +1128,7 @@ sub Initialize($$)
$self->register_type("uint3264", "offset = dissect_ndr_uint3264(tvb, offset, pinfo, tree, di, drep, \@HF\@, NULL);", "FT_UINT32", "BASE_DEC", 0, "NULL", 8);
$self->register_type("hyper", "offset = dissect_ndr_uint64(tvb, offset, pinfo, tree, di, drep, \@HF\@, NULL);", "FT_UINT64", "BASE_DEC", 0, "NULL", 8);
+ $self->register_type("int64", "offset = dissect_ndr_uint64(tvb, offset, pinfo, tree, di, drep, \@HF\@, NULL);", "FT_INT64", "BASE_DEC", 0, "NULL", 8);
$self->register_type("udlong", "offset = dissect_ndr_duint32(tvb, offset, pinfo, tree, di, drep, \@HF\@, NULL);", "FT_UINT64", "BASE_DEC", 0, "NULL", 4);
$self->register_type("bool8", "offset = PIDL_dissect_uint8(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);","FT_INT8", "BASE_DEC", 0, "NULL", 1);
$self->register_type("char", "offset = PIDL_dissect_uint8(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);","FT_INT8", "BASE_DEC", 0, "NULL", 1);
@@ -1113,19 +1137,19 @@ sub Initialize($$)
$self->register_type("GUID", "offset = dissect_ndr_uuid_t(tvb, offset, pinfo, tree, di, drep, \@HF\@, NULL);","FT_GUID", "BASE_NONE", 0, "NULL", 4);
$self->register_type("policy_handle", "offset = PIDL_dissect_policy_hnd(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);","FT_BYTES", "BASE_NONE", 0, "NULL", 4);
$self->register_type("NTTIME", "offset = dissect_ndr_nt_NTTIME(tvb, offset, pinfo, tree, di, drep, \@HF\@);","FT_ABSOLUTE_TIME", "ABSOLUTE_TIME_LOCAL", 0, "NULL", 4);
- $self->register_type("NTTIME_hyper", "offset = dissect_ndr_nt_NTTIME(tvb, offset, pinfo, tree, di, drep, \@HF\@);","FT_ABSOLUTE_TIME", "ABSOLUTE_TIME_LOCAL", 0, "NULL", 4);
+ $self->register_type("NTTIME_hyper", "offset = dissect_ndr_nt_NTTIME_hyper(tvb, offset, pinfo, tree, di, drep, \@HF\@);","FT_ABSOLUTE_TIME", "ABSOLUTE_TIME_LOCAL", 0, "NULL", 4);
$self->register_type("time_t", "offset = dissect_ndr_time_t(tvb, offset, pinfo,tree, di, drep, \@HF\@, NULL);","FT_ABSOLUTE_TIME", "ABSOLUTE_TIME_LOCAL", 0, "NULL", 4);
- $self->register_type("NTTIME_1sec", "offset = dissect_ndr_nt_NTTIME(tvb, offset, pinfo, tree, di, drep, \@HF\@);", "FT_ABSOLUTE_TIME", "ABSOLUTE_TIME_LOCAL", 0, "NULL", 4);
+ $self->register_type("NTTIME_1sec", "offset = dissect_ndr_nt_NTTIME_1sec(tvb, offset, pinfo, tree, di, drep, \@HF\@);", "FT_ABSOLUTE_TIME", "ABSOLUTE_TIME_LOCAL", 0, "NULL", 4);
$self->register_type("dom_sid28",
"offset = dissect_ndr_nt_SID28(tvb, offset, pinfo, tree, di, drep, \@HF\@);", "FT_STRING", "BASE_NONE", 0, "NULL", 4);
$self->register_type("SID",
"offset = dissect_ndr_nt_SID_with_options(tvb, offset, pinfo, tree, di, drep, param, \@HF\@);","FT_STRING", "BASE_NONE", 0, "NULL", 4);
$self->register_type("WERROR",
- "offset = PIDL_dissect_uint32(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);","FT_UINT32", "BASE_DEC", 0, "VALS(WERR_errors)", 4);
+ "offset = PIDL_dissect_uint32(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);","FT_UINT32", "BASE_HEX|BASE_EXT_STRING", 0, "&WERR_errors_ext", 4);
$self->register_type("NTSTATUS",
- "offset = PIDL_dissect_uint32(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);","FT_UINT32", "BASE_DEC", 0, "VALS(NT_errors)", 4);
+ "offset = PIDL_dissect_uint32(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);","FT_UINT32", "BASE_HEX|BASE_EXT_STRING", 0, "&NT_errors_ext", 4);
$self->register_type("HRESULT",
- "offset = PIDL_dissect_uint32(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);","FT_UINT32", "BASE_DEC", 0, "VALS(HRES_errors)", 4);
+ "offset = PIDL_dissect_uint32(tvb, offset, pinfo, tree, di, drep, \@HF\@, \@PARAM\@);","FT_UINT32", "BASE_HEX|BASE_EXT_STRING", 0, "&HRES_errors_ext", 4);
$self->register_type("ipv6address", "proto_tree_add_item(tree, \@HF\@, tvb, offset, 16, ENC_NA); offset += 16;", "FT_IPv6", "BASE_NONE", 0, "NULL", 16);
$self->register_type("ipv4address", "proto_tree_add_item(tree, \@HF\@, tvb, offset, 4, ENC_BIG_ENDIAN); offset += 4;", "FT_IPv4", "BASE_NONE", 0, "NULL", 4);
@@ -1149,7 +1173,7 @@ sub Parse($$$$$)
Pidl is a perl based IDL compiler for DCE/RPC idl files.
It is maintained by the Samba team, not the Wireshark team.
Instructions on how to download and install Pidl can be
- found at https://gitlab.com/wireshark/wireshark/-/wikis/Pidl
+ found at https://wiki.wireshark.org/Pidl
*/
";
@@ -1157,9 +1181,10 @@ sub Parse($$$$$)
$self->{res}->{headers} = "\n";
$self->{res}->{headers} .= "#include \"config.h\"\n";
- $self->{res}->{headers} .= "#include <glib.h>\n";
$self->{res}->{headers} .= "#include <string.h>\n";
- $self->{res}->{headers} .= "#include <epan/packet.h>\n\n";
+ $self->{res}->{headers} .= "#include <wsutil/array.h>\n";
+ $self->{res}->{headers} .= "#include <epan/packet.h>\n";
+ $self->{res}->{headers} .= "#include <epan/tfs.h>\n\n";
$self->{res}->{headers} .= "#include \"packet-dcerpc.h\"\n";
$self->{res}->{headers} .= "#include \"packet-dcerpc-nt.h\"\n";
@@ -1217,7 +1242,7 @@ sub register_ett($$)
sub DumpEttList
{
my ($ett) = @_;
- my $res = "\tstatic gint *ett[] = {\n";
+ my $res = "\tstatic int *ett[] = {\n";
foreach (@$ett) {
$res .= "\t\t&$_,\n";
}
@@ -1230,7 +1255,7 @@ sub DumpEttDeclaration
my ($ett) = @_;
my $res = "\n/* Ett declarations */\n";
foreach (@$ett) {
- $res .= "static gint $_ = -1;\n";
+ $res .= "static int $_;\n";
}
return "$res\n";
@@ -1296,7 +1321,7 @@ sub DumpHfDeclaration($)
foreach (sort(keys %{$self->{conformance}->{header_fields}}))
{
- $res .= "static gint $_ = -1;\n";
+ $res .= "static int $_;\n";
}
return "$res\n";
@@ -1339,7 +1364,7 @@ sub DumpFunctionTable($)
{
my $if = shift;
- my $res = "static dcerpc_sub_dissector $if->{NAME}\_dissectors[] = {\n";
+ my $res = "static const dcerpc_sub_dissector $if->{NAME}\_dissectors[] = {\n";
foreach (@{$if->{FUNCTIONS}}) {
my $fn_name = $_->{NAME};
$fn_name =~ s/^$if->{NAME}_//;
diff --git a/tools/pidl/lib/Parse/Yapp/Driver.pm b/tools/pidl/lib/Parse/Yapp/Driver.pm
deleted file mode 100644
index 3652be06..00000000
--- a/tools/pidl/lib/Parse/Yapp/Driver.pm
+++ /dev/null
@@ -1,471 +0,0 @@
-#
-# Module Parse::Yapp::Driver
-#
-# This module is part of the Parse::Yapp package available on your
-# nearest CPAN
-#
-# Any use of this module in a standalone parser make the included
-# text under the same copyright as the Parse::Yapp module itself.
-#
-# This notice should remain unchanged.
-#
-# (c) Copyright 1998-2001 Francois Desarmenien, all rights reserved.
-# (see the pod text in Parse::Yapp module for use and distribution rights)
-#
-
-package Parse::Yapp::Driver;
-
-require 5.004;
-
-use strict;
-
-use vars qw ( $VERSION $COMPATIBLE $FILENAME );
-
-$VERSION = '1.05';
-$COMPATIBLE = '0.07';
-$FILENAME=__FILE__;
-
-use Carp;
-
-#Known parameters, all starting with YY (leading YY will be discarded)
-my(%params)=(YYLEX => 'CODE', 'YYERROR' => 'CODE', YYVERSION => '',
- YYRULES => 'ARRAY', YYSTATES => 'ARRAY', YYDEBUG => '');
-#Mandatory parameters
-my(@params)=('LEX','RULES','STATES');
-
-sub new {
- my($class)=shift;
- my($errst,$nberr,$token,$value,$check,$dotpos);
- my($self)={ ERROR => \&_Error,
- ERRST => \$errst,
- NBERR => \$nberr,
- TOKEN => \$token,
- VALUE => \$value,
- DOTPOS => \$dotpos,
- STACK => [],
- DEBUG => 0,
- CHECK => \$check };
-
- _CheckParams( [], \%params, \@_, $self );
-
- exists($$self{VERSION})
- and $$self{VERSION} < $COMPATIBLE
- and croak "Yapp driver version $VERSION ".
- "incompatible with version $$self{VERSION}:\n".
- "Please recompile parser module.";
-
- ref($class)
- and $class=ref($class);
-
- bless($self,$class);
-}
-
-sub YYParse {
- my($self)=shift;
- my($retval);
-
- _CheckParams( \@params, \%params, \@_, $self );
-
- if($$self{DEBUG}) {
- _DBLoad();
- $retval = eval '$self->_DBParse()';#Do not create stab entry on compile
- $@ and die $@;
- }
- else {
- $retval = $self->_Parse();
- }
- $retval
-}
-
-sub YYData {
- my($self)=shift;
-
- exists($$self{USER})
- or $$self{USER}={};
-
- $$self{USER};
-
-}
-
-sub YYErrok {
- my($self)=shift;
-
- ${$$self{ERRST}}=0;
- undef;
-}
-
-sub YYNberr {
- my($self)=shift;
-
- ${$$self{NBERR}};
-}
-
-sub YYRecovering {
- my($self)=shift;
-
- ${$$self{ERRST}} != 0;
-}
-
-sub YYAbort {
- my($self)=shift;
-
- ${$$self{CHECK}}='ABORT';
- undef;
-}
-
-sub YYAccept {
- my($self)=shift;
-
- ${$$self{CHECK}}='ACCEPT';
- undef;
-}
-
-sub YYError {
- my($self)=shift;
-
- ${$$self{CHECK}}='ERROR';
- undef;
-}
-
-sub YYSemval {
- my($self)=shift;
- my($index)= $_[0] - ${$$self{DOTPOS}} - 1;
-
- $index < 0
- and -$index <= @{$$self{STACK}}
- and return $$self{STACK}[$index][1];
-
- undef; #Invalid index
-}
-
-sub YYCurtok {
- my($self)=shift;
-
- @_
- and ${$$self{TOKEN}}=$_[0];
- ${$$self{TOKEN}};
-}
-
-sub YYCurval {
- my($self)=shift;
-
- @_
- and ${$$self{VALUE}}=$_[0];
- ${$$self{VALUE}};
-}
-
-sub YYExpect {
- my($self)=shift;
-
- keys %{$self->{STATES}[$self->{STACK}[-1][0]]{ACTIONS}}
-}
-
-sub YYLexer {
- my($self)=shift;
-
- $$self{LEX};
-}
-
-
-#################
-# Private stuff #
-#################
-
-
-sub _CheckParams {
- my($mandatory,$checklist,$inarray,$outhash)=@_;
- my($prm,$value);
- my($prmlst)={};
-
- while(($prm,$value)=splice(@$inarray,0,2)) {
- $prm=uc($prm);
- exists($$checklist{$prm})
- or croak("Unknown parameter '$prm'");
- ref($value) eq $$checklist{$prm}
- or croak("Invalid value for parameter '$prm'");
- $prm=unpack('@2A*',$prm);
- $$outhash{$prm}=$value;
- }
- for (@$mandatory) {
- exists($$outhash{$_})
- or croak("Missing mandatory parameter '".lc($_)."'");
- }
-}
-
-sub _Error {
- print "Parse error.\n";
-}
-
-sub _DBLoad {
- {
- no strict 'refs';
-
- exists(${__PACKAGE__.'::'}{_DBParse})#Already loaded ?
- and return;
- }
- my($fname)=__FILE__;
- my(@drv);
- open(DRV,"<$fname") or die "Report this as a BUG: Cannot open $fname";
- while(<DRV>) {
- /^\s*sub\s+_Parse\s*{\s*$/ .. /^\s*}\s*#\s*_Parse\s*$/
- and do {
- s/^#DBG>//;
- push(@drv,$_);
- }
- }
- close(DRV);
-
- $drv[0]=~s/_P/_DBP/;
- eval join('',@drv);
-}
-
-#Note that for loading debugging version of the driver,
-#this file will be parsed from 'sub _Parse' up to '}#_Parse' inclusive.
-#So, DO NOT remove comment at end of sub !!!
-sub _Parse {
- my($self)=shift;
-
- my($rules,$states,$lex,$error)
- = @$self{ 'RULES', 'STATES', 'LEX', 'ERROR' };
- my($errstatus,$nberror,$token,$value,$stack,$check,$dotpos)
- = @$self{ 'ERRST', 'NBERR', 'TOKEN', 'VALUE', 'STACK', 'CHECK', 'DOTPOS' };
-
-#DBG> my($debug)=$$self{DEBUG};
-#DBG> my($dbgerror)=0;
-
-#DBG> my($ShowCurToken) = sub {
-#DBG> my($tok)='>';
-#DBG> for (split('',$$token)) {
-#DBG> $tok.= (ord($_) < 32 or ord($_) > 126)
-#DBG> ? sprintf('<%02X>',ord($_))
-#DBG> : $_;
-#DBG> }
-#DBG> $tok.='<';
-#DBG> };
-
- $$errstatus=0;
- $$nberror=0;
- ($$token,$$value)=(undef,undef);
- @$stack=( [ 0, undef ] );
- $$check='';
-
- while(1) {
- my($actions,$act,$stateno);
-
- $stateno=$$stack[-1][0];
- $actions=$$states[$stateno];
-
-#DBG> print STDERR ('-' x 40),"\n";
-#DBG> $debug & 0x2
-#DBG> and print STDERR "In state $stateno:\n";
-#DBG> $debug & 0x08
-#DBG> and print STDERR "Stack:[".
-#DBG> join(',',map { $$_[0] } @$stack).
-#DBG> "]\n";
-
-
- if (exists($$actions{ACTIONS})) {
-
- defined($$token)
- or do {
- ($$token,$$value)=&$lex($self);
-#DBG> $debug & 0x01
-#DBG> and print STDERR "Need token. Got ".&$ShowCurToken."\n";
- };
-
- $act= exists($$actions{ACTIONS}{$$token})
- ? $$actions{ACTIONS}{$$token}
- : exists($$actions{DEFAULT})
- ? $$actions{DEFAULT}
- : undef;
- }
- else {
- $act=$$actions{DEFAULT};
-#DBG> $debug & 0x01
-#DBG> and print STDERR "Don't need token.\n";
- }
-
- defined($act)
- and do {
-
- $act > 0
- and do { #shift
-
-#DBG> $debug & 0x04
-#DBG> and print STDERR "Shift and go to state $act.\n";
-
- $$errstatus
- and do {
- --$$errstatus;
-
-#DBG> $debug & 0x10
-#DBG> and $dbgerror
-#DBG> and $$errstatus == 0
-#DBG> and do {
-#DBG> print STDERR "**End of Error recovery.\n";
-#DBG> $dbgerror=0;
-#DBG> };
- };
-
-
- push(@$stack,[ $act, $$value ]);
-
- $$token ne '' #Don't eat the eof
- and $$token=$$value=undef;
- next;
- };
-
- #reduce
- my($lhs,$len,$code,@sempar,$semval);
- ($lhs,$len,$code)=@{$$rules[-$act]};
-
-#DBG> $debug & 0x04
-#DBG> and $act
-#DBG> and print STDERR "Reduce using rule ".-$act." ($lhs,$len): ";
-
- $act
- or $self->YYAccept();
-
- $$dotpos=$len;
-
- unpack('A1',$lhs) eq '@' #In line rule
- and do {
- $lhs =~ /^\@[0-9]+\-([0-9]+)$/
- or die "In line rule name '$lhs' ill formed: ".
- "report it as a BUG.\n";
- $$dotpos = $1;
- };
-
- @sempar = $$dotpos
- ? map { $$_[1] } @$stack[ -$$dotpos .. -1 ]
- : ();
-
- $semval = $code ? &$code( $self, @sempar )
- : @sempar ? $sempar[0] : undef;
-
- splice(@$stack,-$len,$len);
-
- $$check eq 'ACCEPT'
- and do {
-
-#DBG> $debug & 0x04
-#DBG> and print STDERR "Accept.\n";
-
- return($semval);
- };
-
- $$check eq 'ABORT'
- and do {
-
-#DBG> $debug & 0x04
-#DBG> and print STDERR "Abort.\n";
-
- return(undef);
-
- };
-
-#DBG> $debug & 0x04
-#DBG> and print STDERR "Back to state $$stack[-1][0], then ";
-
- $$check eq 'ERROR'
- or do {
-#DBG> $debug & 0x04
-#DBG> and print STDERR
-#DBG> "go to state $$states[$$stack[-1][0]]{GOTOS}{$lhs}.\n";
-
-#DBG> $debug & 0x10
-#DBG> and $dbgerror
-#DBG> and $$errstatus == 0
-#DBG> and do {
-#DBG> print STDERR "**End of Error recovery.\n";
-#DBG> $dbgerror=0;
-#DBG> };
-
- push(@$stack,
- [ $$states[$$stack[-1][0]]{GOTOS}{$lhs}, $semval ]);
- $$check='';
- next;
- };
-
-#DBG> $debug & 0x04
-#DBG> and print STDERR "Forced Error recovery.\n";
-
- $$check='';
-
- };
-
- #Error
- $$errstatus
- or do {
-
- $$errstatus = 1;
- &$error($self);
- $$errstatus # if 0, then YYErrok has been called
- or next; # so continue parsing
-
-#DBG> $debug & 0x10
-#DBG> and do {
-#DBG> print STDERR "**Entering Error recovery.\n";
-#DBG> ++$dbgerror;
-#DBG> };
-
- ++$$nberror;
-
- };
-
- $$errstatus == 3 #The next token is not valid: discard it
- and do {
- $$token eq '' # End of input: no hope
- and do {
-#DBG> $debug & 0x10
-#DBG> and print STDERR "**At eof: aborting.\n";
- return(undef);
- };
-
-#DBG> $debug & 0x10
-#DBG> and print STDERR "**Dicard invalid token ".&$ShowCurToken.".\n";
-
- $$token=$$value=undef;
- };
-
- $$errstatus=3;
-
- while( @$stack
- and ( not exists($$states[$$stack[-1][0]]{ACTIONS})
- or not exists($$states[$$stack[-1][0]]{ACTIONS}{error})
- or $$states[$$stack[-1][0]]{ACTIONS}{error} <= 0)) {
-
-#DBG> $debug & 0x10
-#DBG> and print STDERR "**Pop state $$stack[-1][0].\n";
-
- pop(@$stack);
- }
-
- @$stack
- or do {
-
-#DBG> $debug & 0x10
-#DBG> and print STDERR "**No state left on stack: aborting.\n";
-
- return(undef);
- };
-
- #shift the error token
-
-#DBG> $debug & 0x10
-#DBG> and print STDERR "**Shift \$error token and go to state ".
-#DBG> $$states[$$stack[-1][0]]{ACTIONS}{error}.
-#DBG> ".\n";
-
- push(@$stack, [ $$states[$$stack[-1][0]]{ACTIONS}{error}, undef ]);
-
- }
-
- #never reached
- croak("Error in driver logic. Please, report it as a BUG");
-
-}#_Parse
-#DO NOT remove comment
-
-1;
-
diff --git a/tools/pidl/lib/wscript_build b/tools/pidl/lib/wscript_build
deleted file mode 100644
index 54b3170c..00000000
--- a/tools/pidl/lib/wscript_build
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/env python
-
-# install the pidl modules
-bld.INSTALL_FILES(bld.env.PERL_LIB_INSTALL_DIR,
- '''
- Parse/Pidl.pm
- Parse/Pidl/Samba4.pm
- Parse/Pidl/CUtil.pm
- Parse/Pidl/Expr.pm
- Parse/Pidl/Wireshark/Conformance.pm
- Parse/Pidl/Wireshark/NDR.pm
- Parse/Pidl/ODL.pm
- Parse/Pidl/Dump.pm
- Parse/Pidl/Util.pm
- Parse/Pidl/Samba4/Header.pm
- Parse/Pidl/Samba4/COM/Header.pm
- Parse/Pidl/Samba4/COM/Proxy.pm
- Parse/Pidl/Samba4/COM/Stub.pm
- Parse/Pidl/Samba4/TDR.pm
- Parse/Pidl/Samba4/NDR/Server.pm
- Parse/Pidl/Samba4/NDR/Client.pm
- Parse/Pidl/Samba4/NDR/Parser.pm
- Parse/Pidl/Samba4/Python.pm
- Parse/Pidl/Samba4/Template.pm
- Parse/Pidl/IDL.pm
- Parse/Pidl/Typelist.pm
- Parse/Pidl/Samba3/ClientNDR.pm
- Parse/Pidl/Samba3/ServerNDR.pm
- Parse/Pidl/Compat.pm
- Parse/Pidl/NDR.pm
- ''',
- flat=False)
-
-if not bld.CONFIG_SET('USING_SYSTEM_PARSE_YAPP_DRIVER'):
- bld.INSTALL_FILES(bld.env.PERL_LIB_INSTALL_DIR,
- 'Parse/Yapp/Driver.pm',
- flat=False)
diff --git a/tools/pidl/pidl b/tools/pidl/pidl
index e8e6941f..762824e6 100755
--- a/tools/pidl/pidl
+++ b/tools/pidl/pidl
@@ -382,7 +382,7 @@ usesgetlasterror, vararg, vi_progid, wire_marshal.
=head1 SEE ALSO
L<https://msdn.microsoft.com/en-us/library/windows/desktop/aa373864%28v=vs.85%29.aspx>
-L<https://gitlab.com/wireshark/wireshark/-/wikis/DCE/RPC>,
+L<https://wiki.wireshark.org/DCE/RPC>,
L<https://www.samba.org/>,
L<yapp(1)>
@@ -402,6 +402,7 @@ pidl README by Andrew Tridgell.
use strict;
+use warnings;
use FindBin qw($RealBin $Script);
use lib "$RealBin/lib";
use Getopt::Long;
@@ -474,6 +475,7 @@ my($opt_template) = 0;
my($opt_client);
my($opt_typelib);
my($opt_server);
+my($opt_server_compat);
my($opt_ndr_parser);
my($opt_tdr_parser);
my($opt_ws_parser);
@@ -559,6 +561,7 @@ my $result = GetOptions (
'samba3-template' => \$opt_samba3_template,
'header:s' => \$opt_header,
'server:s' => \$opt_server,
+ 'server-compat:s' => \$opt_server_compat,
'typelib:s' => \$opt_typelib,
'tdr-parser:s' => \$opt_tdr_parser,
'template' => \$opt_template,
@@ -669,6 +672,7 @@ sub process_file($)
if (defined($opt_ws_parser) or
defined($opt_client) or
defined($opt_server) or
+ defined($opt_server_compat) or
defined($opt_header) or
defined($opt_ndr_parser) or
defined($opt_python) or
@@ -712,8 +716,11 @@ sub process_file($)
if (defined($opt_python)) {
require Parse::Pidl::Samba4::Python;
my $generator = new Parse::Pidl::Samba4::Python();
+ if (!defined($opt_client)) {
+ $c_header = undef;
+ }
my ($prsr) = $generator->Parse($basename, $ndr,
- "$outputdir/ndr_$basename\_c.h", $h_filename);
+ $c_header, $h_filename);
FileSave("$outputdir/py_$basename.c", $prsr);
}
@@ -794,6 +801,19 @@ sub process_file($)
FileSave($header, $h_code);
}
+ if (defined($opt_server_compat)) {
+ require Parse::Pidl::Samba4::NDR::ServerCompat;
+
+ my $c_scompat = ($opt_server_compat or "$outputdir/ndr_$basename\_scompat.c");
+ my $h_scompat = $c_scompat;
+ $h_scompat =~ s/\.c$/.h/;
+
+ my $generator = new Parse::Pidl::Samba4::NDR::ServerCompat();
+ my ($source, $header) = $generator->Parse($ndr, $h_scompat, $h_filename);
+
+ FileSave($c_scompat, $source);
+ FileSave($h_scompat, $header);
+ }
}
if (scalar(@ARGV) == 0) {
diff --git a/tools/pidl/tests/Util.pm b/tools/pidl/tests/Util.pm
index 86b521bf..a7b5a63f 100644
--- a/tools/pidl/tests/Util.pm
+++ b/tools/pidl/tests/Util.pm
@@ -9,6 +9,7 @@ require Exporter;
@EXPORT = qw(test_samba4_ndr test_warnings test_errors);
use strict;
+use warnings;
use FindBin qw($RealBin);
use lib "$RealBin/../lib";
diff --git a/tools/pidl/tests/header.pl b/tools/pidl/tests/header.pl
index db594844..dc8bbd7a 100755
--- a/tools/pidl/tests/header.pl
+++ b/tools/pidl/tests/header.pl
@@ -4,7 +4,7 @@
use strict;
use warnings;
-use Test::More tests => 27;
+use Test::More tests => 30;
use FindBin qw($RealBin);
use lib "$RealBin";
use Util;
@@ -23,6 +23,16 @@ sub parse_idl($)
return Parse::Pidl::Samba4::Header::Parse($ndr);
}
+sub load_and_parse_idl($)
+{
+ my $text = shift;
+ my $ndr;
+ my $idl = Parse::Pidl::IDL::parse_string($text, "nofile");
+ Parse::Pidl::Typelist::LoadIdl($idl, "noname");
+ $ndr = Parse::Pidl::NDR::Parse($idl);
+ return Parse::Pidl::Samba4::Header::Parse($ndr);
+}
+
like(parse_idl(""), qr/\/\* header auto-generated by pidl \*\/\n/sm, "includes work");
like(parse_idl("interface x {}"), qr/\/\* header auto-generated by pidl \*\/\n/sm, "simple empty interface doesn't cause overhead");
like(parse_idl("interface p { typedef struct { int y; } x; };"),
@@ -59,6 +69,15 @@ like(parse_idl("interface p { typedef struct x { int p; } x; };"),
like(parse_idl("cpp_quote(\"some-foo\")"),
qr/some-foo/sm, "cpp quote");
+like(load_and_parse_idl("interface hang {typedef [public] struct { wsp_cbasestoragevariant a[SINGLE]; } foo; typedef [public,nodiscriminant,switch_type(uint16)] union { [case(VT_I1)] int8 vt_i1; [case(VT_VARIANT)] foo b; } variant_types; typedef [public] struct { [switch_is(vtype)] variant_types vvalue; } bar;};"),
+ qr/struct foo.*{.*struct wsp_cbasestoragevariant \*a.*struct bar \{.*union variant_types vvalue.*;/sm,"test for hang with nested struct with union");
+
+like(load_and_parse_idl("interface hang { typedef struct { uint32 count; bar a[count];} foo ; typedef struct { foo b; } bar; };"),
+ qr/struct foo.*{.*struct bar \*a;/sm,"test for hang with nested struct");
+
+like(load_and_parse_idl("interface hang { typedef struct { bar a; } foo ; typedef struct { foo b; } bar; };"),
+ qr/struct foo.*{.*struct bar a;/sm,"test for hang with uncompilable nested struct");
+
# Make sure GenerateFunctionInEnv and GenerateFunctionOutEnv work
my $fn = { ELEMENTS => [ { DIRECTION => ["in"], NAME => "foo" } ] };
is_deeply({ "foo" => "r->in.foo" }, GenerateFunctionInEnv($fn));
diff --git a/tools/pidl/tests/ndr.pl b/tools/pidl/tests/ndr.pl
index b6fd4899..8f845452 100755
--- a/tools/pidl/tests/ndr.pl
+++ b/tools/pidl/tests/ndr.pl
@@ -4,7 +4,7 @@
use strict;
use warnings;
-use Test::More tests => 47;
+use Test::More tests => 48;
use FindBin qw($RealBin);
use lib "$RealBin";
use Util;
@@ -480,6 +480,7 @@ $ne = ParseElement($e, undef, 0);
is($ne->{REPRESENTATION_TYPE}, "uint8");
is(align_type("hyper"), 8);
+is(align_type("int64"), 8);
is(align_type("double"), 8);
is(align_type("uint32"), 4);
is(align_type("uint16"), 2);
diff --git a/tools/pidl/tests/ndr_align.pl b/tools/pidl/tests/ndr_align.pl
index cc089eaa..80d61158 100755
--- a/tools/pidl/tests/ndr_align.pl
+++ b/tools/pidl/tests/ndr_align.pl
@@ -2,6 +2,7 @@
# NDR alignment tests
# (C) 2005 Jelmer Vernooij. Published under the GNU GPL
use strict;
+use warnings;
use Test::More tests => 5 * 8;
use FindBin qw($RealBin);
@@ -16,7 +17,7 @@ test_samba4_ndr('align-uint8-uint16',
} bla;
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct bla r;
uint8_t expected[] = { 0x0D, 0x00, 0xef, 0xbe };
DATA_BLOB expected_blob = { expected, 4 };
@@ -41,7 +42,7 @@ test_samba4_ndr('align-uint8-uint32',
} bla;
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct bla r;
uint8_t expected[] = { 0x0D, 0x00, 0x00, 0x00, 0xef, 0xbe, 0xef, 0xbe };
DATA_BLOB expected_blob = { expected, 8 };
@@ -67,7 +68,7 @@ test_samba4_ndr('align-uint8-hyper',
} bla;
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct bla r;
uint8_t expected[] = { 0x0D, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xef, 0xbe, 0xef, 0xbe, 0xef, 0xbe, 0xef, 0xbe };
@@ -93,7 +94,7 @@ test_samba4_ndr('noalignflag-uint8-uint16',
} bla;
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct bla r;
uint8_t expected[] = { 0x0D, 0xef, 0xbe };
DATA_BLOB expected_blob = { expected, 3 };
@@ -121,7 +122,7 @@ test_samba4_ndr('align-blob-align2',
} blie;
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct blie r;
uint8_t data[] = { 0x01, 0x02 };
uint8_t expected[] = { 0x0D, 0x00, 0x0E };
diff --git a/tools/pidl/tests/ndr_alloc.pl b/tools/pidl/tests/ndr_alloc.pl
index 399fbd21..c708c3b5 100755
--- a/tools/pidl/tests/ndr_alloc.pl
+++ b/tools/pidl/tests/ndr_alloc.pl
@@ -2,6 +2,7 @@
# NDR allocation tests
# (C) 2005 Jelmer Vernooij. Published under the GNU GPL
use strict;
+use warnings;
use Test::More tests => 5 * 8;
use FindBin qw($RealBin);
diff --git a/tools/pidl/tests/ndr_array.pl b/tools/pidl/tests/ndr_array.pl
index 2a6b5bbd..46ab83ec 100755
--- a/tools/pidl/tests/ndr_array.pl
+++ b/tools/pidl/tests/ndr_array.pl
@@ -3,6 +3,7 @@
# (C) 2005 Jelmer Vernooij <jelmer@samba.org>
# Published under the GNU General Public License
use strict;
+use warnings;
use Test::More tests => 8;
use FindBin qw($RealBin);
diff --git a/tools/pidl/tests/ndr_compat.pl b/tools/pidl/tests/ndr_compat.pl
index 355e7f67..06f7efb4 100755
--- a/tools/pidl/tests/ndr_compat.pl
+++ b/tools/pidl/tests/ndr_compat.pl
@@ -2,6 +2,7 @@
# (C) 2007 Jelmer Vernooij <jelmer@samba.org>
# Published under the GNU General Public License
use strict;
+use warnings;
use Test::More tests => 2;
use FindBin qw($RealBin);
diff --git a/tools/pidl/tests/ndr_fullptr.pl b/tools/pidl/tests/ndr_fullptr.pl
index cc6fca7a..109b368e 100755
--- a/tools/pidl/tests/ndr_fullptr.pl
+++ b/tools/pidl/tests/ndr_fullptr.pl
@@ -3,6 +3,7 @@
# (C) 2006 Jelmer Vernooij <jelmer@samba.org>.
# Published under the GNU General Public License.
use strict;
+use warnings;
use Test::More tests => 1 * 8;
use FindBin qw($RealBin);
@@ -17,7 +18,7 @@ test_samba4_ndr("fullptr-push-dup",
[public] uint16 echo_TestFull([in,ptr] uint32 *x, [in,ptr] uint32 *y);
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
uint32_t v = 13;
struct echo_TestFull r;
r.in.x = &v;
diff --git a/tools/pidl/tests/ndr_refptr.pl b/tools/pidl/tests/ndr_refptr.pl
index d5dd8395..94676a80 100755
--- a/tools/pidl/tests/ndr_refptr.pl
+++ b/tools/pidl/tests/ndr_refptr.pl
@@ -4,6 +4,7 @@
# (C) 2005 Jelmer Vernooij <jelmer@samba.org>.
# Published under the GNU General Public License.
use strict;
+use warnings;
use Test::More tests => 22 * 8;
use FindBin qw($RealBin);
@@ -18,7 +19,7 @@ test_samba4_ndr("noptr-push",
[public] uint16 echo_TestRef([in] xstruct foo);
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
uint16_t v = 13;
struct echo_TestRef r;
r.in.foo.x = v;
@@ -48,7 +49,7 @@ test_samba4_ndr("ptr-embedded-push",
',
'
uint16_t v = 13;
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
r.in.foo.x = &v;
@@ -74,7 +75,7 @@ test_samba4_ndr("ptr-embedded-push-null",
[public] uint16 echo_TestRef([in] xstruct foo);
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
r.in.foo.x = NULL;
@@ -99,7 +100,7 @@ test_samba4_ndr("refptr-embedded-push",
',
'
uint16_t v = 13;
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
r.in.foo.x = &v;
@@ -126,7 +127,7 @@ test_samba4_ndr("refptr-embedded-push-null",
[public] uint16 echo_TestRef([in] xstruct foo);
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
r.in.foo.x = NULL;
@@ -144,7 +145,7 @@ test_samba4_ndr("ptr-top-push",
[public] uint16 echo_TestRef([in] xstruct *foo);
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
struct xstruct s;
s.x = 13;
@@ -169,7 +170,7 @@ test_samba4_ndr("ptr-top-push-null",
[public] uint16 echo_TestRef([in] xstruct *foo);
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
r.in.foo = NULL;
@@ -189,7 +190,7 @@ test_samba4_ndr("refptr-top-push",
[public] uint16 echo_TestRef([in,ref] xstruct *foo);
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
struct xstruct s;
s.x = 13;
@@ -214,7 +215,7 @@ test_samba4_ndr("refptr-top-push-null",
[public] uint16 echo_TestRef([in,ref] xstruct *foo);
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
r.in.foo = NULL;
@@ -233,7 +234,7 @@ test_samba4_ndr("uniqueptr-top-push",
[public] uint16 echo_TestRef([in,unique] xstruct *foo);
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
struct xstruct s;
s.x = 13;
@@ -261,7 +262,7 @@ test_samba4_ndr("uniqueptr-top-push-null",
[public] uint16 echo_TestRef([in,unique] xstruct *foo);
',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
r.in.foo = NULL;
@@ -381,7 +382,7 @@ test_samba4_ndr("ptr-top-push-double",
'
[public] void echo_TestRef([in] uint16 **foo);
',
-' struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+' struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
uint16_t v = 13;
uint16_t *pv = &v;
@@ -408,7 +409,7 @@ test_samba4_ndr("ptr-top-push-double-sndnull",
'
[public] void echo_TestRef([in] uint16 **foo);
',
-' struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+' struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
uint16_t *pv = NULL;
r.in.foo = &pv;
@@ -429,7 +430,7 @@ test_samba4_ndr("ptr-top-push-double-fstnull",
'
[public] void echo_TestRef([in] uint16 **foo);
',
-' struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+' struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
r.in.foo = NULL;
@@ -445,7 +446,7 @@ test_samba4_ndr("refptr-top-push-double",
'
[public] void echo_TestRef([in,ref] uint16 **foo);
',
-' struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+' struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
uint16_t v = 13;
uint16_t *pv = &v;
@@ -473,7 +474,7 @@ test_samba4_ndr("refptr-top-push-double-sndnull",
'
[public] void echo_TestRef([in,ref] uint16 **foo);
',
-' struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+' struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
uint16_t *pv = NULL;
r.in.foo = &pv;
@@ -494,7 +495,7 @@ test_samba4_ndr("refptr-top-push-double-fstnull",
'
[public] void echo_TestRef([in,ref] uint16 **foo);
',
-' struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+' struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
r.in.foo = NULL;
@@ -511,7 +512,7 @@ test_samba4_ndr("ignore-ptr",
'
[public] void echo_TestRef([in,ignore] uint16 *foo, [in] uint16 *bar);
',
-' struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+' struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct echo_TestRef r;
uint16_t v = 10;
r.in.foo = &v;
diff --git a/tools/pidl/tests/ndr_represent.pl b/tools/pidl/tests/ndr_represent.pl
index 2d65fb92..af9a92b6 100755
--- a/tools/pidl/tests/ndr_represent.pl
+++ b/tools/pidl/tests/ndr_represent.pl
@@ -2,6 +2,7 @@
# NDR represent_as() / transmit_as() tests
# (C) 2006 Jelmer Vernooij. Published under the GNU GPL
use strict;
+use warnings;
use Test::More tests => 2 * 8;
use FindBin qw($RealBin);
diff --git a/tools/pidl/tests/ndr_simple.pl b/tools/pidl/tests/ndr_simple.pl
index 15e07d56..c5c32445 100755
--- a/tools/pidl/tests/ndr_simple.pl
+++ b/tools/pidl/tests/ndr_simple.pl
@@ -3,6 +3,7 @@
# (C) 2005 Jelmer Vernooij <jelmer@samba.org>
# Published under the GNU General Public License
use strict;
+use warnings;
use Test::More tests => 8;
use FindBin qw($RealBin);
diff --git a/tools/pidl/tests/ndr_string.pl b/tools/pidl/tests/ndr_string.pl
index 8e8b8ecb..aa5fd8b5 100755
--- a/tools/pidl/tests/ndr_string.pl
+++ b/tools/pidl/tests/ndr_string.pl
@@ -3,6 +3,7 @@
# (C) 2005 Jelmer Vernooij <jelmer@samba.org>
# Published under the GNU General Public License
use strict;
+use warnings;
use Test::More tests => 6 * 8;
use FindBin qw($RealBin);
diff --git a/tools/pidl/tests/ndr_tagtype.pl b/tools/pidl/tests/ndr_tagtype.pl
index 3f9b717b..fa09cf77 100755
--- a/tools/pidl/tests/ndr_tagtype.pl
+++ b/tools/pidl/tests/ndr_tagtype.pl
@@ -2,7 +2,7 @@
# Support for tagged types
# (C) 2005 Jelmer Vernooij. Published under the GNU GPL
use strict;
-
+use warnings;
use Test::More tests => 3 * 8;
use FindBin qw($RealBin);
use lib "$RealBin";
@@ -10,7 +10,7 @@ use Util qw(test_samba4_ndr);
test_samba4_ndr('struct-notypedef', '[public] struct bla { uint8 x; }; ',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct bla r;
uint8_t expected[] = { 0x0D };
DATA_BLOB expected_blob = { expected, 1 };
@@ -29,7 +29,7 @@ test_samba4_ndr('struct-notypedef', '[public] struct bla { uint8 x; }; ',
test_samba4_ndr('struct-notypedef-used', '[public] struct bla { uint8 x; };
[public] void myfn([in] struct bla r); ',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct myfn fn;
uint8_t expected[] = { 0x0D };
DATA_BLOB expected_blob = { expected, 1 };
@@ -49,7 +49,7 @@ test_samba4_ndr('struct-notypedef-used', '[public] struct bla { uint8 x; };
test_samba4_ndr('struct-notypedef-embedded', 'struct bla { uint8 x; };
[public] struct myst { struct bla r; }; ',
'
- struct ndr_push *ndr = ndr_push_init_ctx(NULL, NULL);
+ struct ndr_push *ndr = ndr_push_init_ctx(NULL);
struct myst st;
uint8_t expected[] = { 0x0D };
DATA_BLOB expected_blob = { expected, 1 };
diff --git a/tools/pidl/tests/parse_idl.pl b/tools/pidl/tests/parse_idl.pl
index 14138a37..caf701b3 100755
--- a/tools/pidl/tests/parse_idl.pl
+++ b/tools/pidl/tests/parse_idl.pl
@@ -3,7 +3,7 @@
# (C) 2005 Jelmer Vernooij <jelmer@samba.org>
# Published under the GNU General Public License
use strict;
-
+use warnings;
use Test::More tests => 65 * 2 + 7;
use FindBin qw($RealBin);
use lib "$RealBin";
diff --git a/tools/pidl/tests/samba-ndr.pl b/tools/pidl/tests/samba-ndr.pl
index 7c53cbc7..54a4e46d 100755
--- a/tools/pidl/tests/samba-ndr.pl
+++ b/tools/pidl/tests/samba-ndr.pl
@@ -8,7 +8,6 @@ use Test::More tests => 31;
use FindBin qw($RealBin);
use lib "$RealBin";
use Util;
-use strict;
use Parse::Pidl::Util qw(MyDumper);
use Parse::Pidl::Samba4::NDR::Parser qw(check_null_pointer
NeededFunction NeededElement NeededType
diff --git a/tools/pidl/tests/samba3-cli.pl b/tools/pidl/tests/samba3-cli.pl
index c758ef45..4349abb3 100755
--- a/tools/pidl/tests/samba3-cli.pl
+++ b/tools/pidl/tests/samba3-cli.pl
@@ -9,7 +9,7 @@ use FindBin qw($RealBin);
use lib "$RealBin";
use Util;
use Parse::Pidl::Util qw(MyDumper);
-use Parse::Pidl::Samba3::ClientNDR qw(ParseFunction);
+use Parse::Pidl::Samba3::ClientNDR;
use Parse::Pidl::Samba4::Header qw(GenerateFunctionInEnv GenerateFunctionOutEnv);
# Make sure GenerateFunctionInEnv and GenerateFunctionOutEnv work
diff --git a/tools/pidl/tests/tdr.pl b/tools/pidl/tests/tdr.pl
index d6cd7a03..41a560c8 100755
--- a/tools/pidl/tests/tdr.pl
+++ b/tools/pidl/tests/tdr.pl
@@ -13,18 +13,18 @@ use Parse::Pidl::Samba4::TDR qw(ParserType);
my $tdr = new Parse::Pidl::Samba4::TDR();
$tdr->ParserType({TYPE => "STRUCT", NAME => "foo", PROPERTIES => {public => 1}}, "pull");
-is($tdr->{ret}, "NTSTATUS tdr_pull_foo (struct tdr_pull *tdr, TALLOC_CTX *mem_ctx, struct foo *v)
+is($tdr->{res}, "NTSTATUS tdr_pull_foo (struct tdr_pull *tdr, TALLOC_CTX *mem_ctx, struct foo *v)
{
return NT_STATUS_OK;
}
");
-is($tdr->{ret_hdr}, "NTSTATUS tdr_pull_foo (struct tdr_pull *tdr, TALLOC_CTX *mem_ctx, struct foo *v);\n");
+is($tdr->{res_hdr}, "NTSTATUS tdr_pull_foo (struct tdr_pull *tdr, TALLOC_CTX *mem_ctx, struct foo *v);\n");
$tdr = new Parse::Pidl::Samba4::TDR();
$tdr->ParserType({TYPE => "UNION", NAME => "bar", PROPERTIES => {public => 1}}, "pull");
-is($tdr->{ret}, "NTSTATUS tdr_pull_bar(struct tdr_pull *tdr, TALLOC_CTX *mem_ctx, int level, union bar *v)
+is($tdr->{res}, "NTSTATUS tdr_pull_bar(struct tdr_pull *tdr, TALLOC_CTX *mem_ctx, int level, union bar *v)
{
switch (level) {
}
@@ -33,11 +33,11 @@ is($tdr->{ret}, "NTSTATUS tdr_pull_bar(struct tdr_pull *tdr, TALLOC_CTX *mem_ctx
}
");
-is($tdr->{ret_hdr}, "NTSTATUS tdr_pull_bar(struct tdr_pull *tdr, TALLOC_CTX *mem_ctx, int level, union bar *v);\n");
+is($tdr->{res_hdr}, "NTSTATUS tdr_pull_bar(struct tdr_pull *tdr, TALLOC_CTX *mem_ctx, int level, union bar *v);\n");
$tdr = new Parse::Pidl::Samba4::TDR();
$tdr->ParserType({TYPE => "UNION", NAME => "bar", PROPERTIES => {}}, "pull");
-is($tdr->{ret}, "static NTSTATUS tdr_pull_bar(struct tdr_pull *tdr, TALLOC_CTX *mem_ctx, int level, union bar *v)
+is($tdr->{res}, "static NTSTATUS tdr_pull_bar(struct tdr_pull *tdr, TALLOC_CTX *mem_ctx, int level, union bar *v)
{
switch (level) {
}
@@ -46,4 +46,4 @@ is($tdr->{ret}, "static NTSTATUS tdr_pull_bar(struct tdr_pull *tdr, TALLOC_CTX *
}
");
-is($tdr->{ret_hdr}, "");
+is($tdr->{res_hdr}, "");
diff --git a/tools/pidl/tests/test_util.pl b/tools/pidl/tests/test_util.pl
index 2d59f628..93addcea 100755
--- a/tools/pidl/tests/test_util.pl
+++ b/tools/pidl/tests/test_util.pl
@@ -2,6 +2,7 @@
# (C) 2007 Jelmer Vernooij <jelmer@samba.org>
# Published under the GNU General Public License
use strict;
+use warnings;
use Test::More tests => 6;
use FindBin qw($RealBin);
diff --git a/tools/pidl/tests/typelist.pl b/tools/pidl/tests/typelist.pl
index 681c0eac..e012c806 100755
--- a/tools/pidl/tests/typelist.pl
+++ b/tools/pidl/tests/typelist.pl
@@ -4,7 +4,7 @@
use strict;
use warnings;
-use Test::More tests => 56;
+use Test::More tests => 58;
use FindBin qw($RealBin);
use lib "$RealBin";
use Util;
@@ -21,6 +21,7 @@ is("int32", expandAlias("int32"));
is("uint32_t", mapScalarType("uint32"));
is("void", mapScalarType("void"));
is("uint64_t", mapScalarType("hyper"));
+is("int64_t", mapScalarType("int64"));
is("double", mapScalarType("double"));
my $x = { TYPE => "ENUM", NAME => "foo", EXTRADATA => 1 };
@@ -66,6 +67,7 @@ is(1, is_scalar({TYPE => "TYPEDEF", DATA => {TYPE => "ENUM" }}));
is(1, is_scalar("mytypedef"));
is(1, scalar_is_reference("string"));
+is(1, scalar_is_reference("u16string"));
is(0, scalar_is_reference("uint32"));
is(0, scalar_is_reference({TYPE => "STRUCT", NAME => "echo_foobar"}));
diff --git a/tools/pidl/tests/wireshark-ndr.pl b/tools/pidl/tests/wireshark-ndr.pl
index 229315b0..0edb0ec2 100755
--- a/tools/pidl/tests/wireshark-ndr.pl
+++ b/tools/pidl/tests/wireshark-ndr.pl
@@ -45,7 +45,7 @@ is($x->{res}->{code}, 'void proto_reg_handoff_dcerpc_bla(void)
is($x->{hf_used}->{hf_bla_opnum}, 1);
$x->{conformance} = {};
-is("hf_bla_idx",
+is("hf_bla_idx",
$x->register_hf_field("hf_bla_idx", "bla", "my.filter", "FT_UINT32", "BASE_HEX", "NULL", 0xF, undef));
is_deeply($x->{conformance}, {
header_fields => {
@@ -68,7 +68,7 @@ $x->{conformance} = { fielddescription => { hf_bla_idx => { DESCRIPTION => "Some
is("hf_bla_idx",
$x->register_hf_field("hf_bla_idx", "bla", "my.filter", "FT_UINT32", "BASE_HEX", "NULL", 0xF, undef));
is_deeply($x->{conformance}, {
- fielddescription => {
+ fielddescription => {
hf_bla_idx => {
DESCRIPTION => "Some Description",
USED => 1
@@ -91,10 +91,10 @@ is_deeply($x->{conformance}, {
$x->{conformance} = { fielddescription => { hf_bla_idx => { DESCRIPTION => "Some Description" }}};
is("hf_bla_idx",
- $x->register_hf_field("hf_bla_idx", "bla", "my.filter", "FT_UINT32", "BASE_HEX", "NULL", 0xF,
+ $x->register_hf_field("hf_bla_idx", "bla", "my.filter", "FT_UINT32", "BASE_HEX", "NULL", 0xF,
"Actual Description"));
is_deeply($x->{conformance}, {
- fielddescription => {
+ fielddescription => {
hf_bla_idx => { DESCRIPTION => "Some Description" }
},
header_fields => {
@@ -120,14 +120,14 @@ is_deeply($x->{conformance}, {
hf_renames => { hf_bla_idx => { USED => 1, NEWNAME => "hf_bloe_idx" } } });
$x->{hf_used} = { hf_bla => 1 };
-test_warnings("", sub {
+test_warnings("", sub {
$x->CheckUsed({ header_fields => { foo => { INDEX => "hf_bla" }}})});
$x->{hf_used} = { };
-test_warnings("hf field `hf_bla' not used\n", sub {
+test_warnings("hf field `hf_bla' not used\n", sub {
$x->CheckUsed({ header_fields => { foo => { INDEX => "hf_bla" }}})});
-test_warnings("hf field `hf_id' not used\n",
+test_warnings("hf field `hf_id' not used\n",
sub { $x->CheckUsed({
hf_renames => {
hf_id => {
@@ -171,7 +171,7 @@ test_warnings("nofile:1: type never used\n",
types => {
bla => {
USED => 0,
- POS => { FILE => "nofile", LINE => 1 }
+ POS => { FILE => "nofile", LINE => 1 }
}
}
}); } );
@@ -191,39 +191,39 @@ is($x->{res}->{hdr}, "#include \"packet-dcerpc-bla.h\"\n\n");
$x = new Parse::Pidl::Wireshark::NDR();
$x->ProcessImport("\"bla.idl\"", "\"foo.idl\"");
-is($x->{res}->{hdr}, "#include \"packet-dcerpc-bla.h\"\n" .
+is($x->{res}->{hdr}, "#include \"packet-dcerpc-bla.h\"\n" .
"#include \"packet-dcerpc-foo.h\"\n\n");
$x = new Parse::Pidl::Wireshark::NDR();
$x->ProcessInclude("foo.h", "bla.h", "bar.h");
-is($x->{res}->{hdr}, "#include \"foo.h\"\n" .
- "#include \"bla.h\"\n" .
+is($x->{res}->{hdr}, "#include \"foo.h\"\n" .
+ "#include \"bla.h\"\n" .
"#include \"bar.h\"\n\n");
-
+
$x->{conformance} = {types => { bla => "brainslug" } };
is("brainslug", $x->find_type("bla"));
-is(DumpEttList(["ett_t1", "ett_bla"]),
- "\tstatic gint *ett[] = {\n" .
+is(DumpEttList(["ett_t1", "ett_bla"]),
+ "\tstatic int *ett[] = {\n" .
"\t\t&ett_t1,\n" .
"\t\t&ett_bla,\n" .
"\t};\n");
-is(DumpEttList(), "\tstatic gint *ett[] = {\n\t};\n");
-is(DumpEttList(["bla"]), "\tstatic gint *ett[] = {\n\t\t&bla,\n\t};\n");
+is(DumpEttList(), "\tstatic int *ett[] = {\n\t};\n");
+is(DumpEttList(["bla"]), "\tstatic int *ett[] = {\n\t\t&bla,\n\t};\n");
-is(DumpEttDeclaration(["void", "zoid"]),
- "\n/* Ett declarations */\n" .
- "static gint void = -1;\n" .
- "static gint zoid = -1;\n" .
+is(DumpEttDeclaration(["void", "zoid"]),
+ "\n/* Ett declarations */\n" .
+ "static int void;\n" .
+ "static int zoid;\n" .
"\n");
is(DumpEttDeclaration(), "\n/* Ett declarations */\n\n");
$x->{conformance} = {
header_fields => {
- hf_bla => { INDEX => "hf_bla", NAME => "Bla", FILTER => "bla.field", FT_TYPE => "FT_UINT32", BASE_TYPE => "BASE_DEC", VALSSTRING => "NULL", MASK => 0xFF, BLURB => "NULL" }
- }
+ hf_bla => { INDEX => "hf_bla", NAME => "Bla", FILTER => "bla.field", FT_TYPE => "FT_UINT32", BASE_TYPE => "BASE_DEC", VALSSTRING => "NULL", MASK => 0xFF, BLURB => "NULL" }
+ }
};
is($x->DumpHfList(), "\tstatic hf_register_info hf[] = {
@@ -234,14 +234,14 @@ is($x->DumpHfList(), "\tstatic hf_register_info hf[] = {
is($x->DumpHfDeclaration(), "
/* Header field declarations */
-static gint hf_bla = -1;
+static int hf_bla;
");
is(DumpFunctionTable({
NAME => "someif",
FUNCTIONS => [ { NAME => "fn1", OPNUM => 3 }, { NAME => "someif_fn2", OPNUM => 2 } ] }),
-'static dcerpc_sub_dissector someif_dissectors[] = {
+'static const dcerpc_sub_dissector someif_dissectors[] = {
{ 3, "fn1",
someif_dissect_fn1_request, someif_dissect_fn1_response},
{ 2, "fn2",
diff --git a/tools/pidl/wscript b/tools/pidl/wscript
index f4ff9028..01f8f5e4 100644
--- a/tools/pidl/wscript
+++ b/tools/pidl/wscript
@@ -1,77 +1,36 @@
#!/usr/bin/env python
-import os, Logs
-from samba_utils import MODE_755
+from waflib import Logs, Errors
# This function checks if a perl module is installed on the system.
def check_system_perl_module(conf, module, version=None):
- bundle_name = module.replace('::', '_')
module_check = module
# Create module string with version
if version:
module_check = module + ' ' + str(version)
- # Check if we have to bundle it.
- if conf.LIB_MUST_BE_BUNDLED(bundle_name.lower()):
- return False
-
# Check for system perl module
- if not conf.check_perl_module(module_check):
+ if conf.check_perl_module(module_check) is None:
return False
- conf.define('USING_SYSTEM_%s' % bundle_name.upper(), 1)
-
return True
-def set_options(opt):
+def options(opt):
return
def configure(conf):
# Check if perl(Parse::Yapp::Driver) is available.
- check_system_perl_module(conf, "Parse::Yapp::Driver", 1.05)
-
- # we need a recent version of MakeMaker to get the right man page names
- if conf.CHECK_PERL_MANPAGE():
- conf.env.PERLMAN1EXT = conf.CHECK_PERL_MANPAGE(section='1')
- conf.env.PERLMAN3EXT = conf.CHECK_PERL_MANPAGE(section='3')
- conf.DEFINE('HAVE_PERL_MAKEMAKER', 1)
+ if not check_system_perl_module(conf,
+ "Parse::Yapp::Driver",
+ 1.05):
+ raise Errors.WafError('perl module "Parse::Yapp::Driver" not found')
# yapp is used for building the parser
- conf.find_program('yapp', var='YAPP')
- conf.find_program('pod2man', var='POD2MAN')
+ if not conf.find_program('yapp', var='YAPP'):
+ raise Errors.WafError('yapp not found')
def build(bld):
- bld.INSTALL_FILES('${BINDIR}', 'pidl', chmod=MODE_755, perl_fixup=True)
-
- bld.RECURSE('lib')
-
- if not bld.CONFIG_SET('HAVE_PERL_MAKEMAKER'):
- return
-
- pidl_manpages = {
- 'pidl': 'man1/pidl.${PERLMAN1EXT}',
- 'lib/Parse/Pidl/NDR.pm': 'man3/Parse::Pidl::NDR.${PERLMAN3EXT}',
- 'lib/Parse/Pidl/Wireshark/Conformance.pm': 'man3/Parse::Pidl::Wireshark::Conformance.${PERLMAN3EXT}',
- 'lib/Parse/Pidl/Dump.pm': 'man3/Parse::Pidl::Dump.${PERLMAN3EXT}',
- 'lib/Parse/Pidl/Util.pm': 'man3/Parse::Pidl::Util.${PERLMAN3EXT}',
- 'lib/Parse/Pidl/Wireshark/NDR.pm': 'man3/Parse::Pidl::Wireshark::NDR.${PERLMAN3EXT}'
- }
-
- for k, v in pidl_manpages.iteritems():
- pidl_manpages[k] = bld.EXPAND_VARIABLES(v)
-
- # use perl to build the manpages
- bld.env.pidl_srcdir = os.path.join(bld.srcnode.abspath(), 'pidl')
-
- bld.SET_BUILD_GROUP('final')
- if 'POD2MAN' in bld.env and bld.env['POD2MAN'] != '':
- for src, manpage in pidl_manpages.iteritems():
- bld(rule='${POD2MAN} -c "Samba Documentation" ${SRC} ${TGT}',
- shell=True,
- source=src,
- install_path=os.path.dirname(bld.EXPAND_VARIABLES('${MANDIR}/'+manpage)),
- target=os.path.basename(manpage))
# we want to prefer the git version of the parsers if we can.
# Only if the source has changed do we want to re-run yapp
@@ -95,9 +54,8 @@ $ git add lib/Parse/Pidl/IDL.pm lib/Parse/Pidl/Expr.pm
$ git commit
$ cd -
-If your 100% sure you haven't changed idl.yp and expr.yp
+If you're 100% sure you haven't changed idl.yp and expr.yp
try this to avoid this message:
$ touch ../pidl/lib/Parse/Pidl/IDL.pm ../pidl/lib/Parse/Pidl/Expr.pm
''')
-
diff --git a/tools/pre-commit-ignore.py b/tools/pre-commit-ignore.py
index 63ecf3e8..d0699034 100755
--- a/tools/pre-commit-ignore.py
+++ b/tools/pre-commit-ignore.py
@@ -7,7 +7,6 @@
# SPDX-License-Identifier: GPL-2.0-or-later
import sys
-import os
import fnmatch
IGNORE_CONF = "pre-commit-ignore.conf"
@@ -29,8 +28,8 @@ def load_checkignore(path):
patterns = f.read()
except OSError as err:
sys.exit(str(err))
- ign = [l.strip() for l in patterns.splitlines()]
- ign = [l for l in ign if l and not l.startswith("#")]
+ ign = [line.strip() for line in patterns.splitlines()]
+ ign = [line for line in ign if line and not line.startswith("#")]
return ign
ignore_list = load_checkignore(ignore_path)
diff --git a/tools/process-x11-fields.pl b/tools/process-x11-fields.pl
index 9b66a8d5..8aedef48 100755
--- a/tools/process-x11-fields.pl
+++ b/tools/process-x11-fields.pl
@@ -144,7 +144,7 @@ while(<>) {
$variable =~ s/-/_/go;
$variable =~ s/\./_/go;
- print DECL "static int hf_x11_$variable = -1;\n";
+ print DECL "static int hf_x11_$variable;\n";
print REG <<END;
{ &hf_x11_$variable, { "$abbrev", "x11.$field", FT_$type, $fieldDisplay, $fieldStrings, $mask, $longName, HFILL }},
diff --git a/tools/process-x11-xcb.pl b/tools/process-x11-xcb.pl
index 91dcf427..b3cec07d 100755
--- a/tools/process-x11-xcb.pl
+++ b/tools/process-x11-xcb.pl
@@ -44,20 +44,20 @@ my @register;
my $script_name = File::Spec->abs2rel ($0, $srcdir);
my %basictype = (
- char => { size => 1, encoding => 'ENC_ASCII|ENC_NA', type => 'FT_STRING', base => 'BASE_NONE', get => 'tvb_get_guint8', list => 'listOfByte', },
- void => { size => 1, encoding => 'ENC_NA', type => 'FT_BYTES', base => 'BASE_NONE', get => 'tvb_get_guint8', list => 'listOfByte', },
- BYTE => { size => 1, encoding => 'ENC_NA', type => 'FT_BYTES', base => 'BASE_NONE', get => 'tvb_get_guint8', list => 'listOfByte', },
- CARD8 => { size => 1, encoding => 'byte_order', type => 'FT_UINT8', base => 'BASE_HEX_DEC', get => 'tvb_get_guint8', list => 'listOfByte', },
- CARD16 => { size => 2, encoding => 'byte_order', type => 'FT_UINT16', base => 'BASE_HEX_DEC', get => 'tvb_get_guint16', list => 'listOfCard16', },
- CARD32 => { size => 4, encoding => 'byte_order', type => 'FT_UINT32', base => 'BASE_HEX_DEC', get => 'tvb_get_guint32', list => 'listOfCard32', },
- CARD64 => { size => 8, encoding => 'byte_order', type => 'FT_UINT64', base => 'BASE_HEX_DEC', get => 'tvb_get_guint64', list => 'listOfCard64', },
- INT8 => { size => 1, encoding => 'byte_order', type => 'FT_INT8', base => 'BASE_DEC', get => 'tvb_get_guint8', list => 'listOfByte', },
- INT16 => { size => 2, encoding => 'byte_order', type => 'FT_INT16', base => 'BASE_DEC', get => 'tvb_get_guint16', list => 'listOfInt16', },
- INT32 => { size => 4, encoding => 'byte_order', type => 'FT_INT32', base => 'BASE_DEC', get => 'tvb_get_guint32', list => 'listOfInt32', },
- INT64 => { size => 8, encoding => 'byte_order', type => 'FT_INT64', base => 'BASE_DEC', get => 'tvb_get_guint64', list => 'listOfInt64', },
+ char => { size => 1, encoding => 'ENC_ASCII|ENC_NA', type => 'FT_STRING', base => 'BASE_NONE', get => 'tvb_get_uint8', list => 'listOfByte', },
+ void => { size => 1, encoding => 'ENC_NA', type => 'FT_BYTES', base => 'BASE_NONE', get => 'tvb_get_uint8', list => 'listOfByte', },
+ BYTE => { size => 1, encoding => 'ENC_NA', type => 'FT_BYTES', base => 'BASE_NONE', get => 'tvb_get_uint8', list => 'listOfByte', },
+ CARD8 => { size => 1, encoding => 'byte_order', type => 'FT_UINT8', base => 'BASE_HEX_DEC', get => 'tvb_get_uint8', list => 'listOfByte', },
+ CARD16 => { size => 2, encoding => 'byte_order', type => 'FT_UINT16', base => 'BASE_HEX_DEC', get => 'tvb_get_uint16', list => 'listOfCard16', },
+ CARD32 => { size => 4, encoding => 'byte_order', type => 'FT_UINT32', base => 'BASE_HEX_DEC', get => 'tvb_get_uint32', list => 'listOfCard32', },
+ CARD64 => { size => 8, encoding => 'byte_order', type => 'FT_UINT64', base => 'BASE_HEX_DEC', get => 'tvb_get_uint64', list => 'listOfCard64', },
+ INT8 => { size => 1, encoding => 'byte_order', type => 'FT_INT8', base => 'BASE_DEC', get => 'tvb_get_uint8', list => 'listOfByte', },
+ INT16 => { size => 2, encoding => 'byte_order', type => 'FT_INT16', base => 'BASE_DEC', get => 'tvb_get_uint16', list => 'listOfInt16', },
+ INT32 => { size => 4, encoding => 'byte_order', type => 'FT_INT32', base => 'BASE_DEC', get => 'tvb_get_uint32', list => 'listOfInt32', },
+ INT64 => { size => 8, encoding => 'byte_order', type => 'FT_INT64', base => 'BASE_DEC', get => 'tvb_get_uint64', list => 'listOfInt64', },
float => { size => 4, encoding => 'byte_order', type => 'FT_FLOAT', base => 'BASE_NONE', get => 'tvb_get_ieee_float', list => 'listOfFloat', },
double => { size => 8, encoding => 'byte_order', type => 'FT_DOUBLE', base => 'BASE_NONE', get => 'tvb_get_ieee_double', list => 'listOfDouble', },
- BOOL => { size => 1, encoding => 'byte_order', type => 'FT_BOOLEAN',base => 'BASE_NONE', get => 'tvb_get_guint8', list => 'listOfByte', },
+ BOOL => { size => 1, encoding => 'byte_order', type => 'FT_BOOLEAN',base => 'BASE_NONE', get => 'tvb_get_uint8', list => 'listOfByte', },
);
my %simpletype; # Reset at the beginning of each extension
@@ -124,6 +124,7 @@ my %struct = # Not reset; contains structures already defined.
'xinput:ButtonClass' => 1,
'xinput:KeyClass' => 1,
'xinput:ScrollClass' => 1,
+ 'xinput:GestureClass' => 1,
'xinput:TouchClass' => 1,
'xinput:ValuatorClass' => 1,
@@ -200,7 +201,7 @@ sub mesa_type {
if($name eq 'enum') {
# enum does not have a direct X equivalent
$gltype{'GLenum'} = { size => 4, encoding => 'byte_order', type => 'FT_UINT32', base => 'BASE_HEX|BASE_EXT_STRING',
- get => 'tvb_get_guint32', list => 'listOfCard32',
+ get => 'tvb_get_uint32', list => 'listOfCard32',
val => '&mesa_enum_ext', };
return;
}
@@ -258,13 +259,13 @@ sub mesa_function {
# Wireshark defines _U_ to mean "Unused" (compiler specific define)
if (!@elements) {
print $impl <<eot
-static void mesa_$name(tvbuff_t *tvb _U_, int *offsetp _U_, proto_tree *t _U_, guint byte_order _U_, int length _U_)
+static void mesa_$name(tvbuff_t *tvb _U_, int *offsetp _U_, proto_tree *t _U_, unsigned byte_order _U_, int length _U_)
{
eot
;
} else {
print $impl <<eot
-static void mesa_$name(tvbuff_t *tvb, int *offsetp, proto_tree *t, guint byte_order, int length _U_)
+static void mesa_$name(tvbuff_t *tvb, int *offsetp, proto_tree *t, unsigned byte_order, int length _U_)
{
eot
;
@@ -299,28 +300,28 @@ eot
my $variable_param = $e->att('variable_param');
if ($list and $count and $variable_param) {
- print $decl "static int ${regname} = -1;\n";
+ print $decl "static int ${regname};\n";
print $reg "{ &$regname, { \"$fieldname\", \"x11.glx.render.$name.$fieldname\", FT_NONE, BASE_NONE, NULL, 0, NULL, HFILL }},\n";
- print $decl "static int ${regname}_signed = -1;\n";
+ print $decl "static int ${regname}_signed;\n";
print $reg "{ &${regname}_signed, { \"$fieldname\", \"x11.glx.render.$name.$fieldname\", FT_INT8, BASE_DEC, NULL, 0, NULL, HFILL }},\n";
- print $decl "static int ${regname}_unsigned = -1;\n";
+ print $decl "static int ${regname}_unsigned;\n";
print $reg "{ &${regname}_unsigned, { \"$fieldname\", \"x11.glx.render.$name.$fieldname\", FT_UINT8, BASE_DEC, NULL, 0, NULL, HFILL }},\n";
- print $decl "static int ${regname}_item_card16 = -1;\n";
+ print $decl "static int ${regname}_item_card16;\n";
print $reg "{ &${regname}_item_card16, { \"$fieldname\", \"x11.glx.render.$name.$fieldname\", FT_UINT16, BASE_DEC, NULL, 0, NULL, HFILL }},\n";
- print $decl "static int ${regname}_item_int16 = -1;\n";
+ print $decl "static int ${regname}_item_int16;\n";
print $reg "{ &${regname}_item_int16, { \"$fieldname\", \"x11.glx.render.$name.$fieldname\", FT_INT16, BASE_DEC, NULL, 0, NULL, HFILL }},\n";
- print $decl "static int ${regname}_item_card32 = -1;\n";
+ print $decl "static int ${regname}_item_card32;\n";
print $reg "{ &${regname}_item_card32, { \"$fieldname\", \"x11.glx.render.$name.$fieldname\", FT_UINT32, BASE_DEC, NULL, 0, NULL, HFILL }},\n";
- print $decl "static int ${regname}_item_int32 = -1;\n";
+ print $decl "static int ${regname}_item_int32;\n";
print $reg "{ &${regname}_item_int32, { \"$fieldname\", \"x11.glx.render.$name.$fieldname\", FT_INT32, BASE_DEC, NULL, 0, NULL, HFILL }},\n";
- print $decl "static int ${regname}_item_float = -1;\n";
+ print $decl "static int ${regname}_item_float;\n";
print $reg "{ &${regname}_item_float, { \"$fieldname\", \"x11.glx.render.$name.$fieldname\", FT_FLOAT, BASE_NONE, NULL, 0, NULL, HFILL }},\n";
} else {
- print $decl "static int $regname = -1;\n";
+ print $decl "static int $regname;\n";
if ($list and $info->{'size'} > 1) {
print $reg "{ &$regname, { \"$fieldname\", \"x11.glx.render.$name.$fieldname.list\", FT_NONE, BASE_NONE, NULL, 0, NULL, HFILL }},\n";
$regname .= '_item';
- print $decl "static int $regname = -1;\n";
+ print $decl "static int $regname;\n";
}
print $reg "{ &$regname, { \"$fieldname\", \"x11.glx.render.$name.$fieldname\", $ft, $base, $val, 0, NULL, HFILL }},\n";
@@ -337,7 +338,7 @@ eot
my $varname = $wholename;
$varname =~ s/\s//g;
my $regname = registered_name($name, $varname);
- print $decl "static int $regname = -1;\n";
+ print $decl "static int $regname;\n";
print $reg "{ &$regname, { \"$wholename\", \"x11.glx.render.$name.$varname\", FT_BOOLEAN, BASE_NONE, NULL, 0, NULL, HFILL }},\n";
}
foreach my $wholename (('row length', 'skip rows', 'skip pixels', 'alignment')) {
@@ -345,7 +346,7 @@ eot
my $varname = $wholename;
$varname =~ s/\s//g;
my $regname = registered_name($name, $varname);
- print $decl "static int $regname = -1;\n";
+ print $decl "static int $regname;\n";
print $reg "{ &$regname, { \"$wholename\", \"x11.glx.render.$name.$varname\", FT_UINT32, BASE_HEX_DEC, NULL, 0, NULL, HFILL }},\n";
}
}
@@ -397,7 +398,7 @@ eot
my $get = $info->{'get'};
if ($e->att('counter') or $type_param{$fieldname}) {
- if ($get ne "tvb_get_guint8") {
+ if ($get ne "tvb_get_uint8") {
print $impl " $fieldname = $get(tvb, *offsetp, $encoding);\n";
} else {
print $impl " $fieldname = $get(tvb, *offsetp);\n";
@@ -665,7 +666,7 @@ sub register_element($$$$;$)
given ($e->name()) {
when ('pad') { return; } # Pad has no variables
- when ('switch') { return; } # Switch defines varaibles in a tighter scope to avoid collisions
+ when ('switch') { return; } # Switch defines variables in a tighter scope to avoid collisions
}
# Register field with wireshark
@@ -710,16 +711,16 @@ sub register_element($$$$;$)
my $itemhuman = $humanname . '.' . $itemname;
my $bitshift = "1U << $val";
- say $decl "static int $item = -1;";
+ say $decl "static int $item;";
say $reg "{ &$item, { \"$itemname\", \"$itemhuman\", FT_BOOLEAN, $bitsize, NULL, $bitshift, NULL, HFILL }},";
}
}
- print $decl "static int $regname = -1;\n";
+ print $decl "static int $regname;\n";
if ($e->name() eq 'list' and defined $info->{'size'} and $info->{'size'} > 1) {
print $reg "{ &$regname, { \"$fieldname\", \"$humanname.list\", FT_NONE, BASE_NONE, NULL, 0, NULL, HFILL }},\n";
$regname .= '_item';
- print $decl "static int $regname = -1;\n";
+ print $decl "static int $regname;\n";
}
print $reg "{ &$regname, { \"$fieldname\", \"$humanname\", $ft, $base, $vals, 0, NULL, HFILL }},\n";
@@ -731,11 +732,11 @@ sub register_element($$$$;$)
if ($refref->{field}{$fieldname} and get_simple_info($type)) {
# Pre-declare variable
if ($ft eq 'FT_FLOAT') {
- print $impl $indent."gfloat f_$fieldname;\n";
+ print $impl $indent."float f_$fieldname;\n";
} elsif ($ft eq 'FT_DOUBLE') {
- print $impl $indent."gdouble f_$fieldname;\n";
+ print $impl $indent."double f_$fieldname;\n";
} elsif ($ft eq 'FT_INT64' or $ft eq 'FT_UINT64') {
- print $impl $indent."gint64 f_$fieldname;\n";
+ print $impl $indent."int64_t f_$fieldname;\n";
} else {
print $impl $indent."int f_$fieldname;\n";
}
@@ -796,7 +797,7 @@ sub dissect_element($$$$$;$$)
say $impl "field$fieldsize(tvb, offsetp, t, $regname, byte_order);";
} elsif ($e->att('mask')) {
if ($refref->{field}{$fieldname}) {
- if ($get ne "tvb_get_guint8") {
+ if ($get ne "tvb_get_uint8") {
say $impl $indent."f_$fieldname = $get(tvb, *offsetp, byte_order);";
} else {
say $impl $indent."f_$fieldname = $get(tvb, *offsetp);";
@@ -804,7 +805,7 @@ sub dissect_element($$$$$;$$)
}
my $bitmask_field = $fieldname . "_bits";
say $impl $indent."{";
- say $impl $indent." int* const $bitmask_field [] = {";
+ say $impl $indent." static int* const $bitmask_field [] = {";
my $bit = $enum{$enum_name{$e->att('mask')}}{bit};
for my $val (sort { $a <=> $b } keys %$bit) {
my $item = $regname . '_mask_' . $$bit{$val};
@@ -818,7 +819,7 @@ sub dissect_element($$$$$;$$)
say $impl $indent."*offsetp += $size;";
} else {
if ($refref->{field}{$fieldname}) {
- if ($get ne "tvb_get_guint8") {
+ if ($get ne "tvb_get_uint8") {
say $impl $indent."f_$fieldname = $get(tvb, *offsetp, byte_order);";
} else {
say $impl $indent."f_$fieldname = $get(tvb, *offsetp);";
@@ -871,7 +872,7 @@ sub dissect_element($$$$$;$$)
say $impl $indent."{";
say $impl $indent." int i;";
say $impl $indent." for (i = 0; i < $lencalc; i++) {";
- if ($get ne "tvb_get_guint8") {
+ if ($get ne "tvb_get_uint8") {
say $impl $indent." sumof_$fieldname += $get(tvb, *offsetp + i * $size, byte_order);";
} else {
say $impl $indent." sumof_$fieldname += $get(tvb, *offsetp + i * $size);";
@@ -1077,7 +1078,7 @@ sub struct {
print $impl <<eot
-static int struct_size_$name(tvbuff_t *tvb _U_, int *offsetp _U_, guint byte_order _U_$prefs)
+static int struct_size_$name(tvbuff_t *tvb _U_, int *offsetp _U_, unsigned byte_order _U_$prefs)
{
int size = 0;
eot
@@ -1139,7 +1140,7 @@ eot
my $fname = $e->att('name');
if (defined($refs{$fname})) {
my $get = $info->{'get'};
- if ($get ne "tvb_get_guint8") {
+ if ($get ne "tvb_get_uint8") {
say $impl " f_$fname = $info->{'get'}(tvb, *offsetp + size + $size, byte_order);";
} else {
say $impl " f_$fname = $info->{'get'}(tvb, *offsetp + size + $size);";
@@ -1154,12 +1155,12 @@ eot
$size = 0; # 0 means "dynamic calcuation required"
}
- print $decl "static int hf_x11_struct_$name = -1;\n";
+ print $decl "static int hf_x11_struct_$name;\n";
print $reg "{ &hf_x11_struct_$name, { \"$name\", \"x11.struct.$name\", FT_NONE, BASE_NONE, NULL, 0, NULL, HFILL }},\n";
print $impl <<eot
-static void struct_$name(tvbuff_t *tvb, int *offsetp, proto_tree *root, guint byte_order _U_, int count$prefs)
+static void struct_$name(tvbuff_t *tvb, int *offsetp, proto_tree *root, unsigned byte_order _U_, int count$prefs)
{
int i;
for (i = 0; i < count; i++) {
@@ -1237,12 +1238,12 @@ sub union {
@sizes = sort {$b <=> $a} @sizes;
my $size = $sizes[0];
- print $decl "static int hf_x11_union_$name = -1;\n";
+ print $decl "static int hf_x11_union_$name;\n";
print $reg "{ &hf_x11_union_$name, { \"$name\", \"x11.union.$name\", FT_NONE, BASE_NONE, NULL, 0, NULL, HFILL }},\n";
print $impl <<eot
-static void struct_$name(tvbuff_t *tvb, int *offsetp, proto_tree *root, guint byte_order, int count)
+static void struct_$name(tvbuff_t *tvb, int *offsetp, proto_tree *root, unsigned byte_order, int count)
{
int i;
int base = *offsetp;
@@ -1365,14 +1366,14 @@ sub request {
if (!@elements) {
print $impl <<eot
-static void $header$name(tvbuff_t *tvb _U_, packet_info *pinfo _U_, int *offsetp _U_, proto_tree *t _U_, guint byte_order _U_, int length _U_)
+static void $header$name(tvbuff_t *tvb _U_, packet_info *pinfo _U_, int *offsetp _U_, proto_tree *t _U_, unsigned byte_order _U_, int length _U_)
{
eot
;
} else {
print $impl <<eot
-static void $header$name(tvbuff_t *tvb, packet_info *pinfo _U_, int *offsetp, proto_tree *t, guint byte_order, int length _U_)
+static void $header$name(tvbuff_t *tvb, packet_info *pinfo _U_, int *offsetp, proto_tree *t, unsigned byte_order, int length _U_)
{
eot
;
@@ -1410,9 +1411,9 @@ eot
# Wireshark defines _U_ to mean "Unused" (compiler specific define)
if (!@elements) {
- say $impl "static void $header$name"."_Reply(tvbuff_t *tvb _U_, packet_info *pinfo, int *offsetp _U_, proto_tree *t _U_, guint byte_order _U_)\n{";
+ say $impl "static void $header$name"."_Reply(tvbuff_t *tvb _U_, packet_info *pinfo, int *offsetp _U_, proto_tree *t _U_, unsigned byte_order _U_)\n{";
} else {
- say $impl "static void $header$name"."_Reply(tvbuff_t *tvb, packet_info *pinfo, int *offsetp, proto_tree *t, guint byte_order)\n{";
+ say $impl "static void $header$name"."_Reply(tvbuff_t *tvb, packet_info *pinfo, int *offsetp, proto_tree *t, unsigned byte_order)\n{";
}
say $impl ' int sequence_number;' if (@elements);
@@ -1438,13 +1439,13 @@ eot
$length = dissect_element($e, $varpat, $humanpat, $length, $refs);
if ($first) {
$first = 0;
- say $impl ' sequence_number = tvb_get_guint16(tvb, *offsetp, byte_order);';
+ say $impl ' sequence_number = tvb_get_uint16(tvb, *offsetp, byte_order);';
say $impl ' proto_tree_add_uint_format_value(t, hf_x11_reply_sequencenumber, tvb, *offsetp, 2, sequence_number,';
say $impl ' "%d ('.$header.'-'.$name.')", sequence_number);';
say $impl ' *offsetp += 2;';
if ($refs->{field}{length}) {
- say $impl ' f_length = tvb_get_guint32(tvb, *offsetp, byte_order);';
+ say $impl ' f_length = tvb_get_uint32(tvb, *offsetp, byte_order);';
}
if ($refs->{length}) {
say $impl ' length = f_length * 4 + 32;';
@@ -1465,7 +1466,7 @@ sub defxid(@) {
my $name;
while ($name = shift) {
my $qualname = qualname($name);
- $simpletype{$qualname} = { size => 4, encoding => 'byte_order', type => 'FT_UINT32', base => 'BASE_HEX', get => 'tvb_get_guint32', list => 'listOfCard32', };
+ $simpletype{$qualname} = { size => 4, encoding => 'byte_order', type => 'FT_UINT32', base => 'BASE_HEX', get => 'tvb_get_uint32', list => 'listOfCard32', };
$type_name{$name} = $qualname;
}
}
@@ -1534,12 +1535,12 @@ sub event {
if ($xge) {
print $impl <<eot
-static void $header$name(tvbuff_t *tvb _U_, int length _U_, int *offsetp _U_, proto_tree *t _U_, guint byte_order _U_)
+static void $header$name(tvbuff_t *tvb _U_, int length _U_, int *offsetp _U_, proto_tree *t _U_, unsigned byte_order _U_)
{
} else {
print $impl <<eot
-static void $header$name(tvbuff_t *tvb _U_, int *offsetp _U_, proto_tree *t _U_, guint byte_order _U_)
+static void $header$name(tvbuff_t *tvb _U_, int *offsetp _U_, proto_tree *t _U_, unsigned byte_order _U_)
{
eot
;
@@ -1549,14 +1550,14 @@ eot
$length = 10;
print $impl <<eot
-static void $header$name(tvbuff_t *tvb, int length _U_, int *offsetp, proto_tree *t, guint byte_order)
+static void $header$name(tvbuff_t *tvb, int length _U_, int *offsetp, proto_tree *t, unsigned byte_order)
{
eot
;
} else {
print $impl <<eot
-static void $header$name(tvbuff_t *tvb, int *offsetp, proto_tree *t, guint byte_order)
+static void $header$name(tvbuff_t *tvb, int *offsetp, proto_tree *t, unsigned byte_order)
{
eot
;
@@ -1650,7 +1651,7 @@ sub xcb_start {
%enum_name = ();
%type_name = ();
- print $error "const char *$header"."_errors[] = {\n";
+ print $error "static const char * const $header"."_errors[] = {\n";
}
sub xcb {
@@ -1663,7 +1664,7 @@ sub xcb {
my $genevent_name = 'NULL';
my $reply_name = $header . "_replies";
- print $decl "static int hf_x11_$lookup_name = -1;\n\n";
+ print $decl "static int hf_x11_$lookup_name;\n\n";
print $impl "static const value_string $lookup_name"."[] = {\n";
foreach my $req (sort {$a <=> $b} keys %request) {
@@ -1672,7 +1673,7 @@ sub xcb {
print $impl " { 0, NULL }\n";
print $impl "};\n";
- say $impl "const x11_event_info $event_name".'[] = {';
+ say $impl "static const x11_event_info $event_name".'[] = {';
foreach my $e (sort {$a <=> $b} keys %event) {
say $impl " { \"$header-$event{$e}\", $header$event{$e} },";
}
@@ -1691,7 +1692,7 @@ sub xcb {
say $impl '';
}
- print $impl "static x11_reply_info $reply_name"."[] = {\n";
+ print $impl "static const x11_reply_info $reply_name"."[] = {\n";
foreach my $e (sort {$a <=> $b} keys %reply) {
print $impl " { $e, $header$reply{$e}_Reply },\n";
}
@@ -1702,7 +1703,7 @@ sub xcb {
print $impl <<eot
-static void dispatch_$header(tvbuff_t *tvb, packet_info *pinfo, int *offsetp, proto_tree *t, guint byte_order)
+static void dispatch_$header(tvbuff_t *tvb, packet_info *pinfo, int *offsetp, proto_tree *t, unsigned byte_order)
{
int minor, length;
minor = CARD8($lookup_name);
@@ -1832,7 +1833,7 @@ if (-e "$mesadir/gl_API.xml") {
print $enum "};\n";
$enum->close();
- print $decl "static int hf_x11_glx_render_op_name = -1;\n\n";
+ print $decl "static int hf_x11_glx_render_op_name;\n\n";
print $impl "static const value_string glx_render_op_name"."[] = {\n";
foreach my $req (sort {$a <=> $b} keys %request) {
@@ -1847,17 +1848,17 @@ if (-e "$mesadir/gl_API.xml") {
# Uses ett_x11_list_of_rectangle, since I am unable to see how the subtree type matters.
print $impl <<eot
-static void dispatch_glx_render(tvbuff_t *tvb, packet_info *pinfo, int *offsetp, proto_tree *t, guint byte_order, int length)
+static void dispatch_glx_render(tvbuff_t *tvb, packet_info *pinfo, int *offsetp, proto_tree *t, unsigned byte_order, int length)
{
while (length >= 4) {
- guint32 op, len;
+ uint32_t op, len;
int next;
proto_item *ti;
proto_tree *tt;
- len = tvb_get_guint16(tvb, *offsetp, byte_order);
+ len = tvb_get_uint16(tvb, *offsetp, byte_order);
- op = tvb_get_guint16(tvb, *offsetp + 2, byte_order);
+ op = tvb_get_uint16(tvb, *offsetp + 2, byte_order);
ti = proto_tree_add_uint(t, hf_x11_glx_render_op_name, tvb, *offsetp, len, op);
tt = proto_item_add_subtree(ti, ett_x11_list_of_rectangle);
diff --git a/tools/radiotap-gen/radiotap-gen.c b/tools/radiotap-gen/radiotap-gen.c
index 3f319ab5..927920af 100644
--- a/tools/radiotap-gen/radiotap-gen.c
+++ b/tools/radiotap-gen/radiotap-gen.c
@@ -121,7 +121,7 @@ static void gen_u_sig_pkts(pcap_dumper_t *dumper)
* EHT SIG MCS: 1 (EHT-MCS 1)
*/
pkt.radiotap.u_sig_hdr.common = PHY_VERSION_ID_KNOWN | BW_KNOWN | \
- UL_DL_KNOWN | 0x00018000;;
+ UL_DL_KNOWN | 0x00018000;
pkt.radiotap.u_sig_hdr.mask = 0x003fbec0;
pkt.radiotap.u_sig_hdr.value = 0x0001183F;
diff --git a/tools/rpm-setup.sh b/tools/rpm-setup.sh
index 86a3ab29..15f285d1 100755
--- a/tools/rpm-setup.sh
+++ b/tools/rpm-setup.sh
@@ -71,38 +71,42 @@ then
exit 1
fi
-BASIC_LIST="cmake \
- gcc \
- gcc-c++ \
- flex \
- python3 \
- desktop-file-utils \
- git \
- glib2-devel \
- libpcap-devel \
- pcre2-devel \
- zlib-devel \
- libgcrypt-devel"
-
-ADDITIONAL_LIST="libcap-devel \
- libssh-devel \
- krb5-devel \
- perl-Parse-Yapp \
- snappy-devel \
- minizip-devel \
- lz4 \
- libxml2-devel \
- perl \
- spandsp-devel \
- systemd-devel \
- python3-pytest \
- python3-pytest-xdist"
+BASIC_LIST="
+ cmake
+ desktop-file-utils
+ flex
+ gcc
+ gcc-c++
+ git
+ glib2-devel
+ libgcrypt-devel
+ libpcap-devel
+ pcre2-devel
+ python3
+ zlib-devel
+ "
+
+ADDITIONAL_LIST="
+ krb5-devel
+ libcap-devel
+ libssh-devel
+ libxml2-devel
+ lz4
+ minizip-devel
+ perl
+ perl-Parse-Yapp
+ python3-pytest
+ python3-pytest-xdist
+ snappy-devel
+ spandsp-devel
+ systemd-devel
+ "
# Uncomment to add PNG compression utilities used by compress-pngs:
-# ADDITIONAL_LIST="$ADDITIONAL_LIST \
-# advancecomp \
-# optipng \
-# oxipng \
+# ADDITIONAL_LIST="$ADDITIONAL_LIST
+# advancecomp
+# optipng
+# oxipng
# pngcrush"
# XXX
@@ -166,14 +170,8 @@ add_packages() {
add_package BASIC_LIST glib2 || add_package BASIC_LIST libglib-2_0-0 ||
echo "Required package glib2|libglib-2_0-0 is unavailable" >&2
-# lua51, lua51-devel: OpenSUSE Leap 42.3 (lua would be fine too, as it installs lua52), OpenSUSE Leap 15.0 (lua installs lua53, so it wouldn't work)
-# compat-lua, compat-lua-devel: Fedora 28, Fedora 29, CentOS 8
-# lua, lua-devel: CentOS 7
-add_package BASIC_LIST lua51-devel || add_package BASIC_LIST compat-lua-devel || add_package BASIC_LIST lua-devel ||
-echo "Required package lua51-devel|compat-lua-devel|lua-devel is unavailable" >&2
-
-add_package BASIC_LIST lua51 || add_package BASIC_LIST compat-lua || add_package BASIC_LIST lua ||
-echo "Required package lua51|compat-lua|lua is unavailable" >&2
+add_package BASIC_LIST lua-devel || add_package BASIC_LIST lua54-devel || add_package BASIC_LIST lua53-devel ||
+echo "Required package lua-devel|lua54-devel|lua53-devel is unavailable" >&2
add_package BASIC_LIST libpcap || add_package BASIC_LIST libpcap1 ||
echo "Required package libpcap|libpcap1 is unavailable" >&2
@@ -217,16 +215,20 @@ then
# OpenSUSE additionally has a separate Qt5PrintSupport package.
add_package BASIC_LIST qt5-qtmultimedia-devel ||
add_packages BASIC_LIST libqt5-qtmultimedia-devel libQt5PrintSupport-devel ||
- echo "Required Qt5 Mutlimedia and/or Qt5 Print Support is unavailable" >&2
+ echo "Required Qt5 Multimedia and/or Qt5 Print Support is unavailable" >&2
- # This in only required on OpenSUSE
+ # This is only required on OpenSUSE
add_package BASIC_LIST libqt5-qtsvg-devel ||
echo "Required OpenSUSE package libqt5-qtsvg-devel is unavailable. Not required for other distributions." >&2
- # This in only required on OpenSUSE
+ # This is only required on OpenSUSE
add_package BASIC_LIST libQt5Concurrent-devel ||
echo "Required OpenSUSE package libQt5Concurrent-devel is unavailable. Not required for other distributions." >&2
+ # This is only required on OpenSUSE
+ add_package ADDITIONAL_LIST libQt5DBus-devel ||
+ echo "Optional OpenSUSE package libQt5DBus-devel is unavailable. Not required for other distributions." >&2
+
add_package ADDITIONAL_LIST qt5-qtimageformats ||
add_package ADDITIONAL_LIST libqt5-qtimageformats ||
echo "Optional Qt5 Image Formats is unavailable" >&2
@@ -234,26 +236,41 @@ fi
if [ $ADD_QT6 -ne 0 ]
then
- # Fedora Qt6 packages required from a minimal installation
- QT6_LIST=(qt6-qtbase-devel
- qt6-qttools-devel
- qt6-qt5compat-devel
- qt6-qtmultimedia-devel
- libxkbcommon-devel)
-
- for pkg in "${QT6_LIST[@]}"
+ # See CMakeLists.txt in the root directory for a list of
+ # Qt6 modules required for a minimal installation
+ # Base and Multimedia pull in most of the other required modules
+ # RH/Fedora and SUSE use slightly different pkg names for modules
+ QT6_LIST=(base
+ tools
+ multimedia)
+
+ for module in "${QT6_LIST[@]}"
do
- add_package BASIC_LIST "$pkg" ||
- echo "Qt6 dependency $pkg is unavailable" >&2
+ add_package BASIC_LIST "qt6-qt${module}-devel" ||
+ add_package BASIC_LIST "qt6-${module}-devel" ||
+ echo "Required Qt6 Module $module is unavailable" >&2
done
+ # qt6-linguist: RHEL, Fedora
+ # qt6-linguist-devel: OpenSUSE
+ add_package BASIC_LIST qt6-linguist ||
+ add_package BASIC_LIST qt6-linguist-devel ||
+ echo "Required Qt6 module LinguistTools is unavailable" >&2
+
+ add_package BASIC_LIST qt6-qt5compat-devel ||
+ echo "Required Qt6 module Qt5Compat is unavailable"
+
+ add_package BASIC_LIST libxkbcommon-devel ||
+ echo "Required Qt6 dependency libxkbcommon-devel is unavailable"
+
add_package ADDITIONAL_LIST qt6-qtimageformats ||
- echo "Optional Qt6 Image Formats is unavailable" >&2
+ add_package ADDITIONAL_LIST qt6-imageformats ||
+ echo "Optional Qt6 module Image Formats is unavailable" >&2
fi
# This in only required on OpenSUSE
add_packages BASIC_LIST hicolor-icon-theme xdg-utils ||
-echo "Required OpenSUSE packages hicolor-icon-theme and xdg-utils are unavailable. Not required for other distirbutions." >&2
+echo "Required OpenSUSE packages hicolor-icon-theme and xdg-utils are unavailable. Not required for other distributions." >&2
# This in only required (and available) on OpenSUSE
add_package BASIC_LIST update-desktop-files ||
@@ -318,6 +335,9 @@ echo "Optional package opus-devel|libopus-devel is unavailable" >&2
add_package ADDITIONAL_LIST bcg729-devel ||
echo "Optional package bcg729-devel is unavailable" >&2
+add_package ADDITIONAL_LIST zlib-ng-devel ||
+echo "Optional package zlib-ng-devel is unavailable" >&2
+
# RHEL 8 / CentOS 8 are missing the -devel packages for sbc and libsmi due to
# RH deciding not to ship all -devel packages.
# https://wiki.centos.org/FAQ/CentOS8/UnshippedPackages
diff --git a/tools/update-appdata.py b/tools/update-appdata.py
index b2960ee2..4b8ee7da 100755
--- a/tools/update-appdata.py
+++ b/tools/update-appdata.py
@@ -32,7 +32,6 @@ import io
import os.path
import re
import subprocess
-import sys
import time
def main():
@@ -63,10 +62,13 @@ def main():
<url>https://www.wireshark.org/docs/relnotes/wireshark-{0}.{1}.html</url>
</release>
'''
+ cur_date = date.fromtimestamp(time.time()).isoformat()
release_tag_l = [
f' <!-- Automatically generated by tools/{os.path.basename(__file__)} -->\n',
- release_tag_fmt.format(maj_min, next_micro, date.fromtimestamp(time.time()).isoformat())
+ release_tag_fmt.format(maj_min, next_micro, cur_date)
]
+ print(f'Added {maj_min}.{next_micro}, {cur_date}')
+
for micro in range(int(next_micro) - 1, -1, -1):
try:
tag_date = subprocess.run(
@@ -75,6 +77,7 @@ def main():
encoding='UTF-8',
stdout=subprocess.PIPE, stderr=subprocess.PIPE).stdout.strip()
release_tag_l.append(release_tag_fmt.format(maj_min, micro, tag_date))
+ print(f'Added {maj_min}.{micro}, {tag_date}')
except Exception:
print('Unable to fetch release tag')
raise
diff --git a/tools/update-tools-help.py b/tools/update-tools-help.py
index f951e8ee..9651828a 100755
--- a/tools/update-tools-help.py
+++ b/tools/update-tools-help.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python3
#
-# update-tools-help.py - Update the command line help output in docbook/wsug_src.
+# update-tools-help.py - Update the command line help output in doc/wsug_src.
#
# Wireshark - Network traffic analyzer
# By Gerald Combs <gerald@wireshark.org>
@@ -9,7 +9,7 @@
# SPDX-License-Identifier: GPL-2.0-or-later
'''Update tools help
-For each file that matches docbook/wsug_src/<command>-<flag>.txt, run
+For each file that matches doc/wsug_src/<command>-<flag>.txt, run
that command and flag. Update the file if the output differs.
'''
@@ -28,11 +28,11 @@ def main():
args = parser.parse_args()
this_dir = os.path.dirname(__file__)
- wsug_src_dir = os.path.join(this_dir, '..', 'docbook', 'wsug_src')
+ wsug_src_dir = os.path.join(this_dir, '..', 'doc', 'wsug_src')
tools_help_files = glob.glob(os.path.join(wsug_src_dir, '*-*.txt'))
tools_help_files.sort()
- tool_pat = re.compile('(\w+)(-\w).txt')
+ tool_pat = re.compile(r'(\w+)(-\w).txt')
# If tshark is present, assume that our other executables are as well.
program_path = args.program_path[0]
@@ -67,8 +67,8 @@ def main():
cur_lines = cur_help.splitlines()
new_lines = new_help.splitlines()
# Assume we have an extended version. Strip it.
- cur_lines[0] = re.split(' \(v\d+\.\d+\.\d+', cur_lines[0])[0]
- new_lines[0] = re.split(' \(v\d+\.\d+\.\d+', new_lines[0])[0]
+ cur_lines[0] = re.split(r' \(v\d+\.\d+\.\d+', cur_lines[0])[0]
+ new_lines[0] = re.split(r' \(v\d+\.\d+\.\d+', new_lines[0])[0]
diff = list(difflib.unified_diff(cur_lines, new_lines))
if (len(diff) > 0):
diff --git a/tools/validate-commit.py b/tools/validate-commit.py
index cf4980be..b5d85afe 100755
--- a/tools/validate-commit.py
+++ b/tools/validate-commit.py
@@ -19,7 +19,6 @@ import json
import os
import subprocess
import sys
-import tempfile
import urllib.request
import re
@@ -190,6 +189,14 @@ def verify_merge_request():
print("This doesn't appear to be a merge request. CI_MERGE_REQUEST_PROJECT_ID={}, CI_MERGE_REQUEST_IID={}".format(project_id, m_r_iid))
return True
+ m_r_sb_protected = os.getenv('CI_MERGE_REQUEST_SOURCE_BRANCH_PROTECTED')
+ if m_r_sb_protected == 'true':
+ print(f'''\
+You're pushing from a protected branch ({os.getenv('CI_MERGE_REQUEST_SOURCE_BRANCH_NAME')}). You will probably
+have to close this merge request and push from a different branch.\n
+''')
+ # Assume that the "Allow commits" test is about to fail.
+
m_r_url = '{}/projects/{}/merge_requests/{}'.format(gitlab_api_pfx, project_id, m_r_iid)
req = urllib.request.Request(m_r_url)
# print('req', repr(req), m_r_url)
@@ -219,8 +226,8 @@ def main():
try:
with open(args.commitmsg) as f:
return 0 if verify_body(f.read()) else 1
- except:
- print("Couldn't verify body of message from file '", + args.commitmsg + "'");
+ except Exception:
+ print("Couldn't verify body of message from file '", + args.commitmsg + "'")
return 1
diff --git a/tools/win-setup.ps1 b/tools/win-setup.ps1
index 93c1bb61..3db7efcf 100644
--- a/tools/win-setup.ps1
+++ b/tools/win-setup.ps1
@@ -24,7 +24,7 @@ Wireshark.
.PARAMETER Destination
Specifies the destination directory for the text files. The path must
-contain the pattern "wireshark-*-libs-4.2".
+contain the pattern "wireshark-*-libs-4.4".
.PARAMETER Platform
Target platform. Must be one of "win64" or "arm64".
@@ -43,12 +43,12 @@ their compressed archives.
A manifest file (library-manifest.xml)
.EXAMPLE
-C:\PS> .\tools\win-setup.ps1 -Destination C:\wireshark-master-64-libs-4.2 -Platform x64
+C:\PS> .\tools\win-setup.ps1 -Destination C:\wireshark-master-64-libs-4.4 -Platform x64
#>
Param(
[Parameter(Mandatory=$true, Position=0)]
- [ValidateScript({$_ -like "*[/\]wireshark-*-libs-4.2"})]
+ [ValidateScript({$_ -like "*[/\]wireshark-*-libs-4.4"})]
[String]
$Destination,
@@ -74,56 +74,66 @@ $X64Archives = @{
"AirPcap/AirPcap_Devpack_4_1_0_1622.zip" = "09d637f28a79b1d2ecb09f35436271a90c0f69bd0a1ee82b803abaaf63c18a69";
"bcg729/bcg729-1.0.4-win64ws.zip" = "9a095fda4c39860d96f0c568830faa6651cd17635f68e27aa6de46c689aa0ee2";
"brotli/brotli-1.0.9-1-win64ws.zip" = "3f8d24aec8668201994327ff8d8542fe507d1d468a500a1aec50d0415f695aab";
- "c-ares/c-ares-1.27.0-1-x64-windows-ws.zip" = "c9f1fb4836d55ae3aca0be077d2363678454820f9efad5c09371351a8770b5fc";
+ "c-ares/c-ares-1.28.1-1-x64-windows-ws.zip" = "6509df8e15ed67e87fac84a3b0acaa7b804b59f272fdf9decfb6157d241e73da";
+ "falcosecurity-libs/falcosecurity-libs-0.17.1-1-x64-ws.zip" = "371278147543e4b92dc404040b01aeacf221347f434f7b67143acd474555eecf";
+ "falcosecurity-libs/falcosecurity-plugins-2024-06-05-1-x64-ws.zip" = "3d19595f4ef9de77fef2ec2233000432b7b1e5a0f9353f6c8d99859205e113f8";
"gnutls/gnutls-3.8.4-2-x64-mingw-dynamic-ws.zip" = "e875c6c34f633c487ce390e25a4d26a3e27d3dca3f9fdfa1d8fd66026d1e257c";
- "krb5/krb5-1.20.1-1-x64-windows-ws.zip" = "a1e5c582afce6e2f72f0f5bd66df2c0f3cc984532a1da5314fc89d7b7f29cdbf";
+ "krb5/krb5-1.21.3-1-x64-windows-ws.zip" = "49b83da4baa476c4c31ed3ee463f962114a469b8c3d601db68bdb6bc03a88e42";
"libgcrypt/libgcrypt-1.10.2-2-x64-mingw-dynamic-ws.zip" = "477cfce91d791b34df75a5ad83626f1ac2ee147eff7965e52266a4fc3da0f920";
"libilbc/libilbc-2.0.2-4-x64-windows-ws.zip" = "4f35a1ffa03c89bf473f38249282a7867b203988d2b6d3d2f0924764619fd5f5";
"libmaxminddb/libmaxminddb-1.4.3-1-win64ws.zip" = "ee89944a19ab6e1c873bdecb9fc6205d317c41e6da6ec1d30bc892fddfd143da";
- "libpcap/libpcap-1.10.1-1-win64ws.zip" = "59f8e0e90a3ab5671df561266ed2b02870a6f8f3a895b80c9db19fea9a12ffb2";
+ "libpcap/libpcap-1.10.4-1-x64-windows-ws.zip" = "ad18ee1da72ce9df524b8baf9c185f237e534ef8e356c0b3eb3a5d6762004656";
"libsmi/libsmi-2021-01-15-2-x64-windows-ws.zip" = "ee8e349427d2a4ee9c18fc6b5839bd6df41685ecba03506179c21425e04f3413";
- "libssh/libssh-0.10.5-1-x64-mingw-dynamic-ws.zip" = "9c1410d1033a540d118e17938905144956291b4c6ca7a9b7af6959b2632a1aaa";
- "lua/lua-5.2.4-unicode-win64-vc14.zip" = "e8968d2c7871ce1ea82cbd29ac1b3a2c59d3dec25e483c5e12de85df66f5d928";
- "lz4/lz4-1.9.3-1-win64ws.zip" = "7129515893ffdc439f4ffe9673c4bc43f9042e910bb2607e68dde6b99a1ab058";
+ "libssh/libssh-0.10.6plus-1-x64-mingw-dynamic-ws.zip" = "b4debbc7b5ec34dd998cdc17699526191219e0c593d9797a4bd6147eab020934";
+ "lua/lua-5.4.6-unicode-win64-vc14.zip" = "f0c6c7eb28733425b16717beb338d44c041dfbb5c6807e618d96bd754276aaff";
+ "lz4/lz4-1.9.4-1-x64-windows-ws.zip" = "179cc6b9a509d7bf07b910389886a00c1cf4738164f32b8e6c245bfb973a4dc7";
"minizip/minizip-1.3-1-x64-windows-ws.zip" = "eb0bb5fffda5328e192d0d7951ff0254e64dcd736d46909fde7db792c1c53bcc";
- "nghttp2/nghttp2-1.61.0-1-x64-windows-ws.zip" = "c9f9976ae890acdee24f4a0a6514b345c4e34121e11f0439ba5d90ba6b122d31";
- "nghttp3/nghttp3-1.0.0-1-x64-windows-ws.zip" = "219a0024b79627c00fa1c134085678edbfac72b7b5eaf45db84f36e2553e1638";
- "opus/opus-1.3.1-3-win64ws.zip" = "1f7a55a6d2d7215dffa4a43bca8ca05024bd4ba1ac3d0d0c405fd38b09cc2205";
+ "minizip-ng/minizip-ng-4.0.5-1-x64-windows-ws.zip" = "965c13ec9944ab3515cdfdec36c361f70d76ec773e0897bbe60bdcf1b4eac01b";
+ "nghttp2/nghttp2-1.62.1-1-x64-windows-ws.zip" = "381f995791bf48c43a4ab4bdbc68f89d51b8cde8501ded9ce280e3697bb911e5";
+ "nghttp3/nghttp3-1.1.0-1-x64-windows-ws.zip" = "e7e181f08ef6e7f592ba0cfef043822c2d516d130c2aad9447a588ade31a258a";
+ "opencore-amr/opencore-amr-0.1.6-1-x64-mingw-dynamic-ws.zip" = "013a7b29b62bec123482fed6acd8aed882be3478870c2ec8aec15b7cb81cda02";
+ "opus/opus-1.5.1-1-x64-windows-ws.zip" = "30d293b6e4902edae0ca5d747881d9a18f7f03b66a4758bf797f341f89592e6a";
"sbc/sbc-2.0-1-x64-windows-ws.zip" = "d1a58f977dcffa168b11b280bd10228191582d263b7c901e50cde7c1c43d9c04";
- "snappy/snappy-1.1.9-1-win64ws.zip" = "fa907724be019bcc55d27ebe88257ba8898b5c38b719099b8164ac78600d81cc";
+ "snappy/snappy-1.2.1-1-x64-windows-ws.zip" = "e2ffccb26e91881b42d03061dcc728a98af9037705cb4595c8ccbe8d912b5d68";
"spandsp/spandsp-0.0.6-5-x64-windows-ws.zip" = "cbb18310876ec6f081662253a2d37f5174ac60c58b0b7cd6759852fbcfaa7d7f";
"speexdsp/speexdsp-1.21.1-1-win64ws.zip" = "d36db62e64ffaee38d9f607bef07d3778d8957ad29757f3eba169eb135f1a4e5";
- "vcpkg-export/vcpkg-export-20231017-1-x64-windows-ws.zip" = "fc5ea8110ce5e905e3342197481a805b6c2c87e273b0370bcc6a5964316c20ee";
+ "vcpkg-export/vcpkg-export-20240524-1-x64-windows-ws.zip" = "c566f41f20ae87fa4357d204f92cbbe2f236bc1df28c3d106fecfe21a8fbfa11";
"WinSparkle/WinSparkle-0.8.0-4-gb320893.zip" = "3ae42326bcd34594bc21b1e7948863a839ee76e87d9f4cf6b59b9d9f9a083881";
- "zstd/zstd-1.5.2-1-win64ws.zip" = "d920afe636951cfcf144824d9c075d1f2c13387f4739152fe185fd9c09fc58f2";
+ "zlib-ng/zlib-ng-2.1.5-1-x64-windows-ws.zip" = "a9f90e349d041d464afc1e0926d628ebee02e7093ab9983c5a7808e2b70d7873";
+ "zstd/zstd-1.5.6-1-x64-windows-ws.zip" = "f3f59351d273a1c1f2b84b60164556c8d2726155da2148f917d260d9efd16b6e";
}
$Arm64Archives = @{
"bcg729/bcg729-1.1.1-1-win64armws.zip" = "f4d76b9acf0d0e12e87a020e9805d136a0e8775e061eeec23910a10828153625";
"brotli/brotli-1.0.9-1-win64armws.zip" = "5ba1b62ebc514d55c3eae85a00ff107e587b6e7cb1275e2d33fcddcd49f8e2af";
- "c-ares/c-ares-1.27.0-1-arm64-windows-ws.zip" = "d96bd88aeed45350b2d14f023bd6d9e5dc63aa3bb0b47da85e4e125d6f74bfcf";
+ "c-ares/c-ares-1.28.1-1-arm64-windows-ws.zip" = "84954f593d02d1af0ff5c7af1646b0fec5af3260fecda6cda7bbc84f9e343e10";
+ "falcosecurity-libs/falcosecurity-libs-0.17.1-2-arm64-ws.zip" = "c9a2e0ae1636b53fd843c87bb136eebe24595d658eb7a82ca9aff2d25b185902";
+ "falcosecurity-libs/falcosecurity-plugins-2024-06-05-1-arm64-ws.zip" = "81f7b5a918c3b4cd1c0e08d8e2fadd6859363897d9d6a48f8b408aa67f072b5c";
"gnutls/gnutls-3.8.4-2-arm64-mingw-dynamic-ws.zip" = "17f28b4a47857db86d9c3f9b7ba12528c8e6368524314fb0fe5ea9303f1a58f9";
- "krb5/krb5-1.20.1-1-arm64-windows-ws.zip" = "6afe3185ea7621224544683a89d7c724d32bef6f1b552738dbc713ceb2151437";
+ "krb5/krb5-1.21.3-1-arm64-windows-ws.zip" = "26166173cb653fdf2153c311a9f611a76575359393222cebd5228842632a0ccb";
"libgcrypt/libgcrypt-1.10.2-2-arm64-mingw-dynamic-ws.zip" = "cd42fa2739a204e129d655e1b0dda83ceb27399812b8b2eccddae4a9ecd8d0ce";
"libilbc/libilbc-2.0.2-4-arm64-windows-ws.zip" = "00a506cc1aac8a2e31856e463a555d899b5a6ccf376485a124104858ccf0be6d";
"libmaxminddb/libmaxminddb-1.4.3-1-win64armws.zip" = "9996327f301cb4a4de797bc024ad0471acd95c1850a2afc849c57fcc93360610";
- "libpcap/libpcap-1.10.1-1-win64armws.zip" = "c0c5d42d96cc407303d71ba5afd06615c660228fa2260d7ecbc8453140529137";
+ "libpcap/libpcap-1.10.4-1-arm64-windows-ws.zip" = "98dbac265e3617eb0ab1a690902a4989e022d0761098c2753bff4cd0189419b3";
"libsmi/libsmi-2021-01-15-2-arm64-windows-ws.zip" = "3f5b7507a19436bd6494e2cbc89856a5980950f931f7cf0d637a8e764914d015";
- "libssh/libssh-0.10.5-1-arm64-mingw-dynamic-ws.zip" = "b99c9573d9a30ba2898ce6ac131b23b1699009761d5dbe351a1a958cca0f85ca";
- "lua/lua-5.2.4-unicode-arm64-windows-vc17.zip" = "5848e23352e35b69f4cdabaca3754c2c5fb11e5461bb92b71e059e558e4b2d12";
- "lz4/lz4-1.9.4-1-win64armws.zip" = "59a3ed3f9161be7614a89afd2ca21c43f26dd916afd4aa7bfdc4b148fb10d485";
+ "libssh/libssh-0.10.6plus-1-arm64-mingw-dynamic-ws.zip" = "2de3a300b0fbb7593c863aa8f302f801a2a1041ced8dfa8d65b7e7b42008c7ef";
+ "lua/lua-5.4.6-unicode-arm64-windows-vc14.zip" = "a28c38acde71de5c495420cd8bf480e2e41f1a14bac81503b700fc64a9679b95";
+ "lz4/lz4-1.9.4-1-arm64-windows-ws.zip" = "4bb37fb184bcbe350a137df54124faf45fc0871777146b469b7fd08f6dd07337";
"minizip/minizip-1.3-1-arm64-windows-ws.zip" = "e5b35d064ff10f1ab1ee9193a0965fd1eb3d1e16eab5a905ab3fea9b14fb5afe";
- "nghttp2/nghttp2-1.61.0-1-arm64-windows-ws.zip" = "628822778c1ae540943f4b48bcfd22551826318a66b8082eebc4e99943e743b3";
- "nghttp3/nghttp3-1.0.0-1-arm64-windows-ws.zip" = "cf53090b514d3193d75b81562235ae1e7a8a9d462e37f515f9a9a29c6b469236";
- "opus/opus-1.4-1-win64armws.zip" = "51d10381360d5691b2022dde5b284266d9b0ce9a3c9bd7e86f9a4ff1a4f7d904";
+ "minizip-ng/minizip-ng-4.0.5-1-arm64-windows-ws.zip" = "66ccd6ae1f6b0078632f87c9c9cc153ab0015874c8c65d855f8b90beef20cd4e";
+ "nghttp2/nghttp2-1.62.1-1-arm64-windows-ws.zip" = "3610c71da9deabf2edab4e09329817911a4e2b493d847035093a7e93d7993c12";
+ "nghttp3/nghttp3-1.1.0-1-arm64-windows-ws.zip" = "ae00b65fda2d5e9ffa979be406f127d050a95b0c59654acf7b7411e77b2feb1f";
+ "opencore-amr/opencore-amr-0.1.6-1-arm64-mingw-dynamic-ws.zip" = "581ec9e8ee4dde2236b689eec4d39802e2f998baa8d1604a4e91c1da32556b57";
+ "opus/opus-1.5.1-1-arm64-windows-ws.zip" = "b50db665b50f12185dacd8efd77cd28eb30e53ac5dcbb09b403e9fb90a9768f4";
"sbc/sbc-2.0-1-arm64-windows-ws.zip" = "83cfe4a8b6fa5bae253ecacc1c02e6e4c61b4ad9ad0e5e63f0f30422fb6eac96";
- "snappy/snappy-1.1.9-1-win64armws.zip" = "f3f6ec841024d18df06934ff70f44068a4e8f1008eca1f363257645647f74d4a";
+ "snappy/snappy-1.2.1-1-arm64-windows-ws.zip" = "71d6987360eb1a10abd0d070768e6b7b250c6ea87feaee044ecbc8864c7e57f4";
"spandsp/spandsp-0.0.6-5-arm64-windows-ws.zip" = "fdf01e3c33e739ff9399b7d42cd8230c97cb27ce51865a0f06285a8f68206b6c";
"speexdsp/speexdsp-1.2.1-1-win64armws.zip" = "1759a9193065f27e50dd79dbb1786d24031ac43ccc48c40dca46d8a48552e3bb";
- "vcpkg-export/vcpkg-export-20231017-1-arm64-windows-ws.zip" = "2752e2e059ea13e8b4e1ef5f8892b81b745da6838e513bd6e4e548d290d9f472";
+ "vcpkg-export/vcpkg-export-20240524-1-arm64-windows-ws.zip" = "5d1e186b77ec3bc7072253be90b6aa36d7e317bccc382209c1570b60e488000b";
"WinSparkle/WinSparkle-0.8.0-4-gb320893.zip" = "3ae42326bcd34594bc21b1e7948863a839ee76e87d9f4cf6b59b9d9f9a083881";
- "zstd/zstd-1.5.5-1-win64armws.zip" = "0e448875380cc5d5f5539d994062201bfa564e4a27466bc3fdfec84d9008e51d";
+ "zlib-ng/zlib-ng-2.1.5-1-arm64-windows-ws.zip" = "de3a42d0096a17085b27630402a710b036cc8e3c85029ad37536d929697271e5";
+ "zstd/zstd-1.5.6-1-arm64-windows-ws.zip" = "167261f9605a28f8f5a45a2fa400daa5072290a89d5fdc218595da52d57f938b";
}
# Subdirectory to extract an archive to
diff --git a/tools/wireshark_gen.py b/tools/wireshark_gen.py
index e53fbf9e..15817c4f 100755
--- a/tools/wireshark_gen.py
+++ b/tools/wireshark_gen.py
@@ -102,23 +102,23 @@ from omniidl import idlast, idltype, idlutil, output
class wireshark_gen_C:
# Some string constants for our templates
- c_u_octet8 = "guint64 u_octet8;"
- c_s_octet8 = "gint64 s_octet8;"
- c_u_octet4 = "guint32 u_octet4;"
- c_s_octet4 = "gint32 s_octet4;"
- c_u_octet2 = "guint16 u_octet2;"
- c_s_octet2 = "gint16 s_octet2;"
- c_u_octet1 = "guint8 u_octet1;"
- c_s_octet1 = "gint8 s_octet1;"
-
- c_float = "gfloat my_float;"
- c_double = "gdouble my_double;"
-
- c_seq = "const gchar *seq = NULL;" # pointer to buffer of gchars
- c_i = "guint32 i_" # loop index
- c_i_lim = "guint32 u_octet4_loop_" # loop limit
- c_u_disc = "guint32 disc_u_" # unsigned int union discriminant variable name (enum)
- c_s_disc = "gint32 disc_s_" # signed int union discriminant variable name (other cases, except Enum)
+ c_u_octet8 = "uint64_t u_octet8;"
+ c_s_octet8 = "int64_t s_octet8;"
+ c_u_octet4 = "uint32_t u_octet4;"
+ c_s_octet4 = "int32_t s_octet4;"
+ c_u_octet2 = "uint16_t u_octet2;"
+ c_s_octet2 = "int16_t s_octet2;"
+ c_u_octet1 = "uint8_t u_octet1;"
+ c_s_octet1 = "int8_t s_octet1;"
+
+ c_float = "float my_float;"
+ c_double = "double my_double;"
+
+ c_seq = "const char *seq = NULL;" # pointer to buffer of chars
+ c_i = "uint32_t i_" # loop index
+ c_i_lim = "uint32_t u_octet4_loop_" # loop limit
+ c_u_disc = "uint32_t disc_u_" # unsigned int union discriminant variable name (enum)
+ c_s_disc = "int32_t disc_s_" # signed int union discriminant variable name (other cases, except Enum)
def __init__(self, st, protocol_name, dissector_name, description, debug=False, aggressive=False):
self.DEBUG = debug
@@ -238,7 +238,7 @@ class wireshark_gen_C:
rt = op.returnType()
if rt.kind() != idltype.tk_void:
- if rt.kind() == idltype.tk_alias: # a typdef return val possibly ?
+ if rt.kind() == idltype.tk_alias: # a typedef return val possibly ?
#self.get_CDR_alias(rt, rt.name())
if rt.unalias().kind() == idltype.tk_sequence:
self.st.out(self.template_hf, name=sname + "_return_loop")
@@ -482,7 +482,7 @@ class wireshark_gen_C:
def genAtList(self, atlist):
"""in: atlist
- out: C code for IDL attribute decalarations.
+ out: C code for IDL attribute declarations.
ie: def genAtlist(self,atlist,language)
"""
@@ -502,7 +502,7 @@ class wireshark_gen_C:
def genEnList(self, enlist):
"""in: enlist
- out: C code for IDL Enum decalarations using "static const value_string" template
+ out: C code for IDL Enum declarations using "static const value_string" template
"""
self.st.out(self.template_comment_enums_start)
@@ -1110,7 +1110,7 @@ class wireshark_gen_C:
string_digits = '%i ' % type.digits() # convert int to string
string_scale = '%i ' % type.scale() # convert int to string
- string_length = '%i ' % self.dig_to_len(type.digits()) # how many octets to hilight for a number of digits
+ string_length = '%i ' % self.dig_to_len(type.digits()) # how many octets to highlight for a number of digits
self.st.out(self.template_get_CDR_fixed, hfname=pn, digits=string_digits, scale=string_scale, length=string_length)
self.addvar(self.c_seq)
@@ -1412,7 +1412,7 @@ class wireshark_gen_C:
self.getCDR(st, sname + "_" + un.identifier())
- # Depending on what kind of discriminant I come accross (enum,integer,char,
+ # Depending on what kind of discriminant I come across (enum,integer,char,
# short, boolean), make sure I cast the return value of the get_XXX accessor
# to an appropriate value. Omniidl idlast.CaseLabel.value() accessor will
# return an integer, or an Enumerator object that is then converted to its
@@ -1650,8 +1650,8 @@ class wireshark_gen_C:
self.st.out(self.template_get_CDR_sequence_length, seqname=pn)
self.st.out(self.template_get_CDR_sequence_octet, seqname=pn)
self.addvar(self.c_i_lim + pn + ";")
- self.addvar("const guint8 * binary_seq_" + pn + ";")
- self.addvar("gchar * text_seq_" + pn + ";")
+ self.addvar("const uint8_t * binary_seq_" + pn + ";")
+ self.addvar("char * text_seq_" + pn + ";")
@staticmethod
def namespace(node, sep):
@@ -1885,7 +1885,7 @@ class wireshark_gen_C:
template_helper_function_start = """\
static void
-decode_@sname@(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, proto_item *item _U_, int *offset _U_, MessageHeader *header, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
+decode_@sname@(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, proto_item *item _U_, int *offset _U_, MessageHeader *header, const char *operation _U_, bool stream_is_big_endian _U_)
{"""
template_helper_function_end = """\
@@ -1919,22 +1919,22 @@ void proto_reg_handoff_giop_@dissector_name@(void);"""
# template_protocol = """
#/* Initialise the protocol and subtree pointers */
-#static int proto_@dissector_name@ = -1;
-#static gint ett_@dissector_name@ = -1;
+#static int proto_@dissector_name@;
+#static int ett_@dissector_name@;
#"""
template_protocol = """
/* Initialise the protocol and subtree pointers */
-static int proto_@dissector_name@ = -1;
-static gint ett_@dissector_name@ = -1;
-static int ett_giop_struct = -1;
-static int ett_giop_sequence = -1;
-static int ett_giop_array = -1;
-static int ett_giop_union = -1;
+static int proto_@dissector_name@;
+static int ett_@dissector_name@;
+static int ett_giop_struct;
+static int ett_giop_sequence;
+static int ett_giop_array;
+static int ett_giop_union;
"""
template_init_boundary = """
/* Initialise the initial Alignment */
-static guint32 boundary = GIOP_HEADER_SIZE; /* initial value */"""
+static uint32_t boundary = GIOP_HEADER_SIZE; /* initial value */"""
# plugin_register and plugin_reg_handoff templates
@@ -1944,7 +1944,7 @@ static guint32 boundary = GIOP_HEADER_SIZE; /* initial value */"""
WS_DLL_PUBLIC_DEF void
plugin_register(void)
{
- if (proto_@dissector_name@ == -1) {
+ if (proto_@dissector_name@ <= 0) {
proto_register_giop_@dissector_name@();
}
}
@@ -1976,7 +1976,7 @@ void proto_register_giop_@dissector_name@(void)
/* setup protocol subtree array */
- static gint *ett[] = {
+ static int *ett[] = {
&ett_@dissector_name@,
&ett_giop_struct,
&ett_giop_sequence,
@@ -2014,9 +2014,9 @@ void proto_register_giop_@dissector_name@(void)
template_proto_register_ei_filters = """\
/* Expert info filters */
-static expert_field ei_@dissector_name@_unknown_giop_msg = EI_INIT;
-static expert_field ei_@dissector_name@_unknown_exception = EI_INIT;
-static expert_field ei_@dissector_name@_unknown_reply_status = EI_INIT;
+static expert_field ei_@dissector_name@_unknown_giop_msg;
+static expert_field ei_@dissector_name@_unknown_exception;
+static expert_field ei_@dissector_name@_unknown_reply_status;
"""
# template for delegation code
@@ -2027,13 +2027,13 @@ if (strcmp(operation, "@opname@") == 0
item = process_RequestOperation(tvb, pinfo, ptree, header, operation); /* fill-up Request_Operation field & info column */
tree = start_dissecting(tvb, pinfo, ptree, offset);
decode_@sname@(tvb, pinfo, tree, item, offset, header, operation, stream_is_big_endian);
- return TRUE;
+ return true;
}
"""
template_no_ops_to_delegate = """\
// NOTE: this should only appear if your IDL has absolutely no operations
if (!idlname) {
- return FALSE;
+ return false;
}
"""
# Templates for the helper functions
@@ -2199,9 +2199,9 @@ item = proto_tree_add_uint(tree, hf_@seqname@_loop, tvb,*offset-4, 4, u_octet4_l
template_get_CDR_sequence_octet = """\
if (u_octet4_loop_@seqname@ > 0 && tree) {
- get_CDR_octet_seq(tvb, &binary_seq_@seqname@, offset,
+ get_CDR_octet_seq(pinfo->pool, tvb, &binary_seq_@seqname@, offset,
u_octet4_loop_@seqname@);
- text_seq_@seqname@ = make_printable_string(binary_seq_@seqname@,
+ text_seq_@seqname@ = make_printable_string(pinfo->pool, binary_seq_@seqname@,
u_octet4_loop_@seqname@);
proto_tree_add_bytes_format_value(tree, hf_@seqname@, tvb, *offset - u_octet4_loop_@seqname@,
u_octet4_loop_@seqname@, binary_seq_@seqname@, \"%s\", text_seq_@seqname@);
@@ -2365,6 +2365,7 @@ proto_tree *union_tree = proto_tree_add_subtree(tree, tvb, *offset, -1, ett_giop
#include <epan/proto.h>
#include "packet-giop.h"
#include <epan/expert.h>
+#include <wsutil/array.h>
#include "ws_diag_control.h"
#include "ws_compiler_tests.h"
@@ -2411,7 +2412,7 @@ start_dissecting(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, int *offs
}
static proto_item*
-process_RequestOperation(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, MessageHeader *header, const gchar *operation)
+process_RequestOperation(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, MessageHeader *header, const char *operation)
{
proto_item *pi;
if(header->message_type == Reply) {
@@ -2424,12 +2425,12 @@ process_RequestOperation(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, M
return pi;
}
-static gboolean
-dissect_@dissname@(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, int *offset, MessageHeader *header, const gchar *operation, gchar *idlname)
+static bool
+dissect_@dissname@(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, int *offset, MessageHeader *header, const char *operation, char *idlname)
{
proto_item *item _U_;
proto_tree *tree _U_;
- gboolean stream_is_big_endian = is_big_endian(header); /* get endianess */
+ bool stream_is_big_endian = is_big_endian(header); /* get endianess */
/* If we have a USER Exception, then decode it and return */
if ((header->message_type == Reply) && (header->rep_status == USER_EXCEPTION)) {
@@ -2454,16 +2455,16 @@ case LocateReply:
case CloseConnection:
case MessageError:
case Fragment:
- return FALSE; /* not handled yet */
+ return false; /* not handled yet */
default:
- return FALSE; /* not handled yet */
+ return false; /* not handled yet */
} /* switch */
"""
template_main_dissector_end = """\
- return FALSE;
+ return false;
} /* End of main dissector */
"""
@@ -2487,32 +2488,32 @@ default:
* Main delegator for exception handling
*
*/
-static gboolean
-decode_user_exception(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *ptree _U_, int *offset _U_, MessageHeader *header, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
+static bool
+decode_user_exception(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *ptree _U_, int *offset _U_, MessageHeader *header, const char *operation _U_, bool stream_is_big_endian _U_)
{
proto_tree *tree _U_;
if (!header->exception_id)
- return FALSE;
+ return false;
"""
template_ex_delegate_code = """\
if (strcmp(header->exception_id, "@exname@") == 0) {
tree = start_dissecting(tvb, pinfo, ptree, offset);
decode_ex_@sname@(tvb, pinfo, tree, offset, header, operation, stream_is_big_endian); /* @exname@ */
- return TRUE;
+ return true;
}
"""
template_main_exception_delegator_end = """
- return FALSE; /* user exception not found */
+ return false; /* user exception not found */
}
"""
template_exception_helper_function_start = """\
/* Exception = @exname@ */
static void
-decode_ex_@sname@(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
+decode_ex_@sname@(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const char *operation _U_, bool stream_is_big_endian _U_)
{
"""
@@ -2523,7 +2524,7 @@ decode_ex_@sname@(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U
template_struct_helper_function_start = """\
/* Struct = @stname@ */
static void
-decode_@sname@_st(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, proto_item *item _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
+decode_@sname@_st(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, proto_item *item _U_, int *offset _U_, MessageHeader *header _U_, const char *operation _U_, bool stream_is_big_endian _U_)
{
"""
@@ -2534,14 +2535,14 @@ decode_@sname@_st(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U
template_union_helper_function_start = """\
/* Union = @unname@ */
static void
-decode_@sname@_un(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
+decode_@sname@_un(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const char *operation _U_, bool stream_is_big_endian _U_)
{
"""
template_union_helper_function_start_with_item = """\
/* Union = @unname@ */
static void
-decode_@sname@_un(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
+decode_@sname@_un(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const char *operation _U_, bool stream_is_big_endian _U_)
{
proto_item* item = NULL;
"""
@@ -2615,14 +2616,14 @@ static const value_string @valstringname@[] = {
if (strcmp(operation, get_@sname@_at) == 0 && (header->message_type == Reply) && (header->rep_status == NO_EXCEPTION) ) {
tree = start_dissecting(tvb, pinfo, ptree, offset);
decode_get_@sname@_at(tvb, pinfo, tree, offset, header, operation, stream_is_big_endian);
- return TRUE;
+ return true;
}
"""
template_at_delegate_code_set = """\
if (strcmp(operation, set_@sname@_at) == 0 && (header->message_type == Request) ) {
tree = start_dissecting(tvb, pinfo, ptree, offset);
decode_set_@sname@_at(tvb, pinfo, tree, offset, header, operation, stream_is_big_endian);
- return TRUE;
+ return true;
}
"""
template_attribute_helpers_start = """\
@@ -2637,7 +2638,7 @@ if (strcmp(operation, set_@sname@_at) == 0 && (header->message_type == Request)
/* Attribute = @atname@ */
static void
-decode_@sname@_at(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_)
+decode_@sname@_at(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const char *operation _U_, bool stream_is_big_endian _U_)
{
"""
@@ -2684,37 +2685,37 @@ decode_@sname@_at(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U
"""
# Cast Unions types to something appropriate
- # Enum value cast to guint32, all others cast to gint32
+ # Enum value cast to uint32_t, all others cast to int32_t
# as omniidl accessor returns integer or Enum.
template_union_code_save_discriminant_enum = """\
-disc_s_@discname@ = (gint32) u_octet4; /* save Enum Value discriminant and cast to gint32 */
+disc_s_@discname@ = (int32_t) u_octet4; /* save Enum Value discriminant and cast to int32_t */
"""
template_union_code_save_discriminant_long = """\
*offset -= 4; // rewind
-disc_s_@discname@ = (gint32) get_CDR_long(tvb,offset,stream_is_big_endian, boundary); /* save gint32 discriminant and cast to gint32 */
+disc_s_@discname@ = (int32_t) get_CDR_long(tvb,offset,stream_is_big_endian, boundary); /* save int32_t discriminant and cast to int32_t */
"""
template_union_code_save_discriminant_ulong = """\
*offset -= 4; // rewind
-disc_s_@discname@ = (gint32) get_CDR_ulong(tvb,offset,stream_is_big_endian, boundary); /* save guint32 discriminant and cast to gint32 */
+disc_s_@discname@ = (int32_t) get_CDR_ulong(tvb,offset,stream_is_big_endian, boundary); /* save uint32_t discriminant and cast to int32_t */
"""
template_union_code_save_discriminant_short = """\
*offset -= 2; // rewind
-disc_s_@discname@ = (gint32) get_CDR_short(tvb,offset,stream_is_big_endian, boundary); /* save gint16 discriminant and cast to gint32 */
+disc_s_@discname@ = (int32_t) get_CDR_short(tvb,offset,stream_is_big_endian, boundary); /* save int16_t discriminant and cast to int32_t */
"""
template_union_code_save_discriminant_ushort = """\
*offset -= 2; // rewind
-disc_s_@discname@ = (gint32) get_CDR_ushort(tvb,offset,stream_is_big_endian, boundary); /* save gint16 discriminant and cast to gint32 */
+disc_s_@discname@ = (int32_t) get_CDR_ushort(tvb,offset,stream_is_big_endian, boundary); /* save int16_t discriminant and cast to int32_t */
"""
template_union_code_save_discriminant_char = """\
*offset -= 1; // rewind
-disc_s_@discname@ = (gint32) get_CDR_char(tvb,offset); /* save guint1 discriminant and cast to gint32 */
+disc_s_@discname@ = (int32_t) get_CDR_char(tvb,offset); /* save uint8_t discriminant and cast to int32_t */
"""
template_union_code_save_discriminant_boolean = """\
*offset -= 1; // rewind
-disc_s_@discname@ = (gint32) get_CDR_boolean(tvb, offset); /* save guint1 discriminant and cast to gint32 */
+disc_s_@discname@ = (int32_t) get_CDR_boolean(tvb, offset); /* save uint8_t discriminant and cast to int32_t */
"""
template_comment_union_code_label_compare_start = """\
if (disc_s_@discname@ == @labelval@) {
@@ -2736,11 +2737,11 @@ if (disc_s_@discname@ == @labelval@) {
# for structs and union helper functions.
template_hf_operations = """
-static int hf_operationrequest = -1;/* Request_Operation field */
+static int hf_operationrequest;/* Request_Operation field */
"""
template_hf = """\
-static int hf_@name@ = -1;"""
+static int hf_@name@;"""
template_prototype_start_dissecting = """
static proto_tree *start_dissecting(tvbuff_t *tvb, packet_info *pinfo, proto_tree *ptree, int *offset);
@@ -2754,7 +2755,7 @@ static proto_tree *start_dissecting(tvbuff_t *tvb, packet_info *pinfo, proto_tre
"""
template_prototype_struct_body = """\
/* Struct = @stname@ */
-static void decode_@name@_st(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, proto_item *item _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_);
+static void decode_@name@_st(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, proto_item *item _U_, int *offset _U_, MessageHeader *header _U_, const char *operation _U_, bool stream_is_big_endian _U_);
"""
template_decode_struct = """\
decode_@name@_st(tvb, pinfo, struct_tree, item, offset, header, operation, stream_is_big_endian);"""
@@ -2767,13 +2768,13 @@ decode_@name@_st(tvb, pinfo, struct_tree, item, offset, header, operation, strea
template_prototype_union_body = """
/* Union = @unname@ */
-static void decode_@name@_un(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const gchar *operation _U_, gboolean stream_is_big_endian _U_);
+static void decode_@name@_un(tvbuff_t *tvb _U_, packet_info *pinfo _U_, proto_tree *tree _U_, int *offset _U_, MessageHeader *header _U_, const char *operation _U_, bool stream_is_big_endian _U_);
"""
template_decode_union = """\
decode_@name@_un(tvb, pinfo, union_tree, offset, header, operation, stream_is_big_endian);
"""
template_proto_item = """\
-proto_item *item = (proto_item*) wmem_alloc0(wmem_packet_scope(), sizeof(proto_item));
+proto_item *item = wmem_new0(pinfo->pool, proto_item);
"""
#
diff --git a/tools/wireshark_words.txt b/tools/wireshark_words.txt
index 7a0857a8..26b201d0 100644
--- a/tools/wireshark_words.txt
+++ b/tools/wireshark_words.txt
@@ -8,9 +8,13 @@
3gpp2
3pcap
5views
+7login
80211n
80mhz
abbrev
+aborts
+accel
+accelerations
accelerometer
acceptor
accessor
@@ -19,17 +23,26 @@ accuracies
acked
acknack
acknowledgement
+acknowledgements
+acknowlegement
acp133
+acquirer
+actioned
activations
actuator
acyclic
+adaption
addba
additionals
additionsr
addon
+addrbook
+addrtype
adjacency
adlink
+admin
administrable
+administratively
adpclk
adspec
advatek
@@ -37,72 +50,120 @@ adwin
aes128
aes256
aethra
+afnum
+aggreg
+aggregated
+aggregates
aggregations
aggregator
agnss
aifsn
+aires
aironet
airpcap
airtel
alcap
alcatel
+aliased
alljoyn
alloc
+allocator
allocators
alteon
+altera
+altsvc
+american
ampdu
amperage
ampere
amperes
+amsdu
+amsterdam
anacap
+analysed
+analysing
analyzers
analyzes
+andxoffset
+angeles
annexc
+annotation
+annotations
annunc
+annunciation
anonsvn
anonymization
+antsel
aperiodic
+appcheader
appdata
+append
+apphdr
appid
appkey
+applctrl
applicability
+approx
appset
+appsig
+april
+arabic
arbitrated
+arcnet
arduino
arfcn
+argparse
arista
+armscii
+arphrd
+aruba
+aruuid
+ascii
asciidoc
ashrae
asn1
asn1cnf
asn2deb
asn2wrs
+assembler
+assignate
assignee
assignor
+assistive
assoc
-assymetric
+asterix
async
+asynchronous
asynchronously
asyncmap
+atapi
+athens
atheros
+atime
atomically
atsss
attendee
attrib
+attribs
attrs
audigy
authcitrix
authen
authenticates
+authentications
authenticator
authenticators
authgss
authn
authntransitioning
+authorisation
+authorised
authorizer
authtoken
authtype
authz
+autocompletions
+autoconf
autoconfiguration
autodiscovery
autoneg
@@ -110,12 +171,17 @@ autosar
available
avaya
avrcp
+axcor
+b100base
bacapp
backedup
backend
backhaul
backoff
bacnet
+bangladesh
+barbados
+basicmcsmap
batched
baudrate
bayer
@@ -126,25 +192,47 @@ beamformed
beamformee
beamformer
beamforming
+behaviour
+behaviours
+beijing
+belgacom
+berhad
+berkeley
+bgpsec
bgpspec
+bharti
bibliographic
bibliography
+bidir
+bidirection
bidirectional
bidirectionally
+bifuel
bigint
+bigtk
binlog
bitfield
+bitmap
bitmask
bitrate
bitstring
blackhole
bnode
+bogota
boolflag
bootfile
bootloader
bootopt
bootp
+bootup
+bosch
+bouygues
+bratislava
+britain
+british
broadcom
+brotli
+browsers
bsmap
bssap
bssid
@@ -157,17 +245,24 @@ bthci
btmesh
btsdp
btsnoop
+btxnq
+bucuresti
+buenos
bugzilla
buildbot
builtin
bulleted
+bundler
butype
byte
byteorder
+c1220func
cablelabs
+caching
cadenced
callback
callid
+callout
callsign
calorific
canceled
@@ -191,12 +286,16 @@ cardbus
carrierfreq
carrierid
casio
+categorization
categorizes
+cattp
cblock
ccache
cccid
ccitt
ccpch
+ccsds
+ccsid
cctrch
cdma2000
cdmacallmode
@@ -204,27 +303,40 @@ cdmachanneldata
celeron
cellid
cellidentity
+celsius
+celtel
centillion
centiseconds
+centre
+centric
centrino
cfilters
+cflags
cframe
+chacha
chan1
chan2
changelog
channelisation
+channelised
channelized
+channelmap
+channelseq
+chars
charset
charsets
chauvet
checkbox
checkout
checksum
+chinese
chksum
chmod
choco
chocolatey
+chooser
choplen
+chroma
chromaticities
chromaticity
chunked
@@ -236,8 +348,10 @@ ciphersuite
ciphertext
ciplus
cipso
+circularly
citrix
cksum
+clasportinfo
classifiers
classmark
classmark3
@@ -245,22 +359,33 @@ claypaky
clearallportcounters
clientkey
clientout
+clksrc
+cllog
clopts
clsfr
+clusctl
+clusprop
clustermap
cmake
cmdcontrol
+cmdmfg
cmstatus
codabar
codebook
+codec
codecs
codepoint
codeset
codingrate
+codings
codute
+colinfo
+collation
collectd
collimation
+colmetadata
colocated
+colons
coloring
colorise
colorization
@@ -268,42 +393,69 @@ colorize
colorized
colorizing
colormap
+colour
+colouring
combi
combiner
combiners
+combobox
+comcast
+commaize
+comms
communication
+commview
+companded
compat
+compatibiity
+compatibles
compilable
compilers
compr
computable
+comunicaciones
concatenate
concatenated
concatenates
+concatenating
+concatenation
+concentrator
concurrent
conferenced
+config
configitem
configurable
+conformance
conformant
congctrl
connectionless
connid
connp
+conout
const
+constr
contactless
contextp
contiguity
contiguously
+controlee
+convolutional
+convs
coord
Coord3D
+coords
+coprocessor
copycss
copyfile
corba
+corosync
+correlator
corrigendum
couchbase
coverity
+cp56time2a
cpdlc
cpich
+cplusplus
cppcheck
cpubus
cpuregisters
@@ -314,28 +466,45 @@ credssp
criticalextensions
criticalextensionsfuture
crnti
+croatian
crypto
cryptographic
csapi
ctime
+ctrls
+ctrlstat
ctxinfo
+ctxinst
ctype
cumulated
cumulatively
+curvezmq
customizable
customization
customizing
+cxxflags
+cyber
+cymru
cyphering
+cyrillic
daintree
+dallas
+darwin
datagram
datagrams
dataitem
datalen
+dataln
datarate
+dataset
datastate
datetime
+datum
+daysofweek
+dbuild
dccreq
dcerpc
+dcname
dct3trace
deact
deactivated
@@ -351,10 +520,18 @@ deauthentication
deauthentications
debian
debug
+debugger
+debuggers
decapsulation
decca
+december
decentralization
dechunk
+decimals
+declarator
+decnet
+decnum
+decompressed
decompressing
decompressor
decrement
@@ -368,24 +545,40 @@ dedup
deduplicate
deenabled
deenablement
+deencapsulate
+defence
+deferral
defragment
defragmentation
defragmented
defragmenting
dehumidification
+deinterlaced
+deinterlacer
+deinterlacing
deinterleaved
+deleg
+deletions
+delhi
+delimit
delimited
delimiters
delimiting
demodulator
+demotivation
+demultiplex
demultiplexed
demultiplexer
demultiplexers
+demultiplexing
denso
deobfuscated
deobfuscation
depassivated
+dependencies
+deprecate
deprecated
+deprecates
deprotection
dequeue
dequeued
@@ -399,77 +592,127 @@ deregistering
deregistration
derivate
des40
+descname
descr
+descriptor
descriptors
desegment
desegmentation
+desegmented
desegmenting
deselect
+deserialize
+designator
destip
destport
+determinant
deutschland
devcap
+devel
+devfs
deviceid
devmode
+devno
+devtype
dfdpck
dfilter
dfilters
+dfref
dfsauth
dftest
dgotyp
dgram
dhaka
dhcpv
+diagdata
dialed
+dialling
dialup
+dicom
+differentiator
diffie
+diffserv
Digicel
+digitech
digitizer
digium
+dilithium
diplexer
directionality
+dirname
disambiguate
disambiguation
+discontinuities
discriminant
+discriminator
+dishnet
dissection
dissector
dissectors
+distclean
distinguisher
+distrib
+distributions
+diversifications
diversifier
divisor
djiuav
dlmap
dlsch
+dlstatus
+dmacros
dmepi
+dmrpc
+dmx4all
dnskey
+dnsop
+dnssec
+dnssrv
+docomo
docsis
dodag
+doppler
dot11Qos
dot1q
+dotnet
double
downlink
doxygen
dpauxmon
dpnss
+drange
drbid
drdynvc
droppable
+drsuapi
dsmcc
+dstmode
dstport
dtwin
dumpcap
duple
+duplications
+durations
+düsseldorf
dword
dwords
+dyntatom
+e2node
eapol
earcfn
earfcn
+eastat
ebcdic
ecdhe
ecdsa
+econet
ecpri
+ed137a
+ed137b
editcap
+edonkey
eeprom
+effecter
egprs
egroup
eigrp
@@ -478,8 +721,15 @@ elektronik
elided
elink
ellipsoid
+elsif
+emlsr
+emulates
+emule
+enablement
encap
encaps
+encapsulating
+encapsulation
encapsulations
encapsulator
encinfo
@@ -488,139 +738,227 @@ encodings
encrypt
encrypting
encryptionkey
+encrypts
endace
endian
endianness
endif
endpoint
+endpt
engineid
+enlight
enodeb
enqueue
enrollee
entityid
entryid
enttec
+enumerated
enumerates
+enumeration
enumerations
enumerator
+enums
envchange
epasv
epdcch
eperm
+ephemeris
+epoll
epsem
equinf
+equinix
equiv
+equivalence
ericsson
erldp
+errcnt
errinf
errno
errorcode
errored
+errorlog
errorportinfo
erspan
España
esperanto
+essar
+estonia
+estream
+ethcm
etheraddr
ethercat
+ethernet
ethers
ethertype
+ethname
+etisalat
etlfile
ettarr
etwdump
etype
+europe
eutra
eutran
eventlog
+evolutions
+exclusions
executables
exflags
+exfunc
exocet
+expandtab
+expansions
+extapp
extattr
extcap
+extcaps
+extcopy
extensibility
extensible
extern
+extflags
exthdr
extlen
+extopt
+extpfx
+extractors
extrainformation
eyesdn
+f1removal
+f1setup
+f5eth
+fa1922dfa9a
facch
+fahrenheit
failover
+falco
+falcodump
+falcosecurity
+fallthru
fastcom
fastip
fastmsg
fattr
+fdata
featureful
+february
fhandle
fiber
+fibre
+fileseg
fileset
+filsfils
+filter2d
+finditer
+firefox
firewall
+fiveco
fixme
flag1
flag2
+flanger
flavored
+flavour
+flavours
flexray
flowid
flowmod
flowset
flowspec
fmconfig
+fmdata
+fname
+foffset
followup
foobar
format0
+forti
fortigate
fortinet
fpiur
+fprintf
fraghdr
+fragmenter
framenum
framenumber
frametype
frcrpt
freebsd
+friday
frontend
+fruid
fsctl
+fstst
ftenum
+ftserver
ftype
ftypes
fujitsu
+funcdesc
functionalities
funkt
fuzzed
fuzzer
+fuzzing
fvalue
g711a
g711u
+galileo
gamepad
ganss
gboolean
gchar
gcrypt
+gencp
gendc
+geninfo
gentoo
geoip
+geolocation
geonw
+geospatial
+geostd
+gerald
geran
getattr
getentrybyname
getgroupinfo
+getimageinfo
getnext
+getopt
+getset
+getsetinfo
getter
+getters
+getvfinfo
gidaddr
gigabit
gigamon
gigpod
github
gitlab
+givei
+globals
gluster
gmail
gmprs
gnodeb
gnutls
goaway
+goddard
golomb
+goodput
google
+gopname
+goroutines
+gostr
gpointer
gprscdr
gprsmeasurementparams3g
+granularity
+greek
+greenwich
gregex
greyed
groupa
@@ -628,29 +966,50 @@ groupadd
groupb
groupcast
groupmod
+gryphon
gssapi
+gtalk
+guangzhou
+guatemala
guint
gzipped
handoff
+hangul
hangup
harqid
+harris
hartip
hashed
hashes
hazelcast
+hbreq
hcidump
headend
+hebrew
+heimdal
+hellman
heuristic
+heuristicly
+heuristics
+hexnum
hfarr
hfill,
+hfindex
+hfinfo
+hfname
HI2Operations
+histogram
+historizing
+hitachi
hnbap
homedir
homeplug
hopcount
+hostkey
hostname
howto
hpfeeds
+hplmn
hresult
hsdpa
hsdsch
@@ -663,25 +1022,44 @@ http2
https
huawei
huffman
+hungarian
+hutchison
+hysteresis
hytec
+ibsdh
+icapability
icmpv
+iconv
+icount
+idempotent
ident
identifier
+ideographic
idiographic
idl2deb
idl2wrs
+idltype
iec60870
ieee1609dot
ieee17221
ieee80211
+ienergy
iface
ifconfig
ifdef
+ifindex
ifname
+ifndef
+iftype
ikev2
illuminance
+image1d
+image2d
+image3d
+image4dsgis
imeisv
immersive
+impinj
implementations
implementer
implementers
@@ -689,6 +1067,8 @@ implementor
inactivated
inband
incits
+incompatibilities
+inconsistently
incremented
incrementing
indenting
@@ -699,9 +1079,12 @@ infix
infolist
informationitem
informationlist
+informations
infos
inited
+initialisation
initialise
+initialised
initialising
initialization
initializations
@@ -711,38 +1094,70 @@ initializer
initializers
initializes
initializing
+initiators
+inkey
inline
+inlined
+inmarsat
+inoage
inode
inodes
inspiron
+installers
instantiate
+instantiated
+instantiation
instdir
+instr
+instrmnt
+instrmt
instrumented
+intcon
+intel
+intensities
+interchangibly
interferer
interleaving
+internacional
interop
+interpolated
interruptible
interworking
+intparam
+intra
+intrinsics
+intro
intval
inuse
invalidation
+invalidity
invalidly
+invariance
+invariant
+inversed
ioctl
+ioepoch
ioerr
ioflag
iograph
+ionospheric
iotecha
+ipacc
ipaccess
ipaddr
ipaddress
ipcomp
ipconfig
+ipdum
iperf
ipfix
+ipmap
ipphone
ipprim
ipsec
ipseckey
+ipset
+ipsilon
iptables
iptrace
ipv4addr
@@ -750,21 +1165,35 @@ ipv6addr
ipxnet
ipxnets
irqmask
+isafe
isakmp
isatap
iscsi
iseries
+isinstance
isobus
+isocenter
isochronous
+issuer
italia
+italic
+italics
iterating
+iterative
iterator
itunes
iwarp
ixveriwave
jacobson
+january
+japanese
jetds
+jminfo
+joiners
jsonraw
+jtids
+jupmngr
+k12text
k12xx
kademlia
kasme
@@ -773,6 +1202,7 @@ kbytes
kchip
keepalive
kerberos
+keybcs
keydes
keygen
keyid
@@ -784,22 +1214,47 @@ keyring
keyset
keytab
knxip
+korean
+krb5kdc
+krb5krb
+l1sync
l2cap
l2vpn
l3vpn
laggy
lanalyzer
+lanka
latencies
+latin
+layerables
lbmpdm
lcgid
lcids
lcsap
+ldflags
leasequery
+legitimisation
+lempar
+level1login
+lexer
+lexpos
+lfsck
+libcap
+libdir
libgcrypt
+libiconv
+libndr
+libnl
+libosmocore
libpcap
+libscap
libsmi
+libssh
+libxml
licmgr
+lidaddr
linearity
+lineno
linkaddr
linkcss
linker
@@ -809,71 +1264,121 @@ linux
list1
literals
lithionics
+llcgprs
lnpdqp
+localise
+locamation
+locoinfof
+log3gpp
logcat
loghans
+loginack
loglocal
logoff
+logon
logout
logray
+london
+lookup
lookups
loopback
+loran
lossy
+lsarpc
lscap
+lsflags
+lstat
+ltype
lucent
+lumascape
luminaire
luminance
+luxembourg
lycamobile
macaddr
macaddress
+macos
macosx
+macros
macsec
+madge
+madrid
+magnitudes
mailto
+maintainers
malloc
+manand
manarg
+manip
mantissa
manuf
mappable
mariadb
marvell
+masterkey
mathieson
matrixes
+maxbytes
maxlen
+maxlength
+maxmind
+maxnoof
maybefcs
mbits
mbsfn
mbytes
+mcaal
mcast
+mcinfo
mcmemberrecord
+mcount
mcptt
mcsset
measurability
measurements
+measx
+mediaone
medion
megabit
megaco
mellanox
memcache
memcpy
+memset
menubar
mergecap
merkle
+meshconf
meshcop
messageid
metadata
+metamethod
+metamethods
+metatable
+metatraffic
meteorological
metermod
+meterset
+metre
+metres
México
+mfgfield
mgmtmsg
+miami
microapp
microbit
midamble
+migratable
+milano
millimeters
milliwatt
mingw
+minimap
miniport
minislot
minislots
+minizip
+minmax
minus1
mirrorlink
misconfiguration
@@ -882,21 +1387,40 @@ mitel
mitsubishi
mkdir
mmdbresolve
+mmpdu
+modality
modbus
mode01
mode7
modepage
modespecificinfo
+modifier
+modifiers
+modulations
modulo
+modulus
+modus
+monday
+moscow
motorola
+móviles
mozilla
+mp4ves
mpeg4
mplstp
+mpquic
mpsse
mptcp
+mqcacf
+mqcmd
+mqiacf
+mqiach
+mqiamo
+mqrccf
mrcpv
msbuild
mscldap
+msgflag
msgid
msglen
msgreq
@@ -909,6 +1433,7 @@ mtime
mtrace
mudurl
mulaw
+multi
multiband
multicarrier
multicast
@@ -928,25 +1453,43 @@ multiplexers
multiplexing
multiplicative
multiplicator
+multiplier
multirat
multirate
multislot
multistate
mumbai
+mutex
+muting
+myanmar
mycapture
mycaptures
mydns
myhost
mysql
nacks
+nagle
namelen
+nameor
namespace
+nanosec
naptr
narrowband
+nasdaq
+natively
+nbifom
nbrar
ndpsm
+ndriver
+nederland
negotiability
+neighbour
+neighbouring
+neighbours
nessie
+nested2party
+nested3party
+nested4party
netboot
netfilter
netflow
@@ -965,33 +1508,54 @@ newpw
nexthop
nextseq
nfs2err
+nfs3err
nfs4err
+nfula
nghttp
ngran
ngsniffer
+niagara
niagra
+nigam
+nikon
+nintendo
+nistp
nitnxlate
+njack
+nlpid
+nmtcommand
nnsvc
+noalign
noascii
noauth
nodeid
+nodiscriminant
nofcs
nokia
+nolintnextline
nominals
+non3gpp
+nonask
nonblock
noncriticalextension
noncriticalextensions
nopad
noqueue
+nordic
nordig
+noreply
+norge
+normalised
nortel
notarized
notational
notif
notifier
notset
+notsupp
notused
novell
+november
nowait
nowrap
npcap
@@ -1002,13 +1566,21 @@ nrtcws
nsapi
nssai
nssvc
+nstat
nstime
nstrace
+nstring
+nt4change
ntlmssp
+ntlmv
ntohl
ntohs
+ntstatus
+nttime
ntwkconn
nullptr
+numitems
+nvgre
nvmeof
nvram
oampdu
@@ -1020,28 +1592,45 @@ octal
octet
octets
octetstring
+october
oextcap
ofdma
offloadability
+offsetp
+ofpacpt
ofpat
ofpbac
+ofpbfc
+ofpbic
+ofpbmc
ofpbrc
ofpet
+ofpfmfc
ofpgmfc
+ofpgt
+ofpit
+ofpmf
+ofpmmfc
ofpmp
+ofppc
ofppf
+ofppr
ofprr
ofptfpt
ofpxmt
om2000
omniidl
+omron
onboarding
onduration
onoff
ontime
+opcfoundation
opcode
opcodes
opcua
+openfd
+opengl
openssh
openssl
openstreetmap
@@ -1049,40 +1638,63 @@ openvpn
opflags
oplock
opnum
+optarg
optimisation
+optimised
optimizations
optimizer
optiplex
optreq
+optrsp
+optus
ordinal
+organisation
+orpcthat
+orthogonal
oscillatory
oscore
osdmap
osmocom
osmux
ospf6
+othersat
outhdr
+overlay1mask
+overlay2mask
+overridable
+p2pstar
pacch
packetcable
packetization
packetized
pagings
+panasonic
+parallelism
parallelization
param
+parameterfv
+parameteriv
parameterization
parameterized
+paramfmt
params
paramset
parens
+paris
parlay
parms
+parsed
parser
+parsers
parses
+partitioning
passcode
passivated
passkey
passthrough
passwd
+pathconf
+paxos
pbcch
pcapng
pccch
@@ -1090,13 +1702,18 @@ pcch
pcell
pcmax
pcmaxc
+pcre2pattern
pcrepattern
pdcch
+pdelay
pdsch
pdustatus
peeraddr
peerkey
+pentium
periodicities
+periodicity
+peripherals
peristency
persistency
pfname
@@ -1104,23 +1721,32 @@ pgpool
pharos
phaser
phasor
+phasors
phich
phonebook
+phymod
physcellid
picmg
pinfo
pixmap
plaintext
+planar
plano
plixer
+plmnid
plugin
pluginize
plugins
pluginsdir
pmconfig
+pmkid
pname
polestar
+polycom
+polygon
+polynom
popup
+portableapps
portcounters
portid
portinfo
@@ -1129,36 +1755,61 @@ portno
portnumber
portset
portstatus
+positioner
posix
postfix
powercontrol
pppdump
pppoe
prach
+pradesh
+preambles
preauth
+precisions
+precoding
+precompilation
+precompiled
+precompute
preconfiguration
preconfigured
predef
+predefines
preempting
preemption
+prefetch
+prefill
prefname
prefs
preloaded
prepay
prepend
+prepost
+preprocess
+preprocessor
preshared
+prespecified
+presubscribed
+prettification
printf
+prioritised
+prioritising
prioritization
prioritized
privkey
procid
+profi
profidrive
+profienergy
profinet
+programmatic
promisc
promiscsniff
promiscuously
propertykey
+prosody
+protabbrev
protected
+proto
protoabbrev
protobuf
protocolie
@@ -1167,13 +1818,16 @@ proxied
proxying
proxykey
pscell
+psecs
pseudowire
psname
ptime
+ptlrpc
ptvcursor
ptype
pubdir
pubkey
+publickey
pucch
pusch
pwach
@@ -1182,39 +1836,65 @@ pxeclient
pytest
qam16
qam64
+qcustomplot
qmgmt
qnet6
+qnsview
qosinfo
+qpack
qsearch
+qstring
+qtbase
+qtbug
quadlet
quadrature
quadro
+quadruplet
+quadruplets
+qualcomm
quantifiers
+quantization
+quantized
queryhit
queryset
+queueing
+quiesce
quiescing
quintuplet
quintuplets
+quotactl
r3info
radcom
radeon
+radians
radiotap
radix
ralink
ranap
+randomisation
randomization
randomize
randomizer
randpkt
+randr
raster
+rawip
+rcode
+rcsta
+rctxinfo
+rdeth
rdpudp
rdtci
reachability
+readdir
readme
+realise
realloc
realtek
realtime
reassembles
+reassemblies
+reassembly
reassigning
reassignments
reassigns
@@ -1234,7 +1914,11 @@ recalculate
recalculating
recalculation
recalibrate
+recognise
+recognised
+recognises
recognizer
+recompile
recompiled
recompiling
recomputed
@@ -1246,45 +1930,73 @@ reconfigured
reconfigures
reconfirm
reconfrqst
+reconnection
+recurse
+recurses
recursively
+redefinition
redelivered
redelivery
redir
+redirection
redirector
redirects
+redissect
+redissecting
+redissection
+redist
redistributable
redistributables
+redistributes
+reenabled
reencyption
reentry
reestablishing
reestablishment
refactored
+refactoring
referer
referrer
regex
regexp
+regid
regionid
+regname
+reimplement
+reimplementation
reimplemented
+reimplementing
reinitialization
reinitialize
reinitialized
reinitializing
reinjected
reinjection
+reint
reinvoke
rekey
rekeying
relocatable
remapping
+remediation
+removability
+renegotiation
+renesas
renumbering
reoptimization
reoptimized
reordercap
reorigination
+reparse
+replot
representable
+reproducible
reprogrammable
reprogramming
+republish
+reqinfo
requester
+requesters
requestor
requeue
reregister
@@ -1295,6 +2007,8 @@ resampled
resampler
rescan
resegment
+resegmentation
+resegmented
resend
resequencing
reservable
@@ -1310,11 +2024,19 @@ resized
resolvable
resolver
resolvers
+restofline
resub
resubmission
+resync
+resynchronisation
resynchronization
resynchronize
+resynchronized
+retap
+retarder
+rethrown
retrans
+retranslated
retransmission
retransmissions
retransmit
@@ -1324,78 +2046,138 @@ retransmitter
retries
retry
retrying
+returndiag
retval
retyping
revalidate
revalidation
revertive
revocations
+rf4ce
rfcomm
+rffpcnex
+rfinfo
rfmon
rgoose
ripemd
rlcmac
rmcap
+rmdir
rngrsp
rnsap
roamer
+robert
+romania
+rotterdam
routable
+routererr
rowfmt
rpcap
+rpcsec
rpmbuild
rsocket
rsrvd
rtitcp
rtpdump
rtpevent
+rtpfb
rtpmidi
rtpmux
+rtpstream
ruleset
+russian
+russwin
+rwanda
+rxbitmask
rxchannel
+rxflags
rxlen
rxlev
+rxmcsmap
rxreq
+s3accountlist
+s3interval
s7comm
sabme
sacch
+saddr
+samsung
sanicap
+sanitization
sanitize
+sanitizers
+santa
+santiago
sapgui
+sarajevo
satisfiable
+satnet
+saturates
+saturday
+savvius
scalability
+scalable
scaleout
scaler
scannable
scdma
scell
+sched
+schema
+schemas
+schweiz
scoped
scrollbar
+sczdx
sdcch
sdjournal
+sdkflags
+sdram
sdusize
+secchan
+sectigo
sectorization
sectorized
+secur
+seg1a
+seglen
segmenting
segno
+selectivity
+semaphores
+semcheck
semiautomatic
sendto
+seoul
separability
separators
+seqlen
seqno
seqnr
seqnum
+seqpkt
sequenceno
sercos
+sercosiii
+serialisation
+serialised
+serialization
serialize
serialized
+serializes
+serverlst
+serversrt
servlet
sessionid
sessionkey
setattr
setcap
+setconf
+setkey
setuid
severities
sfiocr
+sflag
sflow
sftpserver
sftserver
@@ -1405,30 +2187,55 @@ sha256
sha384
sha512
sharkd
+shenzhen
shomiti
+shortstr
+showtechnic
+shtdn
siapp
sidelink
+sigcomp
+siginfo
+siglen
signaal
signaling
signon
+simplestatusbit
simulcast
+simulcrypt
+singulation
+sinsp
+sinteger
sistemas
+sizeof
skippable
skype
slaac
slimp
+slovak
slsch
+smime
+sminfo
+smpdirected
smpte
smrse
sname
snaplen
snow3g
snprintf
+sockaddr
+socketcan
softkey
+solaris
solera
someip
someipsd
sonet
+spacebar
+spacetel
+spacings
+spandsp
+spanish
spare
spare1
spare2
@@ -1440,39 +2247,70 @@ spare7
spare8
spare9
spcell
+specifier
specifiers
spectrograph
speex
+speexdsp
+spirent
spline
spnego
spoofing
spooled
+spooler
+spoolss
+sqlite
+sqlwarn
srbid
+srcdst
+srcip
srcport
srtcp
srvcc
+srvsvc
+ss7pcs
+sscanf
+sscop
sshdump
sshkey
ssupervisor
+stainfo
stanag
+standardised
+standardization
+starttls
stateful
statfs
+statsum
statusbar
+statvfs
stderr
stdin
+stdio
stdout
+steinigke
+stevens
+stmin
+stnode
+strawberryperl
strbuf
+strcmp
strdup
streamid
+strfunc
stringz
stringzpad
+strtoul
struct
structs
+stype
subaddress
subband
subcarrier
subcarriers
+subchan
subchannel
+subchannelization
subcode
subdevice
subdissector
@@ -1481,6 +2319,7 @@ subdoc
subelem
subelement
subelements
+subelt
subframes
subfunc
subhd
@@ -1488,6 +2327,7 @@ subheader
subheaders
subids
subidx
+subikdisk
subindex
subkey
subm
@@ -1504,25 +2344,45 @@ subpdu
subpm
subprocesstest
subquery
+subrect
subrects
subselect
subselection
subslot
+subsys
subtlv
subtree
subtrees
+sudan
+sunday
+superseding
superset
+suppfeat
+svalidator
+svcparam
sverige
svhdx
+swedish
+swisscom
+swissvoice
switchinfo
+sydney
symantec
+symbian
+symbl
+symlink
+synchronisation
+synchronise
+synchronised
synchronizer
synchronizing
synchronously
syncman
+syncookie
syniverse
synphasor
syntaxes
+synthesiser
sysdig
sysex
sysframe
@@ -1531,7 +2391,11 @@ sysmac
systemd
tablemod
tabular
+taiwan
+tanzania
+tbilisi
tclas
+tcpcc
tcpdump
tcpflags
tcpip
@@ -1542,48 +2406,80 @@ tdd384
tdd768
technica
Tektronix
+telco
+telecom
+télécom
Telecomunicaciones
+telecomunicazioni
telefonica
Telefónica
Teléfonos
telekom
+telematic
+telematics
telenor
+teleservice
+teleservices
teletex
telfonica
telia
+telkom
+telnet
+telstra
teredo
+terminations
tesla
+texas
text2pcap
textbox
+tfields
+tfmccack
+tfshark
thermister
thermistor
+throughput
thunderx
+thursday
timeout
timeslot
timestamp
timestamps
+timetag
timezone
tipcv
+titlebar
+tlvlen
+tlvtype
+tname
+tobago
toggled
+toggles
toggling
+tokenized
+tokyo
toolbar
toolongfragment
toolset
tooltip
+topologies
toprowflag
topup
toshiba
+tostr
+tostring
totemsrp
touchlink
touchpad
traceroute
traff
+trans
transceive
transcoder
transiently
transifex
transitioning
transitivity
+transportcc
transum
transversal
traveler
@@ -1594,9 +2490,14 @@ truncatable
truncate
truncates
truncating
+truphone
+trustdom
+truthiness
tshark
tspec
+tsprec
tstamp
+tuesday
tunid
tunneled
tunneling
@@ -1604,16 +2505,25 @@ tuple
tuples
tvbparse
tvbuff
+tvbuffer
twamp
twopc
txchannel
+txmcsmap
+txpwr
+txsta
+txtmod
type1
type2
+type2mimo
type3
typedef
typeof
uarfcn
+uaudp
uavcan
+ubertooth
+ubiquisys
uboot
ubuntu
ucast
@@ -1621,41 +2531,55 @@ udpcp
udpdump
udphdr
udrei
+uganda
uievent
uint16
uint32
uint8
+uinteger
ulmap
ulsch
+umask
unack
unacked
unadmitted
unadvise
+unalias
unaligned
unallocated
unallowed
+unary
unassign
unassoc
+unassociated
unauthenticated
+unauthorised
+unavailability
unbind
+unblocking
unbuffered
uncalculated
uncalibrated
uncategorized
unchannelized
+unchunked
unciphered
uncoloured
uncomment
+uncommented
uncompensated
uncompress
uncompressed
+uncompresses
uncompressing
uncompression
unconfigurable
unconfigured
unconfirm
+unconstrained
uncontended
uncorrectable
+uncorrected
undecidable
undecipherable
undecodable
@@ -1663,16 +2587,22 @@ undecoded
undecryptable
undecrypted
undelete
+undelimited
undeliverable
underflow
underrun
+undisplayed
undisposed
+undissectable
undissected
unduplicated
+unencoded
unencrypted
+unescape
unescaped
unescaping
unexported
+unfeasible
unformatted
unfragmented
unframed
@@ -1682,15 +2612,20 @@ unhidden
unicast
unicode
unicom
+unidirectionally
unignore
unimplemented
uninformative
+uninhibit
+uninitialised
uninitialized
uninstall
uninstallation
uninstalled
uninstaller
uninterruptable
+unintuitive
+unistim
universitaet
unjoin
unjoined
@@ -1703,16 +2638,21 @@ unmap
unmappable
unmark
unmarshal
+unmeasured
unmerged
unmodulated
+unmount
unmute
unmuted
unnumb
unoptimized
unordered
+unpaired
unparsable
unparseable
unparsed
+unpersistable
+unprintable
unprocessable
unpublish
unpunctuated
@@ -1720,20 +2660,31 @@ unquoted
unreach
unreassembled
unreceived
+unrecognised
unrecoverable
unrecovered
unregister
unregistration
unreportable
+unres
unresolvable
unresponded
unroutable
+unsaved
+unscaled
unsecure
unsegmented
unsequenced
+unset
+unshifted
+unsilence
+unsnoopables
+unsorted
unspec
+unspecific
unsubscribe
unsubscribed
+unsupportable
unsynchronized
untagged
unterminated
@@ -1751,12 +2702,17 @@ upload
uploaded
uploading
uploads
+uptime
urlencoded
+urllib
urnti
usability
+usbdump
usbmon
usbms
usbpcap
+usecs
+usenet
userauth
userdata
userinfo
@@ -1767,19 +2723,26 @@ utilization
utils
utran
uuencoded
+uuids
v1250
v1310
v1410
v1530
v1610
+valgrind
+validations
validator
+valstringname
+varbyte
varint
vcpkg
vcredist
vcxproj
vector3d
venusmngr
+verbose
verbosity
+verifications
verifier
verizon
version2
@@ -1790,17 +2753,26 @@ version6
version7
versioned
versioning
+vfinforecord
vhdset
viavi
virtex
virtio
virtualization
+virtualized
vlans
+vldbentry
+vmods
+vmware
vnode
vocoder
+vodacom
vodafone
voipmetrics
volerr
+volid
+voltype
+vserver
vxlan
wakeup
wapforum
@@ -1808,18 +2780,24 @@ wbxml
webcam
webkit
websocket
+wednesday
+werror
+whereis
whoami
+whois
wideband
wifidump
wikipedia
wikis
wimax
wimaxasncp
+win2k
winflexbison
winget
winpcap
winspool
wiphy
+wirelen
wireshark
wiretap
wisun
@@ -1831,6 +2809,8 @@ wowlan
wpcap
wrepl
writable
+writecap
+wsbuglink
wsbuild
wscale
wscbor
@@ -1844,14 +2824,33 @@ xattr
xauth
xchannel
xcode
+xdist
xetra
+xf86dri
+xf86vidmode
xferext
+xferflags
+xferstatus
+xfixes
+xhtml
+xinerama
+xinput
xmlns
+xoffset
+xprint
+xproto
+xselinux
xsltproc
+xtest
xtreme
+yyextra
+yyscanner
+yyterminate
+yytext
z3950
zbncp
zeroes
zigbee
+zlibng
zugtyp
zürich
diff --git a/tools/ws-coding-style.cfg b/tools/ws-coding-style.cfg
index aabd05e3..8bdd4efc 100644
--- a/tools/ws-coding-style.cfg
+++ b/tools/ws-coding-style.cfg
@@ -157,7 +157,7 @@ sp_cond_colon = force # ignore/add/remove/force
# Add or remove space around the '?' in 'b ? t : f'
sp_cond_question = force # ignore/add/remove/force
-# Add or remove space before a semicolon of an empty part of a for statment.
+# Add or remove space before a semicolon of an empty part of a for statement.
sp_before_semi_for_empty = force # ignore/add/remove/force
# Space between close brace and else
diff --git a/tools/yacc.py b/tools/yacc.py
index 1352e963..3466fbb5 100644
--- a/tools/yacc.py
+++ b/tools/yacc.py
@@ -1,31 +1,11 @@
# -----------------------------------------------------------------------------
# ply: yacc.py
#
-# Copyright (C) 2001-2015,
+# Copyright (C) 2001-2018
# David M. Beazley (Dabeaz LLC)
# All rights reserved.
#
# SPDX-License-Identifier: BSD-3-Clause
-# -----------------------------------------------------------------------------
-#
-# This implements an LR parser that is constructed from grammar rules defined
-# as Python functions. The grammer is specified by supplying the BNF inside
-# Python documentation strings. The inspiration for this technique was borrowed
-# from John Aycock's Spark parsing system. PLY might be viewed as cross between
-# Spark and the GNU bison utility.
-#
-# The current implementation is only somewhat object-oriented. The
-# LR parser itself is defined in terms of an object (which allows multiple
-# parsers to co-exist). However, most of the variables used during table
-# construction are defined in terms of global variables. Users shouldn't
-# notice unless they are trying to define multiple parsers at the same
-# time using threads (in which case they should have their head examined).
-#
-# This implementation supports both SLR and LALR(1) parsing. LALR(1)
-# support was originally implemented by Elias Ioup (ezioup@alumni.uchicago.edu),
-# using the algorithm found in Aho, Sethi, and Ullman "Compilers: Principles,
-# Techniques, and Tools" (The Dragon Book). LALR(1) has since been replaced
-# by the more efficient DeRemer and Pennello algorithm.
#
# :::::::: WARNING :::::::
#
@@ -41,11 +21,10 @@ import types
import sys
import os.path
import inspect
-import base64
import warnings
-__version__ = '3.8'
-__tabversion__ = '3.8'
+__version__ = '3.11'
+__tabversion__ = '3.10'
#-----------------------------------------------------------------------------
# === User configurable parameters ===
@@ -245,6 +224,9 @@ class YaccProduction:
def lexpos(self, n):
return getattr(self.slice[n], 'lexpos', 0)
+ def set_lexpos(self, n, lexpos):
+ self.slice[n].lexpos = lexpos
+
def lexspan(self, n):
startpos = getattr(self.slice[n], 'lexpos', 0)
endpos = getattr(self.slice[n], 'endlexpos', startpos)
@@ -286,7 +268,7 @@ class LRParser:
# certain kinds of advanced parsing situations where the lexer and parser interact with
# each other or change states (i.e., manipulation of scope, lexer states, etc.).
#
- # See: https://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
+ # See: http://www.gnu.org/software/bison/manual/html_node/Default-Reductions.html#Default-Reductions
def set_defaulted_states(self):
self.defaulted_states = {}
for state, actions in self.action.items():
@@ -474,8 +456,9 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
- del statestack[-plen:]
+ self.state = state
p.callable(pslice)
+ del statestack[-plen:]
#--! DEBUG
debug.info('Result : %s', format_result(pslice[0]))
#--! DEBUG
@@ -484,14 +467,16 @@ class LRParser:
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -514,6 +499,7 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
+ self.state = state
p.callable(pslice)
#--! DEBUG
debug.info('Result : %s', format_result(pslice[0]))
@@ -523,14 +509,15 @@ class LRParser:
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -556,7 +543,7 @@ class LRParser:
# If there are any synchronization rules, they may
# catch it.
#
- # In addition to pushing the error token, we call
+ # In addition to pushing the error token, we call call
# the user defined p_error() function if this is the
# first syntax error. This function is only called if
# errorcount == 0.
@@ -569,6 +556,7 @@ class LRParser:
if self.errorfunc:
if errtoken and not hasattr(errtoken, 'lexer'):
errtoken.lexer = lexer
+ self.state = state
tok = call_errorfunc(self.errorfunc, errtoken, self)
if self.errorok:
# User must have done some kind of panic
@@ -788,21 +776,24 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
- del statestack[-plen:]
+ self.state = state
p.callable(pslice)
+ del statestack[-plen:]
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -825,20 +816,22 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
+ self.state = state
p.callable(pslice)
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -856,7 +849,7 @@ class LRParser:
# If there are any synchronization rules, they may
# catch it.
#
- # In addition to pushing the error token, we call
+ # In addition to pushing the error token, we call call
# the user defined p_error() function if this is the
# first syntax error. This function is only called if
# errorcount == 0.
@@ -869,6 +862,7 @@ class LRParser:
if self.errorfunc:
if errtoken and not hasattr(errtoken, 'lexer'):
errtoken.lexer = lexer
+ self.state = state
tok = call_errorfunc(self.errorfunc, errtoken, self)
if self.errorok:
# User must have done some kind of panic
@@ -1079,21 +1073,24 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
del symstack[-plen:]
- del statestack[-plen:]
+ self.state = state
p.callable(pslice)
+ del statestack[-plen:]
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ symstack.extend(targ[1:-1]) # Put the production slice back on the stack
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -1111,20 +1108,22 @@ class LRParser:
try:
# Call the grammar rule with our special slice object
+ self.state = state
p.callable(pslice)
symstack.append(sym)
state = goto[statestack[-1]][pname]
statestack.append(state)
except SyntaxError:
# If an error was set. Enter error recovery state
- lookaheadstack.append(lookahead)
- symstack.pop()
- statestack.pop()
+ lookaheadstack.append(lookahead) # Save the current lookahead token
+ statestack.pop() # Pop back one state (before the reduce)
state = statestack[-1]
sym.type = 'error'
+ sym.value = 'error'
lookahead = sym
errorcount = error_count
self.errorok = False
+
continue
# !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
@@ -1142,7 +1141,7 @@ class LRParser:
# If there are any synchronization rules, they may
# catch it.
#
- # In addition to pushing the error token, we call
+ # In addition to pushing the error token, we call call
# the user defined p_error() function if this is the
# first syntax error. This function is only called if
# errorcount == 0.
@@ -1155,6 +1154,7 @@ class LRParser:
if self.errorfunc:
if errtoken and not hasattr(errtoken, 'lexer'):
errtoken.lexer = lexer
+ self.state = state
tok = call_errorfunc(self.errorfunc, errtoken, self)
if self.errorok:
# User must have done some kind of panic
@@ -1319,7 +1319,7 @@ class Production(object):
p = LRItem(self, n)
# Precompute the list of productions immediately following.
try:
- p.lr_after = Prodnames[p.prod[n+1]]
+ p.lr_after = self.Prodnames[p.prod[n+1]]
except (IndexError, KeyError):
p.lr_after = []
try:
@@ -1459,7 +1459,7 @@ class Grammar(object):
self.Precedence = {} # Precedence rules for each terminal. Contains tuples of the
# form ('right',level) or ('nonassoc', level) or ('left',level)
- self.UsedPrecedence = set() # Precedence rules that were actually used by the grammer.
+ self.UsedPrecedence = set() # Precedence rules that were actually used by the grammar.
# This is only used to provide error checking and to generate
# a warning about unused precedence rules.
@@ -2260,7 +2260,6 @@ class LRGeneratedTable(LRTable):
# -----------------------------------------------------------------------------
def dr_relation(self, C, trans, nullable):
- dr_set = {}
state, N = trans
terms = []
@@ -2544,8 +2543,13 @@ class LRGeneratedTable(LRTable):
# Need to decide on shift or reduce here
# By default we favor shifting. Need to add
# some precedence rules here.
- sprec, slevel = Productions[st_actionp[a].number].prec
- rprec, rlevel = Precedence.get(a, ('right', 0))
+
+ # Shift precedence comes from the token
+ sprec, slevel = Precedence.get(a, ('right', 0))
+
+ # Reduce precedence comes from rule being reduced (p)
+ rprec, rlevel = Productions[p.number].prec
+
if (slevel < rlevel) or ((slevel == rlevel) and (rprec == 'left')):
# We really need to reduce here.
st_action[a] = -p.number
@@ -2603,8 +2607,13 @@ class LRGeneratedTable(LRTable):
# - if precedence of reduce rule is higher, we reduce.
# - if precedence of reduce is same and left assoc, we reduce.
# - otherwise we shift
- rprec, rlevel = Productions[st_actionp[a].number].prec
+
+ # Shift precedence comes from the token
sprec, slevel = Precedence.get(a, ('right', 0))
+
+ # Reduce precedence comes from the rule that could have been reduced
+ rprec, rlevel = Productions[st_actionp[a].number].prec
+
if (slevel > rlevel) or ((slevel == rlevel) and (rprec == 'right')):
# We decide to shift here... highest precedence to shift
Productions[st_actionp[a].number].reduced -= 1
@@ -2684,6 +2693,7 @@ class LRGeneratedTable(LRTable):
f.write('''
# %s
# This file is automatically generated. Do not edit.
+# pylint: disable=W,C,R
_tabversion = %r
_lr_method = %r
@@ -2917,28 +2927,20 @@ class ParserReflect(object):
# Compute a signature over the grammar
def signature(self):
+ parts = []
try:
- from hashlib import md5
- except ImportError:
- from md5 import md5
- try:
- sig = md5()
if self.start:
- sig.update(self.start.encode('latin-1'))
+ parts.append(self.start)
if self.prec:
- sig.update(''.join([''.join(p) for p in self.prec]).encode('latin-1'))
+ parts.append(''.join([''.join(p) for p in self.prec]))
if self.tokens:
- sig.update(' '.join(self.tokens).encode('latin-1'))
+ parts.append(' '.join(self.tokens))
for f in self.pfuncs:
if f[3]:
- sig.update(f[3].encode('latin-1'))
+ parts.append(f[3])
except (TypeError, ValueError):
pass
-
- digest = base64.b16encode(sig.digest())
- if sys.version_info[0] >= 3:
- digest = digest.decode('latin-1')
- return digest
+ return ''.join(parts)
# -----------------------------------------------------------------------------
# validate_modules()
@@ -2956,7 +2958,10 @@ class ParserReflect(object):
fre = re.compile(r'\s*def\s+(p_[a-zA-Z_0-9]*)\(')
for module in self.modules:
- lines, linen = inspect.getsourcelines(module)
+ try:
+ lines, linen = inspect.getsourcelines(module)
+ except IOError:
+ continue
counthash = {}
for linen, line in enumerate(lines):
@@ -3026,7 +3031,7 @@ class ParserReflect(object):
self.error = True
return
- self.tokens = tokens
+ self.tokens = sorted(tokens)
# Validate the tokens
def validate_tokens(self):
@@ -3084,7 +3089,7 @@ class ParserReflect(object):
if not name.startswith('p_') or name == 'p_error':
continue
if isinstance(item, (types.FunctionType, types.MethodType)):
- line = item.__code__.co_firstlineno
+ line = getattr(item, 'co_firstlineno', item.__code__.co_firstlineno)
module = inspect.getmodule(item)
p_functions.append((line, module, name, item.__doc__))
@@ -3186,9 +3191,13 @@ def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, star
if module:
_items = [(k, getattr(module, k)) for k in dir(module)]
pdict = dict(_items)
- # If no __file__ attribute is available, try to obtain it from the __module__ instead
+ # If no __file__ or __package__ attributes are available, try to obtain them
+ # from the __module__ instead
if '__file__' not in pdict:
pdict['__file__'] = sys.modules[pdict['__module__']].__file__
+ if '__package__' not in pdict and '__module__' in pdict:
+ if hasattr(sys.modules[pdict['__module__']], '__package__'):
+ pdict['__package__'] = sys.modules[pdict['__module__']].__package__
else:
pdict = get_caller_module_dict(2)
@@ -3262,7 +3271,7 @@ def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, star
else:
debuglog = NullLogger()
- debuglog.info('Created by PLY version %s (https://www.dabeaz.com/ply/)', __version__)
+ debuglog.info('Created by PLY version %s (http://www.dabeaz.com/ply)', __version__)
errors = False
@@ -3430,6 +3439,8 @@ def yacc(method='LALR', debug=yaccdebug, module=None, tabmodule=tab_module, star
if write_tables:
try:
lr.write_table(tabmodule, outputdir, signature)
+ if tabmodule in sys.modules:
+ del sys.modules[tabmodule]
except IOError as e:
errorlog.warning("Couldn't create %r. %s" % (tabmodule, e))