Diffstat (limited to 'test/lua')
-rw-r--r--  test/lua/acme_file.lua                                 | 1453
-rw-r--r--  test/lua/add_packet_field.lua                          |  899
-rw-r--r--  test/lua/byte_array.lua                                |  215
-rw-r--r--  test/lua/dir.lua                                       |  195
-rw-r--r--  test/lua/dissectFPM.lua                                |  452
-rw-r--r--  test/lua/dissector.lua                                 |  659
-rw-r--r--  test/lua/field.lua                                     |  165
-rw-r--r--  test/lua/field_setup.lua                               |  108
-rw-r--r--  test/lua/globals_2.2.txt                               | 1221
-rw-r--r--  test/lua/inspect.lua                                   |  715
-rw-r--r--  test/lua/int64.lua                                     |  360
-rw-r--r--  test/lua/listener.lua                                  |  246
-rw-r--r--  test/lua/nstime.lua                                    |  140
-rw-r--r--  test/lua/pcap_file.lua                                 |  752
-rw-r--r--  test/lua/pinfo.lua                                     |  220
-rw-r--r--  test/lua/proto.lua                                     |  211
-rw-r--r--  test/lua/protobuf_test_called_by_custom_dissector.lua |   68
-rw-r--r--  test/lua/protobuf_test_field_subdissector_table.lua   |    6
-rw-r--r--  test/lua/protofield.lua                                |  236
-rw-r--r--  test/lua/script_args.lua                               |   24
-rw-r--r--  test/lua/struct.lua                                    |  367
-rw-r--r--  test/lua/testlib.lua                                   |  174
-rw-r--r--  test/lua/try_heuristics.lua                            |   61
-rw-r--r--  test/lua/tvb.lua                                       |  922
-rw-r--r--  test/lua/unicode.lua                                   |   55
-rw-r--r--  test/lua/util.lua                                      |  118
-rw-r--r--  test/lua/verify_dissector.lua                          |  380
-rw-r--r--  test/lua/verify_globals.lua                            |  135
28 files changed, 10557 insertions, 0 deletions
diff --git a/test/lua/acme_file.lua b/test/lua/acme_file.lua
new file mode 100644
index 0000000..f159ba2
--- /dev/null
+++ b/test/lua/acme_file.lua
@@ -0,0 +1,1453 @@
+------------------------------------------
+-- acme_file_reader.lua
+-- Author: Hadriel Kaplan (hadrielk at yahoo dot com)
+-- version = 1.0
+-- date = 3/3/2014
+------------------------------------------
+--[[
+ This is a Wireshark Lua-based capture file reader.
+ This "capture file" reader reads message logs from Acme Packet (now Oracle) Session Border Controllers,
+ such as sipmsg.log files. There are several variants of the log file format, and the format can also change
+ depending on how the log file was generated and retrieved; for example, whether it was generated through a
+ 'tail' command, or FTP'ed by an FTP client that adds carriage-returns. This Lua file reader tries to handle
+ such conditions.
+
+ Note: this script wasn't written to be super-efficient, nor clever. When you've been writing Lua for a while
+ you get used to writing in a more compact, idiomatic fashion than this script does; but other people find
+ such Lua code hard to read, so I've tried to keep this simpler.
+
+ Features:
+ -handles sipmsg type logs, sipdns type logs, algd type logs
+ -handles both IPv4 and IPv6, for both UDP and TCP
+ -reads sipmsg logs from 3800, 4250, 4500, 9200, 6300 SBCs
+ -handles logs with extra carriage-returns and linefeeds, such as from certain FTP'ed cases
+ -handles logs generated/copied from a 'tail' command on the SBC ACLI
+ -handles MBCD messages in logs, and puts their decoded ascii description in comments in Wireshark
+
+ Issues:
+ -for very large logs (many megabytes), it takes a long time (many minutes)
+ -creates fake IP and UDP/TCP headers, which might be misleading
+ -has to guess sometimes, though it hasn't guessed wrong yet as far as I know
+
+ To-do:
+ - make it use Struct.tohex/fromhex now that we have the Struct library in Wireshark
+ - make it use a linux cooked-mode pseudo-header (see https://gitlab.com/wireshark/wireshark/-/wikis/SLL)
+ - make it use preferences, once I write C-code for Wireshark to do that :)
+ - rewrite some of the pattern searches to use real regex/PCRE instead?
+
+Example SIP over UDP message:
+Aug 26 19:25:10.685 On [5:0]2.1.1.1:5060 received from 2.1.2.115:5060
+REGISTER sip:2.1.1.1:5060 SIP/2.0
+Via: SIP/2.0/UDP 2.1.2.115:5060;branch=z9hG4bK6501441021660x81000412
+From: <sip:public_115@2.1.1.1:5060>;tag=520052-7015560x81000412
+To: <sip:public_115@2.1.1.1:5060>
+Call-ID: 680192-4234150x81000412@2.1.2.115
+CSeq: 247 REGISTER
+Contact: <sip:public_115@2.1.2.115:5060;transport=udp>
+Expires: 300
+Max-Forwards: 70
+Authorization: Digest username="public_115",realm="empirix.com",uri="sip:2.1.1.1",response="5d61837cc54dc27018a40f2532e622de",nonce="430f6ff09ecd8c3fdfc5430b6e7e437a4cf77057",algorithm=md5
+Content-Length: 0
+
+
+----------------------------------------
+Another one:
+2007-03-06 13:38:48.037 OPENED
+2007-03-06 13:38:48.037 OPENED
+2007-03-06 13:38:48.037 OPENED
+Mar 6 13:38:54.959 On [1:0]135.25.29.135:5060 received from 192.168.109.138:65471
+OPTIONS sip:135.25.29.135 SIP/2.0
+Accept: application/sdp
+User-Agent: ABS GW v5.1.0
+To: sip:135.25.29.135
+From: sip:192.168.109.138;tag=a2a090ade36bb108da70b0c8f7ba02e9
+Contact: sip:192.168.109.138
+Call-ID: 8c0296da4a0d9f4d97e1389cd28d2352@172.16.6.114
+CSeq: 347517161 OPTIONS
+Via: SIP/2.0/UDP 192.168.109.138;branch=z9hG4bK21feac80fe9a63c1cf2988baa2af0849
+Max-Forwards: 70
+Content-Length: 0
+
+
+----------------------------------------
+Another SIP over UDP (from 9200):
+File opened.
+Jun 8 14:34:22.599 UDP[3:0]10.102.131.194:5060 OPENED
+Jun 8 14:34:22.616 UDP[6:0]10.102.130.185:5060 OPENED
+Jun 8 14:34:49.416 On [6:0]10.102.130.185:5060 received from 10.102.130.150:5060
+REGISTER sip:csp.noklab.net SIP/2.0
+Via: SIP/2.0/UDP 192.168.1.100:5060;branch=z9hG4bK26b7a48d
+From: sip:34903@csp.noklab.net
+To: sip:34903@csp.noklab.net
+Call-ID: 003094c3-a0160002-23aa7e86-29e5808d@192.168.1.100
+CSeq: 144 REGISTER
+User-Agent: CSCO/7
+Contact: <sip:34903@192.168.1.100:5060>
+Content-Length: 0
+Expires: 3600
+
+
+----------------------------------------
+
+Example SIP over TCP message (note it ends in the middle of a header name):
+Jan 12 00:03:54.700 On 172.25.96.200:8194 received from 172.25.32.28:5060
+SIP/2.0 200 OK
+From: Unavailable <sip:Unavailable@172.25.96.200:5060;user=phone>;tag=1200822480
+To: 24001900011 <sip:0011@172.25.32.28:5060;user=phone>;tag=03c86c0b27df1b1254aeccbc000
+Call-ID: 7f6b0887-1d313896-1511da31-b045@144.229.136.111
+CSe
+----------------------------------------
+
+Example SIP Pre and Post-NAT messages:
+Post-NAT from private<realm=e911core> encoded:
+SIP/2.0 302 Moved Temporarily
+Call-ID: SD27o9f04-fcc63aa885c83e22a1be64cfc210b55e-vjvtv00
+CSeq: 2 INVITE
+From: <sip:7866932005@127.1.0.100:5060;user=phone;e911core=TSD5051AEPCORE-dnamt76v6nm04;CKE=BSLD-5cuduig6t52l2;e911vpn=TSD5051AEPVPN-7gdq13vt8fi59>;tag=SD27o9f04-10000000-0-1424021314
+To: <sip:911@127.0.0.100;user=phone;CKE=BSLD-8blt7m3dhnj17>;tag=10280004-0-1239441202
+Via: SIP/2.0/UDP 127.254.254.1:5060;branch=z9hG4bK5i4ue300dgrdras7q281.1
+Server: DC-SIP/1.2
+Content-Length: 0
+Contact: <sip:1111119999@127.0.0.100:5060;e911core=TSD5051AEPCORE-5n86t36uuma01>
+
+
+----------------------------------------
+Pre-NAT to private<realm=e911core> decode:
+ACK sip:911@127.0.0.100;user=phone;CKE=BSLD-8blt7m3dhnj17 SIP/2.0
+Via: SIP/2.0/UDP 127.254.254.1:5060;branch=z9hG4bK5i4ue300dgrdras7q281.1
+Call-ID: SD27o9f04-fcc63aa885c83e22a1be64cfc210b55e-vjvtv00
+CSeq: 2 ACK
+From: <sip:7866932005@127.1.0.100:5060;user=phone;e911core=TSD5051AEPCORE-dnamt76v6nm04;CKE=BSLD-5cuduig6t52l2;e911vpn=TSD5051AEPVPN-7gdq13vt8fi59>;tag=SD27o9f04-10000000-0-1424021314
+To: <sip:911@127.0.0.100;user=phone;CKE=BSLD-8blt7m3dhnj17>;tag=10280004-0-1239441202
+Max-Forwards: 70
+
+
+----------------------------------------
+
+Example DNS message:
+Nov 1 23:03:12.811 On 10.21.232.194:1122 received from 10.21.199.204:53
+DNS Response 3916 flags=8503 q=1 ans=0 auth=1 add=0 net-ttl=0
+ Q:NAPTR 7.6.5.4.3.2.1.0.1.2.e164
+ NS:SOA e164 ttl=0 netnumber01
+ rname=user.netnumber01
+ ser=223 ref=0 retry=0 exp=0 minttl=0
+
+ 0000: 0f 4c 85 03 00 01 00 00 00 01 00 00 01 37 01 36 .L...........7.6
+ 0010: 01 35 01 34 01 33 01 32 01 31 01 30 01 31 01 32 .5.4.3.2.1.0.1.2
+ 0020: 04 65 31 36 34 00 00 23 00 01 04 65 31 36 34 00 .e164..#...e164.
+ 0030: 00 06 00 01 00 00 00 00 00 33 0b 6e 65 74 6e 75 .........3.netnu
+ 0040: 6d 62 65 72 30 31 00 04 75 73 65 72 0b 6e 65 74 mber01..user.net
+ 0050: 6e 75 6d 62 65 72 30 31 00 00 00 00 df 00 00 00 number01........
+ 0060: 00 00 00 00 00 00 00 00 00 00 00 00 00 .............
+
+----------------------------------------
+Example MGCP message (note the IP/UDP headers are in the hex):
+Mar 1 14:37:26.683 On [0:803]172.16.84.141:2427 sent to 172.16.74.100:2427
+Packet:
+ 0000: 00 04 00 00 00 01 00 02 00 00 03 23 0a ad 00 c9 ...........#....
+ 0010: 45 00 00 a8 23 36 00 00 3c 11 63 fd ac 10 54 8d E...#6..<.c...T.
+ 0020: ac 10 4a 64 09 7b 09 7b 00 94 16 c2 32 35 30 20 ..Jd.{.{....250
+
+250 55363 Connection Deleted
+P: PS=6551, OS=1048160, PR=6517, OR=1042720, PL=0, JI=1, LA=5, PC/RPS=6466, PC/ROS=1034560, PC/RPL=0, PC/RJI=0
+
+----------------------------------------
+Example MBCD message:
+Mar 1 14:37:26.672 On 127.0.0.1:2946 sent to 127.0.0.1:2944
+ 0000: ac 3e fd a8 01 01 77 36 9e 00 37 10 0c 34 4c bc .>....w6..7..4L.
+ 0010: 00 30 23 0c 34 4c bc 00 11 33 00 0e 35 00 04 00 .0#.4L...3..5...
+ 0020: 00 00 00 30 00 04 00 00 00 00 23 0c 34 4c bd 00 ...0......#.4L..
+ 0030: 11 33 00 0e 35 00 04 00 00 00 00 30 00 04 00 00 .3..5......0....
+ 0040: 00 00 ..
+Transaction = 24589982 {
+ Context = 204754108 {
+ Subtract = 204754108 {
+ Audit {
+ Stats,
+ Flow
+ }
+ },
+ Subtract = 204754109 {
+ Audit {
+ Stats,
+ Flow
+ }
+ }
+ }
+}
+----------------------------------------
+
+]]----------------------------------------
+
+-- debug printer, set DEBUG to true to enable printing debug info
+-- set DEBUG2 to true to enable really verbose printing
+local DEBUG, DEBUG2 = true, false
+
+local dprint = function() end
+local dprint2 = function() end
+if DEBUG or DEBUG2 then
+ dprint = function(...)
+ print(table.concat({"Lua:", ...}," "))
+ end
+
+ if DEBUG2 then
+ dprint2 = dprint
+ end
+end
+
+-- this should be done as a preference setting
+local ALWAYS_UDP = true
+
+
+local fh = FileHandler.new("Oracle Acme Packet logs", "acme",
+ "A file reader for Oracle Acme Packet message logs such as sipmsg.log","rs")
+
+
+-- There are certain things we have to create fake state/data for, because they
+-- don't exist in the log file. For example, to create IP headers we have to create
+-- fake identification field values; to create timestamps we have to guess the
+-- year (and in some cases the month/day as well); and for TCP we have to create fake
+-- connection info, such as sequence numbers. We can't simply have a global static
+-- variable holding such things, because Wireshark reads the file sequentially at
+-- first, but then calls seek_read for random packets again and we don't want to
+-- re-create the fake info again because it will be wrong. So we need to create it
+-- for each packet and remember what we created for each packet, so that seek_read
+-- gets the same values. We could store the variables in a big table, keyed by the
+-- specific header info line for each one; but instead we'll key it off of the file
+-- position number, since read() sets it for Wireshark and seek_read() gets it from
+-- Wireshark. So we'll have a set of global statics used during read(), but the
+-- actual per-packet values will be stored in a table indexed/keyed by the file
+-- position number. A separate table holds TCP peer connection info as described
+-- later.
+
+-- I said above that this state is "global", but really it can't be global to this
+-- whole script file, because more than one file can be opened for reading at the
+-- same time. For example if the user presses the reload button, the capture file
+-- will be opened for reading before the previous (same) one is closed. So we have
+-- to store state per-file. The good news is Wireshark gives us a convenient way to
+-- do that, using the CaptureInfo.private_table attribute/member. We can save a Lua
+-- table with whatever contents we want, to this private_table member, and get it
+-- later during the other read/seek_read/close function calls.
+
+-- So to store this per-file state, we're going to use Lua class objects. They're
+-- just Lua tables that have functions and meta-functions and can be treated like
+-- objects in terms of syntax/behavior.
+
+local State = {}
+local State_mt = { __index = State }
+
+function State.new()
+ local new_class = { -- the new instance
+ -- stuff we need to keep track of to create fake info
+ ip_ident = 0,
+ tyear = 0,
+ tmonth = 0,
+ tmin = 0,
+ tsec = 0,
+ tmilli = 0,
+ nstime = NSTime(),
+ -- the following table holds per-packet info
+ -- the key index will be a number - the file position - but it won't be an array-type table (too sparse).
+ -- Each packet's entry is a table holding the "static" variables for that packet; this sub-table will be
+ -- array-style instead of a hashmap, to reduce size and improve performance.
+ -- This table needs to be cleared whenever the file is closed/opened.
+ packets = {},
+
+ -- the following local table holds TCP peer "connection" info, which is basically
+ -- TCP control block (TCB) type information; this is needed to create and keep track
+ -- of fake TCP sockets/headers for messages that went over TCP, for example for fake
+ -- sequence number info.
+ -- The key index for this is the local+remote ip:port strings concatenated.
+ -- The value is a sub-table, array style, holding the most recent sequence numbers.
+ -- This whole table needs to be cleared whenever the file is closed/opened.
+ tcb = {},
+
+ }
+ setmetatable( new_class, State_mt ) -- all instances share the same metatable
+ return new_class
+end
+
+-- the indices for the State.packets{} variable sub-tables
+local IP_IDENT = 1
+local TTIME = 2
+local LOCAL_SEQ = 3
+local REMOTE_SEQ = 4
+
+-- the indices for the State.tcb{} sub-tables
+local TLOCAL_SEQ = 1
+local TREMOTE_SEQ = 2
+
+-- helper functions
+local char = string.char
+local floor = math.floor
+
+-- takes a Lua number and converts it into a 2-byte binary string (network order)
+
+local function dec2bin16(num)
+ return Struct.pack(">I2",num)
+end
+
+-- takes a Lua number and converts it into a 4-byte binary string (network order)
+local function dec2bin32(num)
+ return Struct.pack(">I4",num)
+end
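+-- For example, dec2bin16(5060) yields the two bytes "\19\196" (0x13C4), which is how
+-- port 5060 gets written into the fake headers built further below.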
+
+
+-- function to skip log info before/between/after messages
+local delim = "^%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-%-$"
+-- words that, if found, cause the line to be skipped ("File ..." is found in 9200 logs)
+local skiplist = { " OPENED", " CLOSED", " STARTED", " STOPPED", "^File ", delim }
+-- pre/post NAT entries
+local pre_nat_header_pattern = "^Pre%-NAT to private<realm=([^>]+)> decode:\r?$"
+local post_nat_header_pattern = "^Post%-NAT from private<realm=([^>]+)> encoded:\r?$"
+
+local function skip_ahead(file, line, position)
+ repeat
+ local found = #line == 0 -- will be false unless the line is empty
+ for i, word in ipairs(skiplist) do
+ if line:find(word) then
+ found = true
+ break
+ end
+ end
+ if found then
+ position = file:seek()
+ line = file:read()
+ if not line then return nil end
+ elseif line:find(pre_nat_header_pattern) or line:find(post_nat_header_pattern) then
+ -- skip the whole message
+ found = true
+ repeat
+ line = file:read()
+ until not line or line:find(delim)
+ if not line then return nil end
+ end
+ until not found
+ return line, position
+end
+
+-- following pattern grabs month, day, hour, min, sec, millisecs
+local header_time_pattern = "^(%u%l%l) ?(%d%d?) (%d%d?):(%d%d):(%d%d)%.(%d%d%d) On "
+-- tail'ed file has no month/day/hour
+local header_tail_time_pattern = "^(%d%d):(%d%d)%.(%d%d%d) On "
+
+-- grabs local and remote IPv4:ports (not phy/vlan), and words in between (i.e., "sent to" or "received from")
+local header_address_pattern = "(%d%d?%d?%.%d%d?%d?%.%d%d?%d?%.%d%d?%d?):(%d+) (%l+ %l+) (%d%d?%d?%.%d%d?%d?%.%d%d?%d?%.%d%d?%d?):(%d+) ?\r?$"
+-- grabs local and remote IPv6:ports (not phy/vlan), and words in between (i.e., "sent to" or "received from")
+local header_v6address_pattern = "%[([:%x]+)%]:(%d+) (%l+ %l+) %[([:%x]+)%]:(%d+) ?\r?$"
+
+-- grabs phy/vlan info
+local header_phy_pattern = "%[(%d+):(%d+)%]"
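+-- A rough illustration (not used by the reader itself) of how these patterns pick apart
+-- one of the sample header lines shown at the top of this file:
+-- local sample = "Aug 26 19:25:10.685 On [5:0]2.1.1.1:5060 received from 2.1.2.115:5060"
+-- local _, pos, month, day, hour, min, sec, milli = sample:find(header_time_pattern)
+-- -- month="Aug", day="26", hour="19", min="25", sec="10", milli="685"
+-- local phy, vlan = sample:match(header_phy_pattern, pos + 1) -- phy="5", vlan="0"
+-- local lip, lport, dir, rip, rport = sample:match(header_address_pattern, pos + 1)
+-- -- lip="2.1.1.1", lport="5060", dir="received from", rip="2.1.2.115", rport="5060"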
+
+local SENT = 1
+local RECV = 2
+local function get_direction(phrase)
+ if #phrase == 7 and phrase:find("sent to") then
+ return SENT
+ elseif #phrase == 13 and phrase:find("received from") then
+ return RECV
+ end
+ dprint("direction phrase not found")
+ return nil
+end
+
+-- monthlist table for getting month number value from 3-char name (number is table index)
+local monthlist = {"Jan", "Feb", "Mar", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"}
+
+-- Compute the difference in seconds between local time and UTC
+-- from http://lua-users.org/wiki/TimeZone
+local function get_timezone()
+ local now = os.time()
+ return os.difftime(now, os.time(os.date("!*t", now)))
+end
+local timezone = get_timezone()
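+-- e.g. this yields 0 on a host set to UTC and -18000 (-5 hours) under US Eastern
+-- Standard Time; the value depends on the machine running the script.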
+
+function State:get_timestamp(line, file_position, seeking)
+ local i, line_pos, month, day, hour, min, sec, milli = line:find(header_time_pattern)
+ if not month then
+ return
+ end
+
+ if seeking then
+ -- we've seen this packet before, just go get the saved timestamp
+ sec = self.packets[file_position][TTIME]
+ if not sec then
+ dprint("failed to get saved timestamp for packet at position:", file_position)
+ return
+ end
+ return sec, line_pos
+ end
+
+ -- find the month's number
+ for index, name in ipairs(monthlist) do
+ if month == name then
+ month = index
+ break
+ end
+ end
+ if type(month) ~= "number" then return end
+
+ day = tonumber(day)
+ hour = tonumber(hour)
+ min = tonumber(min)
+ sec = tonumber(sec)
+ milli = tonumber(milli)
+
+ if not day or not hour or not min or not sec or not milli then
+ dprint("timestamp could not be determined")
+ return nil
+ end
+
+ -- we don't know what year the log file was created, so we have to guess
+ -- if we guess the current system year, then a log of December loaded in January will appear wrong,
+ -- as will a log file which lasts over new year
+ -- so we're going to check the current system month, and if it's less than the log file's then we'll
+ -- assume the log file started last year; if the system month is larger or equal, then we'll assume the log
+ -- file is of this year. We only do this checking once per file.
+ if self.tyear == 0 then
+ local curr_year, curr_month = tonumber(os.date("%Y")), tonumber(os.date("%m"))
+ if curr_month < month then
+ -- use last year
+ if curr_year > 0 then
+ curr_year = curr_year - 1
+ end
+ end
+ self.tyear = curr_year
+
+ -- XXX - but for purposes of testing, we just force the year to
+ -- 2014, so that we can compare the result of this code reading
+ -- an Acme log with the result of the pcapng reader reading a
+ -- pcapng file with the same packets - the time stamps in
+ -- pcapng files are times since the Epoch, so the year is known
+ self.tyear = 2014
+ end
+
+ -- if this message's month is less than previous message's, then year wrapped
+ if month < self.tmonth then
+ self.tyear = self.tyear + 1
+ end
+ self.tmonth = month
+
+ local timet = os.time({ ["year"] = self.tyear, ["month"] = month, ["day"] = day, ["hour"] = hour, ["min"] = min, ["sec"] = sec })
+ if not timet then
+ dprint("timestamp conversion failed")
+ end
+
+ timet = timet + timezone
+
+ -- make an NSTime
+ self.nstime = NSTime(timet, milli * 1000000)
+ self.packets[file_position][TTIME] = self.nstime
+
+ timet = timet + (milli/1000)
+ dprint2("found time of ", os.date("%c",timet), " with value=",timet)
+
+ return self.nstime, line_pos
+end
+
+-- get_tail_time() gets a fictitious timestamp starting from 19:00:00 on Dec 31, 1969, and incrementing based
+-- on the minutes/secs/millisecs seen (i.e., if the minute wrapped then hour increases by 1, etc.).
+-- this is needed for tail'ed log files, since they don't show month/day/hour
+function State:get_tail_time(line, file_position, seeking)
+ local i, line_pos, min, sec, milli = line:find(header_tail_time_pattern)
+ if not min then return end
+
+ if seeking then
+ -- we've seen this packet before, just go get the saved timestamp
+ sec = self.packets[file_position][TTIME]
+ if not sec then
+ dprint("failed to get saved timestamp for packet at position:", file_position)
+ return
+ end
+ return sec, line_pos
+ end
+
+ min = tonumber(min)
+ sec = tonumber(sec)
+ milli = tonumber(milli)
+
+ if not min or not sec or not milli then
+ dprint("timestamp could not be determined")
+ return nil
+ end
+
+ -- get difference in time
+ local tmin, tsec, tmilli, nstime = self.tmin, self.tsec, self.tmilli, self.nstime
+ local ttime = nstime.secs
+
+ -- min, sec, milli are what the log says this tail'ed packet is
+ -- tmin, tsec, tmilli are what we got from last packet
+ -- nstime is the unix time of that, and ttime is the seconds of that unix time
+
+ -- if minutes wrapped, or they're equal but seconds wrapped, then handle it as if in the next hour
+ if (min < tmin) or (min == tmin and sec < tsec) or (min == tmin and sec == tsec and milli < tmilli) then
+ -- something wrapped, calculate difference as if in next hour
+ ttime = ttime + (((min * 60) + sec + 3600) - ((tmin * 60) + tsec))
+ else
+ ttime = ttime + (((min * 60) + sec) - ((tmin * 60) + tsec))
+ end
+ self.tmin, self.tsec, self.tmilli = min, sec, milli
+ self.nstime = NSTime(ttime, milli * 1000000)
+ self.packets[file_position][TTIME] = self.nstime
+
+ return self.nstime, line_pos
+end
+
+local hexbin = {
+ ["0"]=0, ["1"]=1, ["2"]=2, ["3"]=3, ["4"]=4, ["5"]=5, ["6"]=6, ["7"]=7, ["8"]=8, ["9"]=9, ["a"]=10, ["b"]=11, ["c"]=12, ["d"]=13, ["e"]=14, ["f"]=15,
+ ["00"]=0, ["01"]=1, ["02"]=2, ["03"]=3, ["04"]=4, ["05"]=5, ["06"]=6, ["07"]=7, ["08"]=8, ["09"]=9, ["0a"]=10, ["0b"]=11, ["0c"]=12, ["0d"]=13, ["0e"]=14, ["0f"]=15,
+ ["10"]=16, ["11"]=17, ["12"]=18, ["13"]=19, ["14"]=20, ["15"]=21, ["16"]=22, ["17"]=23, ["18"]=24, ["19"]=25, ["1a"]=26, ["1b"]=27, ["1c"]=28, ["1d"]=29, ["1e"]=30, ["1f"]=31,
+ ["20"]=32, ["21"]=33, ["22"]=34, ["23"]=35, ["24"]=36, ["25"]=37, ["26"]=38, ["27"]=39, ["28"]=40, ["29"]=41, ["2a"]=42, ["2b"]=43, ["2c"]=44, ["2d"]=45, ["2e"]=46, ["2f"]=47,
+ ["30"]=48, ["31"]=49, ["32"]=50, ["33"]=51, ["34"]=52, ["35"]=53, ["36"]=54, ["37"]=55, ["38"]=56, ["39"]=57, ["3a"]=58, ["3b"]=59, ["3c"]=60, ["3d"]=61, ["3e"]=62, ["3f"]=63,
+ ["40"]=64, ["41"]=65, ["42"]=66, ["43"]=67, ["44"]=68, ["45"]=69, ["46"]=70, ["47"]=71, ["48"]=72, ["49"]=73, ["4a"]=74, ["4b"]=75, ["4c"]=76, ["4d"]=77, ["4e"]=78, ["4f"]=79,
+ ["50"]=80, ["51"]=81, ["52"]=82, ["53"]=83, ["54"]=84, ["55"]=85, ["56"]=86, ["57"]=87, ["58"]=88, ["59"]=89, ["5a"]=90, ["5b"]=91, ["5c"]=92, ["5d"]=93, ["5e"]=94, ["5f"]=95,
+ ["60"]=96, ["61"]=97, ["62"]=98, ["63"]=99, ["64"]=100, ["65"]=101, ["66"]=102, ["67"]=103, ["68"]=104, ["69"]=105, ["6a"]=106, ["6b"]=107, ["6c"]=108, ["6d"]=109, ["6e"]=110, ["6f"]=111,
+ ["70"]=112, ["71"]=113, ["72"]=114, ["73"]=115, ["74"]=116, ["75"]=117, ["76"]=118, ["77"]=119, ["78"]=120, ["79"]=121, ["7a"]=122, ["7b"]=123, ["7c"]=124, ["7d"]=125, ["7e"]=126, ["7f"]=127,
+ ["80"]=128, ["81"]=129, ["82"]=130, ["83"]=131, ["84"]=132, ["85"]=133, ["86"]=134, ["87"]=135, ["88"]=136, ["89"]=137, ["8a"]=138, ["8b"]=139, ["8c"]=140, ["8d"]=141, ["8e"]=142, ["8f"]=143,
+ ["90"]=144, ["91"]=145, ["92"]=146, ["93"]=147, ["94"]=148, ["95"]=149, ["96"]=150, ["97"]=151, ["98"]=152, ["99"]=153, ["9a"]=154, ["9b"]=155, ["9c"]=156, ["9d"]=157, ["9e"]=158, ["9f"]=159,
+ ["a0"]=160, ["a1"]=161, ["a2"]=162, ["a3"]=163, ["a4"]=164, ["a5"]=165, ["a6"]=166, ["a7"]=167, ["a8"]=168, ["a9"]=169, ["aa"]=170, ["ab"]=171, ["ac"]=172, ["ad"]=173, ["ae"]=174, ["af"]=175,
+ ["b0"]=176, ["b1"]=177, ["b2"]=178, ["b3"]=179, ["b4"]=180, ["b5"]=181, ["b6"]=182, ["b7"]=183, ["b8"]=184, ["b9"]=185, ["ba"]=186, ["bb"]=187, ["bc"]=188, ["bd"]=189, ["be"]=190, ["bf"]=191,
+ ["c0"]=192, ["c1"]=193, ["c2"]=194, ["c3"]=195, ["c4"]=196, ["c5"]=197, ["c6"]=198, ["c7"]=199, ["c8"]=200, ["c9"]=201, ["ca"]=202, ["cb"]=203, ["cc"]=204, ["cd"]=205, ["ce"]=206, ["cf"]=207,
+ ["d0"]=208, ["d1"]=209, ["d2"]=210, ["d3"]=211, ["d4"]=212, ["d5"]=213, ["d6"]=214, ["d7"]=215, ["d8"]=216, ["d9"]=217, ["da"]=218, ["db"]=219, ["dc"]=220, ["dd"]=221, ["de"]=222, ["df"]=223,
+ ["e0"]=224, ["e1"]=225, ["e2"]=226, ["e3"]=227, ["e4"]=228, ["e5"]=229, ["e6"]=230, ["e7"]=231, ["e8"]=232, ["e9"]=233, ["ea"]=234, ["eb"]=235, ["ec"]=236, ["ed"]=237, ["ee"]=238, ["ef"]=239,
+ ["f0"]=240, ["f1"]=241, ["f2"]=242, ["f3"]=243, ["f4"]=244, ["f5"]=245, ["f6"]=246, ["f7"]=247, ["f8"]=248, ["f9"]=249, ["fa"]=250, ["fb"]=251, ["fc"]=252, ["fd"]=253, ["fe"]=254, ["ff"]=255
+}
+
+local function iptobytes(ipaddr)
+ local bytes = { ipaddr:match("(%d+)%.(%d+)%.(%d+)%.(%d+)") }
+ if #bytes ~= 4 then
+ dprint("failed to get ip address bytes for '", ipaddr, "'")
+ return
+ end
+ local ip = ""
+ for i, byte in ipairs(bytes) do
+ ip = ip .. char(tonumber(byte))
+ end
+ return ip
+end
+
+local function hexword2bin(word)
+ if #word == 4 then
+ return char(hexbin[word:sub(1,2)], hexbin[word:sub(3,4)])
+ elseif #word == 3 then
+ return char(hexbin[word:sub(1,1)], hexbin[word:sub(2,3)])
+ elseif #word < 3 then
+ return char(0, hexbin[word])
+ end
+ return nil -- error
+end
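+-- For example, hexword2bin("8ac") returns "\8\172" and hexword2bin("0") returns "\0\0".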
+
+-- convert this 2620:0:60:8ac::102 to its 16-byte binary (=8 of 2-byte words)
+local NUMWORDS = 8
+local function ipv6tobytes(ipaddr)
+ -- start with all 16 bytes being zeroes
+ local words = { "\00\00", "\00\00", "\00\00", "\00\00", "\00\00", "\00\00", "\00\00", "\00\00" }
+ -- now walk from front of ipv6 address string replacing byte numbers above;
+ -- if we hit a "::", then jump to end and do it in reverse
+ local colon_s, colon_e = ipaddr:find("::%x")
+ if colon_s then
+ -- there's a double-colon, so split the string and do the end first, backwards
+ -- get each chunk first
+ local t = {}
+ local index, wordix = 1, NUMWORDS
+ for w in string.gmatch(ipaddr:sub(colon_e - 1), ":(%x+)") do
+ t[index] = hexword2bin(w)
+ index = index + 1
+ end
+ for ix=index-1, 1, -1 do
+ words[wordix] = t[ix]
+ wordix = wordix - 1
+ end
+ ipaddr = ipaddr:sub(1, colon_s)
+ end
+
+ local i = 1
+ for w in string.gmatch(ipaddr, "(%x+):?") do
+ words[i] = hexword2bin(w)
+ i = i + 1
+ end
+
+ if #words ~= NUMWORDS then
+ dprint("failed to get IPv6 address bytes for '", ipaddr, "'")
+ return
+ end
+
+ return table.concat(words)
+end
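+-- For example, ipv6tobytes("2620:0:60:8ac::102") returns the 16 bytes
+-- 2620 0000 0060 08ac 0000 0000 0000 0102 (shown here as hex words).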
+
+-- calculates checksum as done for IP, TCP, UDP
+local function checksum(chunk)
+ local sum = 0
+ -- take every 2-byte value and add them up
+ for one, two in chunk:gmatch("(.)(.)") do
+ sum = sum + (string.byte(one) * 256) + (string.byte(two))
+ while floor(sum / 65536) > 0 do
+ -- add carry/overflow value
+ sum = (sum % 65536) + (floor(sum / 65536))
+ end
+ end
+
+ -- now get one's complement of that
+ sum = 65535 - sum
+
+ -- and return it as a 2-byte string
+ return dec2bin16(sum)
+end
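+-- For example, checksum("\0\1\0\2") sums the 16-bit words 0x0001 + 0x0002 = 3 and
+-- returns the one's complement 0xFFFC as the two bytes "\255\252".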
+
+----------------------------------------
+-- protocol type number
+local PROTO_UDP = "\17"
+local PROTO_TCP = "\06"
+-- enum
+local IPv4 = 1
+local IPv6 = 2
+-- both type enums and header lengths
+local UDP = 8
+local TCP = 20
+
+----------------------------------------
+-- Packet creation/serialization occurs using a Lua class object model
+-- There's a single base class 'Packet' which has data/methods every packet type has
+-- 'RawPacket' and 'DataPacket' both derive from 'Packet'.
+-- 'RawPacket' is for packets which the log file has the raw IP/UDP headers for,
+-- such as ALG log messages (MGCP/NCS). Since the IP headers are in them, we use those.
+-- 'DataPacket' is for packets which the log file only has payload data for, and
+-- we need to create fake IP/UDP or IP/TCP headers for.
+-- 'BinPacket' and 'AsciiPacket' both derive from 'DataPacket'.
+-- 'BinPacket' is for binary-style logged packets, such as MBCD or DNS, while
+-- 'AsciiPacket' is for ascii-style ones such as SIP.
+-- 'DnsPacket' derives from 'BinPacket', for DNS-style logs.
+
+-- Each class has a read_data() method, which reads in the packet data, builds the packet,
+-- and sets the Wireshark buffer. Some classes have a get_data() method which read_data()
+-- calls, to get the payload data before building a fake packet.
+
+-- The base Packet class has get_hex_data() and get_ascii_data() methods, to get the payload
+-- in either form; those base methods are called by get_data() or read_data() of the derived
+-- classes.
+
+-- For performance reasons, packet data is read line-by-line into a table (called bufftbl),
+-- which is concatenated at the end. This avoids Lua building interim strings and garbage
+-- collecting them. But it makes the code uglier. The get_data()/get_hex_data()/get_ascii_data()
+-- methods read into the table they are passed, while the read_data() functions handle managing
+-- the table.
+
+----------------------------------------
+----------------------------------------
+-- The base Packet class, from which others derive
+-- all Packets have a ptype, timestamp, source and dest address:port, and data
+--
+local Packet = {}
+local Packet_mt = { __index = Packet }
+
+function Packet.new(state, timestamp, direction, source_ip, source_port, dest_ip, dest_port, ptype, ttype, file_position)
+ local new_class = { -- the new instance
+ ["state"] = state,
+ ["timestamp"] = timestamp,
+ ["direction"] = direction,
+ ["source_ip"] = source_ip,
+ ["source_port"] = source_port,
+ ["dest_ip"] = dest_ip,
+ ["dest_port"] = dest_port,
+ ["ptype"] = ptype,
+ ["ttype"] = ttype,
+ ["file_position"] = file_position
+ }
+ setmetatable( new_class, Packet_mt ) -- all instances share the same metatable
+ return new_class
+end
+
+function Packet:set_comment(comment)
+ self["comment"] = comment
+end
+
+function Packet:set_wslua_fields(frame)
+ frame.time = self.timestamp
+ frame.rec_type = wtap_rec_types.PACKET
+ frame.flags = wtap_presence_flags.TS -- for timestamp
+ if self.comment then
+ frame.comment = self.comment
+ end
+ return true
+end
+
+local packet_hexline_pattern = "^ %x%x%x0: %x%x"
+function Packet:get_hex_data(file, line, bufftbl, index)
+ local start = index
+
+ dprint2("Packet:get_hex_data() called")
+ repeat
+ for word in line:gmatch("(%x%x) ") do
+ bufftbl[index] = char(hexbin[word])
+ index = index + 1
+ if ((index - start) % 16) == 0 then break end
+ end
+ line = file:read()
+ until not line or not line:find(packet_hexline_pattern)
+
+ return index - start, line
+end
+
+function Packet:get_ascii_data(file, line, bufftbl, index, only_newline)
+ local bufflen = 0 -- keep tally of total length of payload
+ local found_delim = true
+
+ dprint2("Packet:get_ascii_data() called")
+ repeat
+ bufftbl[index] = line
+ bufflen = bufflen + #line
+
+ -- sanity check if line has "\r" at end, and if so only add \n
+ if line:find("\r",-1,true) then
+ bufftbl[index+1] = "\n"
+ bufflen = bufflen + 1
+ dprint2("Found carriage-return at end of line")
+ elseif only_newline then
+ -- only add a newline
+ bufftbl[index+1] = "\n"
+ bufflen = bufflen + 1
+ else
+ bufftbl[index+1] = "\r\n"
+ bufflen = bufflen + 2
+ end
+ index = index + 2
+
+ -- read next line now
+ line = file:read()
+ if not line then
+ -- hit eof?
+ found_delim = false
+ break
+ end
+
+ until line:find(delim)
+
+ -- get rid of last \r\n, if we found a dashed delimiter, as it's not part of packet
+ if found_delim then
+ bufflen = bufflen - bufftbl[index-1]:len()
+ bufftbl[index-1] = nil
+ end
+
+ dprint2("Packet:get_ascii_data() returning", bufflen)
+ return bufflen
+end
+
+----------------------------------------
+-- RawPacket class, for packets that the log file contains the whole IP header for, such as algd logs
+--
+local RawPacket = {}
+local RawPacket_mt = { __index = RawPacket }
+setmetatable( RawPacket, Packet_mt ) -- make RawPacket inherit from Packet
+
+function RawPacket.new(...)
+ local new_class = Packet.new(...) -- the new instance
+ setmetatable( new_class, RawPacket_mt ) -- all instances share the same metatable
+ return new_class
+end
+
+function RawPacket:read_data(file, frame, line, seeking)
+ local bufftbl = {} -- table to hold data bytes
+ local index = 1 -- start at first slot in array
+
+ -- need to skip the "Packet:" line and the first 0000: line, as they're internal junk
+ line = file:read()
+ line = file:read()
+
+ dprint2("RawPacket:read_data() getting hex from line='", line, "'")
+ local bufflen, line = self:get_hex_data(file, line, bufftbl, index)
+ if not bufflen or bufflen < 21 then
+ dprint("error getting binary data")
+ return false
+ end
+
+ -- add remainder as more packet data, but first delete overlap
+ -- see if frag bits are set in IP header, to see if UDP/TCP header exists
+ if self.ptype == IPv4 then
+ -- grab byte with frag flags and first byte of offset
+ local flag = string.byte(bufftbl[7]) -- converts binary character to number
+ local frag_offset = flag % 32 -- masks off upper 3 bits
+ frag_offset = (frag_offset * 256) + string.byte(bufftbl[8])
+ flag = floor(flag / 32) -- shift right 5 bits, leaving the 3 flag bits
+ flag = flag % 2 -- mask upper bits
+ if flag == 1 or frag_offset > 0 then
+ -- we have a fragmented IPv4 packet, so no proto header
+ -- only save first 20 bytes (the IP header)
+ for i=bufflen, 21, -1 do
+ bufftbl[i] = nil
+ end
+ bufflen = 20
+ else
+ -- only save first 20 + proto size bytes
+ local save
+ if bufftbl[10] == PROTO_UDP then
+ save = 28
+ elseif bufftbl[10] == PROTO_TCP then
+ save = 40
+ else
+ dprint("failed to fix raw packet overlap")
+ return
+ end
+ for i=bufflen, save+1, -1 do
+ bufftbl[i] = nil
+ end
+ bufflen = save
+ end
+ end
+ -- TODO: IPv6
+
+ -- now read in rest of message, if any
+ -- first skip extra empty newline
+ if #line == 0 then
+ line = file:read()
+ end
+
+ bufflen = bufflen + self:get_ascii_data(file, line, bufftbl, bufflen+1, true)
+
+ frame.data = table.concat(bufftbl)
+
+ return true
+end
+
+----------------------------------------
+-- DataPacket class, for packets that the log file contains just the payload data for
+--
+local DataPacket = {}
+local DataPacket_mt = { __index = DataPacket }
+setmetatable( DataPacket, Packet_mt ) -- make DataPacket inherit from Packet
+
+function DataPacket.new(...)
+ local new_class = Packet.new(...) -- the new instance
+ setmetatable( new_class, DataPacket_mt ) -- all instances share the same metatable
+ return new_class
+end
+
+function DataPacket:set_tcbkey(key)
+ self["tcbkey"] = key
+ return
+end
+
+function DataPacket:build_ipv4_hdr(bufflen, proto, seeking)
+ local len = bufflen + 20 -- 20 byte IPv4 header size
+
+ -- figure out the ip identification value
+ local ip_ident
+ if seeking then
+ ip_ident = self.state.packets[self.file_position][IP_IDENT]
+ else
+ -- increment ident value
+ self.state.ip_ident = self.state.ip_ident + 1
+ if self.state.ip_ident == 65536 then
+ self.state.ip_ident = 1
+ end
+ ip_ident = self.state.ip_ident
+ -- save it for future seeking
+ self.state.packets[self.file_position][IP_IDENT] = ip_ident
+ end
+
+ -- use a table to concatenate as it's slightly faster that way
+ local hdrtbl = {
+ "\69\00", -- 1=ipv4 and 20 byte header length
+ dec2bin16(len), -- 2=packet length bytes
+ dec2bin16(ip_ident), -- 3=ident field bytes
+ "\00\00\64", -- 4=flags/fragment offset, ttl
+ proto, -- 5=proto
+ "\00\00", -- 6=checksum (using zero for now)
+ iptobytes(self.source_ip), -- 7=source ip
+ iptobytes(self.dest_ip) -- 8=dest ip
+ }
+
+ -- calc IPv4 header checksum, and set its value
+ hdrtbl[6] = checksum(table.concat(hdrtbl))
+
+ return table.concat(hdrtbl)
+end
+
+function DataPacket:build_ipv6_hdr(bufflen, proto)
+ -- use a table to concatenate as it's slightly faster that way
+ local hdrtbl = {
+ "\96\00\00\00", -- 1=ipv6 version, class, label
+ dec2bin16(bufflen), -- 2=packet length bytes
+ proto .. "\64", -- 4=proto, ttl
+ ipv6tobytes(self.source_ip), -- 5=source ip
+ ipv6tobytes(self.dest_ip) -- 6=dest ip
+ }
+ return table.concat(hdrtbl)
+end
+
+-- calculates TCP/UDP header checksums with pseudo-header info
+function DataPacket:calc_header_checksum(bufftbl, bufflen, hdrtbl, proto)
+ -- first create pseudo IP header
+ if self.ptype == IPv4 then
+ local iphdrtbl = {
+ iptobytes(self.source_ip), -- 1=source ip
+ iptobytes(self.dest_ip), -- 2=dest ip
+ "\00", -- zeros
+ proto, -- proto
+ dec2bin16(bufflen) -- payload length bytes
+ }
+ bufftbl[1] = table.concat(iphdrtbl)
+ elseif self.ptype == IPv6 then
+ local iphdrtbl = {
+ ipv6tobytes(self.source_ip), -- 1=source ip
+ ipv6tobytes(self.dest_ip), -- 2=dest ip
+ "\00\00", -- zeroes
+ dec2bin16(bufflen), -- payload length bytes
+ "\00\00\00", -- zeros
+ proto -- proto
+ }
+ bufftbl[1] = table.concat(iphdrtbl)
+ end
+
+ -- and pseudo TCP or UDP header
+ bufftbl[2] = table.concat(hdrtbl)
+
+ -- see if payload is odd length
+ local odd = false
+ if bufflen % 2 == 1 then
+ -- odd number of payload bytes, add zero byte at end
+ odd = true -- remember to undo this
+ bufftbl[#bufftbl+1] = "\00"
+ end
+
+ local result = checksum(table.concat(bufftbl))
+
+ -- remove pseudo-headers
+ bufftbl[1] = nil
+ bufftbl[2] = nil
+ if odd then
+ bufftbl[#bufftbl] = nil
+ end
+
+ return result
+end
+
+
+function DataPacket:build_udp_hdr(bufflen, bufftbl)
+ local len = bufflen + 8 -- 8 for size of UDP header
+ local hdrtbl = {
+ dec2bin16(self.source_port), -- 1=source port bytes
+ dec2bin16(self.dest_port), -- 2=dest port bytes
+ dec2bin16(len), -- 3=payload length bytes
+ "\00\00" -- 4=checksum
+ }
+ if bufftbl then
+ -- calc udp checksum (only done for IPv6)
+ hdrtbl[4] = self:calc_header_checksum(bufftbl, len, hdrtbl, PROTO_UDP)
+ end
+ return table.concat(hdrtbl)
+end
+
+
+function DataPacket:build_tcp_hdr(bufflen, bufftbl, seeking)
+ local len = bufflen + 20 -- 20 for size of TCP header
+
+ local local_seq, remote_seq
+ if seeking then
+ local_seq = self.state.packets[self.file_position][LOCAL_SEQ]
+ remote_seq = self.state.packets[self.file_position][REMOTE_SEQ]
+ else
+ -- find socket/tcb info for this "stream", create if not found
+ if not self.state.tcb[self.tcbkey] then
+ -- create them
+ self.state.tcb[self.tcbkey] = {}
+ local_seq = 1
+ remote_seq = 1
+ self.state.packets[self.file_position][LOCAL_SEQ] = 1
+ self.state.packets[self.file_position][REMOTE_SEQ] = 1
+ -- set tcb to next sequence numbers, so that the correct "side"
+ -- acknowledges receiving these bytes
+ if self.direction == SENT then
+ -- this packet is being sent, so local sequence increases next time
+ self.state.tcb[self.tcbkey][TLOCAL_SEQ] = bufflen+1
+ self.state.tcb[self.tcbkey][TREMOTE_SEQ] = 1
+ else
+ -- this packet is being received, so remote sequence increases next time
+ -- and local side will acknowledge it next time
+ self.state.tcb[self.tcbkey][TLOCAL_SEQ] = 1
+ self.state.tcb[self.tcbkey][TREMOTE_SEQ] = bufflen+1
+ end
+ else
+ -- stream already exists, so send the current tcb seqs and update for next time
+ if self.direction == SENT then
+ -- this packet is being sent, so local sequence increases next time
+ local_seq = self.state.tcb[self.tcbkey][TLOCAL_SEQ]
+ remote_seq = self.state.tcb[self.tcbkey][TREMOTE_SEQ]
+ self.state.tcb[self.tcbkey][TLOCAL_SEQ] = local_seq + bufflen
+ else
+ -- this packet is being received, so the "local" seq number of the packet is the remote's seq really
+ local_seq = self.state.tcb[self.tcbkey][TREMOTE_SEQ]
+ remote_seq = self.state.tcb[self.tcbkey][TLOCAL_SEQ]
+ -- and remote seq needs to increase next time (remember local_seq is TREMOTE_SEQ)
+ self.state.tcb[self.tcbkey][TREMOTE_SEQ] = local_seq + bufflen
+ end
+ self.state.packets[self.file_position][LOCAL_SEQ] = local_seq
+ self.state.packets[self.file_position][REMOTE_SEQ] = remote_seq
+ end
+ end
+
+ local hdrtbl = {
+ dec2bin16(self.source_port), -- 1=source port bytes
+ dec2bin16(self.dest_port), -- 2=dest port bytes
+ dec2bin32(local_seq), -- 3=sequence
+ dec2bin32(remote_seq), -- 4=ack number
+ "\80\16\255\255", -- 5=offset, flags, window size
+ "\00\00", -- 6=checksum
+ "\00\00" -- 7=urgent pointer
+ }
+
+ -- calc tcp checksum
+ hdrtbl[6] = self:calc_header_checksum(bufftbl, len, hdrtbl, PROTO_TCP)
+
+ return table.concat(hdrtbl)
+end
+
+function DataPacket:build_packet(bufftbl, bufflen, seeking)
+ dprint2("DataPacket:build_packet() called with ptype=",self.ptype)
+ if self.ptype == IPv4 then
+ if self.ttype == UDP then
+ bufftbl[2] = self:build_udp_hdr(bufflen)
+ bufftbl[1] = self:build_ipv4_hdr(bufflen + 8, PROTO_UDP, seeking)
+ elseif self.ttype == TCP then
+ bufftbl[2] = self:build_tcp_hdr(bufflen, bufftbl, seeking)
+ bufftbl[1] = self:build_ipv4_hdr(bufflen + 20, PROTO_TCP, seeking)
+ end
+ elseif self.ptype == IPv6 then
+ -- UDP for IPv6 requires checksum calculation, so we can't avoid more work
+ if self.ttype == UDP then
+ bufftbl[2] = self:build_udp_hdr(bufflen, bufftbl)
+ bufftbl[1] = self:build_ipv6_hdr(bufflen + 8, PROTO_UDP)
+ elseif self.ttype == TCP then
+ bufftbl[2] = self:build_tcp_hdr(bufflen, bufftbl, seeking)
+ bufftbl[1] = self:build_ipv6_hdr(bufflen + 20, PROTO_TCP)
+ end
+ else
+ dprint("DataPacket:build_packet: invalid packet type (neither IPv4 nor IPv6)")
+ return nil
+ end
+
+ return table.concat(bufftbl)
+end
+
+-- for performance, we read each line into a table and concatenate it at end
+-- but it makes this code super ugly
+function DataPacket:read_data(file, frame, line, seeking)
+ local bufftbl = { "", "" } -- 2 slots for ip and udp/tcp headers
+ local index = 3 -- start at third slot in array
+ local comment -- for any packet comments
+
+ dprint2("DataPacket: read_data(): calling get_data")
+ local bufflen = self:get_data(file, line, bufftbl, index)
+ if not bufflen then
+ dprint("DataPacket: error getting ascii or binary data")
+ return false
+ end
+
+ local buff = self:build_packet(bufftbl, bufflen, seeking)
+
+ frame.data = buff
+
+ return true
+end
+
+
+----------------------------------------
+-- BinPacket class, for packets that the log file contains binary payload data for, such as MBCD
+--
+local BinPacket = {}
+local BinPacket_mt = { __index = BinPacket }
+setmetatable( BinPacket, DataPacket_mt ) -- make BinPacket inherit from DataPacket
+
+function BinPacket.new(...)
+ local new_class = DataPacket.new(...) -- the new instance
+ setmetatable( new_class, BinPacket_mt ) -- all instances share the same metatable
+ return new_class
+end
+
+function BinPacket:get_comment_data(file, line, stop_pattern)
+ local comments = {}
+
+ while line and not line:find(stop_pattern) do
+ if #line > 0 then
+ comments[#comments+1] = line
+ comments[#comments+1] = "\r\n"
+ end
+ line = file:read()
+ end
+
+ if #comments > 0 then
+ -- get rid of extra "\r\n"
+ comments[#comments] = nil
+ self:set_comment(table.concat(comments))
+ end
+
+ return line
+end
+
+function BinPacket:get_data(file, line, bufftbl, index)
+ local is_alg = false
+
+ local bufflen, line = self:get_hex_data(file, line, bufftbl, index)
+
+ -- now eat rest of message until delimiter or end of file
+ -- we'll put them in comments
+ line = self:get_comment_data(file, line, delim)
+
+ -- return the bufflen, which is the same as number of table entries we made
+ return bufflen
+end
+
+----------------------------------------
+-- DnsPacket class, for DNS packets (which are binary but with comments at top)
+--
+local DnsPacket = {}
+local DnsPacket_mt = { __index = DnsPacket }
+setmetatable( DnsPacket, BinPacket_mt ) -- make DnsPacket inherit from BinPacket
+
+function DnsPacket.new(...)
+ local new_class = BinPacket.new(...) -- the new instance
+ setmetatable( new_class, DnsPacket_mt ) -- all instances share the same metatable
+ return new_class
+end
+
+local binpacket_start_pattern = "^ 0000: %x%x %x%x %x%x %x%x %x%x %x%x %x%x %x%x "
+function DnsPacket:get_data(file, line, bufftbl, index)
+ -- it's UDP regardless of what parse_header() thinks
+ self.ttype = UDP
+
+ -- comments are at top instead of bottom of message
+ line = self:get_comment_data(file, line, binpacket_start_pattern)
+
+ local bufflen, line = self:get_hex_data(file, line, bufftbl, index)
+
+ -- now eat rest of message until delimiter or end of file
+ while line and not line:find(delim) do
+ line = file:read()
+ end
+
+ -- return the bufflen, which is the same as number of table entries we made
+ return bufflen
+end
+
+----------------------------------------
+-- AsciiPacket class, for packets that the log file contains ascii payload data for
+--
+local AsciiPacket = {}
+local AsciiPacket_mt = { __index = AsciiPacket }
+setmetatable( AsciiPacket, DataPacket_mt ) -- make AsciiPacket inherit from DataPacket
+
+function AsciiPacket.new(...)
+ local new_class = DataPacket.new(...) -- the new instance
+ setmetatable( new_class, AsciiPacket_mt ) -- all instances share the same metatable
+ return new_class
+end
+
+function AsciiPacket:get_data(file, line, bufftbl, index)
+ return self:get_ascii_data(file, line, bufftbl, index)
+end
+
+
+----------------------------------------
+-- To determine packet type, we peek at the first line of 'data' following the log
+-- message header. Its pattern determines the Packet object type.
+-- The following are the patterns we look for; if it doesn't match one of these,
+-- then it's an AsciiPacket:
+local packet_patterns = {
+ { "^ 0000: %x%x %x%x %x%x %x%x %x%x %x%x %x%x %x%x ", BinPacket },
+ { "^Packet:$", RawPacket },
+ { "^DNS Query %d+ flags=%d+ q=%d+ ans=%d+", DnsPacket },
+ { "^DNS Response %d+ flags=%d+ q=%d+ ans=%d+", DnsPacket }
+}
+-- indices for the above
+local PP_PATTERN = 1
+local PP_CLASS = 2
+
+local function get_packet_class(line)
+ for i, t in ipairs(packet_patterns) do
+ if line:find(t[PP_PATTERN]) then
+ dprint2("got class type=",i)
+ return t[PP_CLASS]
+ end
+ end
+ dprint2("got class type AsciiPacket")
+ return AsciiPacket
+end
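+-- For example, a line of just "Packet:" selects RawPacket, a " 0000: ..." hex-dump line
+-- selects BinPacket, a "DNS Response ..." summary selects DnsPacket, and anything else
+-- (such as a SIP request line) falls through to AsciiPacket.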
+
+----------------------------------------
+-- parses header line
+-- returns nil on failure
+-- the header lines look like this:
+-- Aug 10 14:30:11.134 On [1:544]10.201.145.237:5060 received from 10.210.1.193:5060
+-- this one has no phy/vlan info in brackets:
+-- Mar 6 13:39:06.122 On 127.0.0.1:2945 sent to 127.0.0.1:2944
+-- this one is IPv6:
+-- Aug 10 14:30:11.140 On [3:0][2620:0:60:8ac::102]:5060 sent to [2620:0:60:8ab::12]:5060
+-- this is from a tail'ed log output:
+-- 52:22.434 On [0:0]205.152.56.211:5060 received from 205.152.56.75:5060
+local loopback_pattern = "^127%.0%.0%.%d+$"
+local function parse_header(state, file, line, file_position, seeking)
+
+ if seeking then
+ -- verify we've seen this packet before
+ if not state.packets[file_position] then
+ dprint("parse_header: packet at file position ", file_position, " not saved previously")
+ return
+ end
+ else
+ -- first time through, create sub-table for the packet
+ state.packets[file_position] = {}
+ end
+
+ -- get time info, and line match ending position
+ local timestamp, line_pos = state:get_timestamp(line, file_position, seeking)
+ if not timestamp then
+ -- see if it's a tail'ed log instead
+ timestamp, line_pos = state:get_tail_time(line, file_position, seeking)
+ end
+
+ if not timestamp then
+ dprint("parse_header: could not parse time portion")
+ return
+ end
+
+ local ptype, ttype = IPv4, UDP
+
+ -- get phy/vlan if present
+ -- first skip past time portion
+ local phy, vlan, i, j, k
+ line_pos = line_pos + 1
+ i, j, phy, vlan = line:find(header_phy_pattern, line_pos)
+ if i then
+ phy = tonumber(phy)
+ vlan = tonumber(vlan)
+ line_pos = j -- skip past this portion for next match
+ else
+ -- if there's no phy/vlan info, then assume it's TCP (unless it's a loopback address, which we check later)
+ ttype = TCP
+ end
+
+ -- get addresses and direction
+ local local_ip, local_port, direction, remote_ip, remote_port = line:match(header_address_pattern, line_pos)
+ if not local_ip then
+ -- try IPv6
+ local_ip, local_port, direction, remote_ip, remote_port = line:match(header_v6address_pattern, line_pos)
+ if not local_ip then
+ dprint("parse_header: could not parse address portion")
+ return nil
+ end
+ ptype = IPv6
+ end
+
+ if local_ip:find(loopback_pattern) and remote_ip:find(loopback_pattern) then
+ -- internal loopback packets never have phy/vlan but are always UDP messages (for all intents and purposes)
+ ttype = UDP
+ end
+
+ -- override above decisions based on configuration
+ if ALWAYS_UDP then
+ ttype = UDP
+ end
+
+ direction = get_direction(direction)
+ if direction == nil then
+ dprint("parse_header: failed to convert direction")
+ return nil
+ end
+
+ local source_ip, source_port, dest_ip, dest_port = local_ip, local_port, remote_ip, remote_port
+ if direction == RECV then
+ -- swap them
+ source_ip, source_port, dest_ip, dest_port = remote_ip, remote_port, local_ip, local_port
+ end
+ -- convert
+ source_port = tonumber(source_port)
+ dest_port = tonumber(dest_port)
+
+ -- peek at next line to determine packet type
+ local position = file:seek()
+ line = file:read()
+ dprint2("parse_header: peeking at line='", line, "'")
+ local packet_class = get_packet_class(line)
+ file:seek("set", position) -- go back
+
+ dprint2("parse_header calling packet_class.new with:",
+ tostring(timestamp), direction, source_ip, source_port,
+ dest_ip, dest_port, ptype, ttype, file_position)
+
+ local packet = packet_class.new(state, timestamp, direction, source_ip, source_port, dest_ip, dest_port, ptype, ttype, file_position)
+ if not packet then
+ dprint("parse_header: parser failed to create Packet object")
+ end
+
+ if ttype == TCP then
+ -- if the packet is tcp type, then set the key for TCB table lookup
+ packet:set_tcbkey(table.concat({ "[", local_ip, "]:", local_port, "->[", remote_ip, "]:", remote_port }))
+ end
+
+ return packet
+end
+
+
+----------------------------------------
+-- file handling functions for Wireshark to use
+
+-- The read_open is called by Wireshark once per file, to see if the file is this reader's type.
+-- It passes in (1) a File and (2) CaptureInfo object to this function
+-- Since there is no exact magic sequence to search for, we have to use heuristics to guess if the file
+-- is our type or not, which we do by parsing a message header.
+-- Since Wireshark uses the file cursor position for future reading of this file, we also have to seek back to the beginning
+-- so that our normal read() function works correctly.
+local function read_open(file, capture)
+ dprint2("read_open called")
+ -- save current position to return later
+ local position = file:seek()
+
+ local line = file:read()
+ if not line then return false end
+
+ dprint2("read_open: got this line begin:\n'", line, "'")
+
+ line, position = skip_ahead(file, line, position)
+ if not line then return false end
+
+ dprint2("read_open: got this line after skip:\n'", line, "', with position=", position)
+
+ local state = State.new()
+
+ if parse_header(state, file, line, position) then
+ dprint2("read_open success")
+
+ file:seek("set",position)
+
+ capture.time_precision = wtap_filetypes.TSPREC_MSEC -- for millisecond precision
+ capture.encap = wtap.RAW_IP -- whole file is raw IP format
+ capture.snapshot_length = 0 -- unknown snaplen
+ capture.comment = "Oracle Acme Packet SBC message log"
+ capture.os = "VxWorks or Linux"
+ capture.hardware = "Oracle Acme Packet SBC"
+
+ -- reset state variables
+ capture.private_table = State.new()
+
+ dprint2("read_open returning true")
+ return true
+ end
+
+ dprint2("read_open returning false")
+ return false
+end
+
+----------------------------------------
+-- this is used by both read() and seek_read()
+local function read_common(funcname, file, capture, frame, position, seeking)
+ dprint2(funcname, "read_common called")
+ local state = capture.private_table
+
+ if not state then
+ dprint(funcname, "error getting capture state")
+ return false
+ end
+
+ local line = file:read()
+ if not line then
+ dprint(funcname, "hit end of file")
+ return false
+ end
+ line, position = skip_ahead(file, line, position)
+ if not line then
+ if file:read(0) ~= nil then
+ dprint(funcname, "did not hit end of file after skipping but ending anyway")
+ else
+ dprint2(funcname, "hit end of file after skipping")
+ end
+ return false
+ end
+
+ dprint2(funcname, ": parsing line='", line, "'")
+ local phdr = parse_header(state, file, line, position, seeking)
+ if not phdr then
+ dprint(funcname, "failed to parse header")
+ return false
+ end
+
+ line = file:read()
+
+ dprint2(funcname,": calling class object's read_data()")
+ phdr:read_data(file, frame, line, seeking)
+
+ if not phdr:set_wslua_fields(frame) then
+ dprint(funcname, "failed to set Wireshark packet header info")
+ return
+ end
+
+ dprint2(funcname, "read_common returning position")
+ return position
+end
+
+----------------------------------------
+-- Wireshark/tshark calls read() for each frame/record in the file
+-- It passes in (1) a File, (2) CaptureInfo, and (3) a FrameInfo object to this function
+-- It expects in return the file offset position the record starts at,
+-- or nil/false if there's an error or end-of-file is reached.
+-- The offset position is used later: wireshark remembers it and gives
+-- it to seek_read() at various random times
+local function read(file, capture, frame)
+ dprint2("read called")
+ local position = file:seek()
+ position = read_common("read", file, capture, frame, position)
+ if not position then
+ if file:read(0) ~= nil then
+ dprint("read failed to call read_common")
+ else
+ dprint2("read: reached end of file")
+ end
+ return false
+ end
+ return position
+end
+
+----------------------------------------
+-- Wireshark/tshark calls seek_read() for each frame/record in the file, at random times
+-- It passes in (1) File, (2) CaptureInfo, (3) FrameInfo, and (4) the offset position number
+-- It expects in return true for successful parsing, or nil/false if there's an error.
+local function seek_read(file, capture, frame, offset)
+ dprint2("seek_read called")
+ file:seek("set",offset)
+ if not read_common("seek_read", file, capture, frame, offset, true) then
+ dprint("seek_read failed to call read_common")
+ return false
+ end
+ return true
+end
+
+----------------------------------------
+-- Wireshark/tshark calls read_close() when it's closing the file completely
+-- It passes in (1) a File and (2) CaptureInfo object to this function
+-- this is a good opportunity to clean up any state you may have created during
+-- file reading.
+-- In our case there *is* state to reset, but we only saved it in
+-- the capture.private_table, so Wireshark will clean it up for us.
+local function read_close(file, capture)
+ dprint2("read_close called")
+ return true
+end
+
+----------------------------------------
+-- An often unused function, Wireshark calls this when the sequential walk-through is over
+-- (i.e., no more calls to read(), only to seek_read()).
+-- It passes in (1) a File and (2) CaptureInfo object to this function
+-- In our case there *is* some state to reset, but we only saved it in
+-- the capture.private_table, so Wireshark will clean it up for us.
+local function seq_read_close(file, capture)
+ dprint2("seq_read_close called")
+ return true
+end
+
+-- set above functions to the FileHandler
+fh.read_open = read_open
+fh.read = read
+fh.seek_read = seek_read
+fh.read_close = read_close
+fh.seq_read_close = seq_read_close
+fh.extensions = "log" -- this is just a hint
+
+-- and finally, register the FileHandler!
+register_filehandler(fh)
diff --git a/test/lua/add_packet_field.lua b/test/lua/add_packet_field.lua
new file mode 100644
index 0000000..d1a2a6c
--- /dev/null
+++ b/test/lua/add_packet_field.lua
@@ -0,0 +1,899 @@
+--[[
+ The tree:add_packet_field() method returns a value and offset in addition to a tree item.
+ This file tests whether the value and offset are correct. As for the value,
+ its correctness is tested in several ways for a given input.
+
+ 1. The returned value should match a precomputed value
+
+ 2. The returned value should match the value obtained from a Field object
+ right after tree:add_packet_field() is called
+
+ 3. The returned value should match the value obtained from a Field object
+ right after tree:add() is called with the same input as tree:add_packet_field()
+
+ 4. The returned value should match the value obtained from the corresponding value function
+ called on the input tvbrange
+
+ There are some incompatibilities and limitations due to handling of encodings.
+ Incompatibilities are noted with the text INCOMPATIBILITY in a nearby comment.
+]]
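+
+--[[
+ A rough sketch of the call under test (the names pfield, tvbuf and len here are
+ placeholders, not part of this file):
+
+ local item, value, offset = tree:add_packet_field(pfield, tvbuf:range(0, len), ENC_BIG_ENDIAN)
+
+ The returned 'value' is what gets compared against the precomputed value, the Field
+ object's value, and the matching TvbRange accessor (e.g. tvbrange:uint()).
+]]--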
+
+local field_setup = require "field_setup"
+
+--[[
+ This dissector expects a capture with at least one packet on UDP 65333.
+ All the actual test data is synthetic.
+]]--
+local myproto = Proto("test", "Test")
+
+field_data = field_setup(myproto, "test")
+
+function hexlify_string(s)
+ local sep = ""
+ local hx = ""
+ for i=1,#s do
+ hx = hx .. sep .. string.format("%02x", s:byte(i))
+ sep = " "
+ end
+ return hx
+end
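+-- e.g. hexlify_string("AB") returns "41 42"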
+
+--[[
+ Ensure the value is represented in a way that shows up when printed.
+ It is assumed the string representation is relatively short.
+
+ The test suite will report an error if we print invalid utf8 for any reason.
+ We work around this by passing a substitution string used when the real
+ string has invalid utf8. We also print the output bytes in hex after the string,
+ and those bytes are always faithful to the real output.
+]]--
+function format_value_for_print(v, substitution)
+ local t = type(v)
+ local s
+ if t == "string" then
+ local hx = hexlify_string(v)
+ if substitution ~= nil then
+ s = string.format("(invalid utf8) \"%s\" [%s]", substitution, hx)
+ else
+ s = string.format("\"%s\" [%s]", v, hx)
+ end
+
+ else
+ s = tostring(v)
+ end
+ return string.format("(%s) %s", type(v), s)
+end
+
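+-- render the encoding flags as a short human-readable tag,
+-- e.g. ENC_UTF_16 + ENC_LITTLE_ENDIAN prints as "UTF-16 LE"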
+function format_encoding_for_print(enc)
+
+ local char_enc = "ASCII"
+ if bit.band(enc, ENC_UTF_16) ~= 0 then
+ char_enc = "UTF-16"
+ end
+
+    local end_enc = "BE"
+    if bit.band(enc, ENC_LITTLE_ENDIAN) ~= 0 then
+        end_enc = "LE"
+ end
+
+ if enc == ENC_ISO_8601_DATE_TIME then
+ char_enc = "ISO_8601"
+ end_enc = "-"
+ end
+
+ return string.format("%s %s", char_enc, end_enc)
+end
+
+function print_test_data(test_data)
+ print(string.format("TEST: using field type: %s", test_data.field_type))
+ if test_data.hexlify then
+ print(string.format("TEST: input was hexlified from: \"%s\"", test_data.original_input))
+ end
+ print(string.format("TEST: using data: [%s]", test_data.input))
+ print(string.format("TEST: using offset: %d", test_data.input_offset))
+ print(string.format("TEST: using encoding: %s", format_encoding_for_print(test_data.encoding)))
+ print()
+end
+
+function general_equality_test(a, b)
+ return a == b
+end
+
+--equal or both nan
+function float_equality_test(a, b)
+ return a == b or (a ~= a and b ~= b)
+end
+
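+--[[
+    Calling a Field object returns every value extracted for that field in the current
+    packet so far; the last entry is the one produced by the most recent tree:add() or
+    tree:add_packet_field() call, which is what the tests compare against.
+]]--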
+function recent_field_value(t)
+ local values = {field_data[t].value_field()}
+ return values[#values].value
+end
+
+function add_packet_field_returns_precomputed_value(test_data)
+
+ print(string.format(" EXPECT: precomputed return value: %s", format_value_for_print(test_data.expect_precomputed)))
+ print(string.format(" OUTPUT: add_packet_field returned value: %s", format_value_for_print(test_data.returned_value)))
+
+ if test_data.equality_function(test_data.returned_value, test_data.expect_precomputed) then
+ print(" PASS: the return value is correct")
+ print()
+ return true
+ end
+
+ print(" FAIL: the returned value is incorrect")
+ print()
+ return false
+end
+
+function add_packet_field_then_value_field_returns_expected_value(test_data)
+
+ print(string.format(" EXPECT: value field value %s", format_value_for_print(test_data.expect_add_pf_field_value)))
+ print(string.format(" OUTPUT: value field after tree:add_packet_field() returned: %s",
+ format_value_for_print(test_data.returned_add_pf_field_value)))
+
+ local incompatible = test_data.expect_add_pf_field_value ~= test_data.expect_precomputed
+ if incompatible then
+ print(" WARNING: the value field does not return the same value as the other implementations")
+ end
+ if test_data.equality_function(test_data.returned_add_pf_field_value, test_data.expect_add_pf_field_value) then
+ print(" PASS: the value field is correct")
+ print()
+ return true
+ end
+
+ print(" FAIL: the value field is incorrect")
+ print()
+ return false
+end
+
+function tree_add_then_value_field_returns_expected_value(test_data)
+
+ if test_data.skip_tree_add_test then
+ print(" SKIP: " .. test_data.skip_tree_add_test_message)
+ print()
+ return true
+ end
+
+ print(string.format(" EXPECT: value field value %s", format_value_for_print(test_data.expect_add_field_value)))
+ print(string.format(" OUTPUT: value field after tree:add() returned: %s",
+ format_value_for_print(test_data.returned_add_field_value)))
+
+ local incompatible = test_data.expect_add_field_value ~= test_data.expect_precomputed
+ if incompatible then
+ print(" WARNING: the value field does not return the same value as the other implementations")
+ end
+ if test_data.equality_function(test_data.returned_add_field_value, test_data.expect_add_field_value) then
+ print(" PASS: the value field is correct")
+ print()
+ return true
+ end
+
+ print(" FAIL: the value field is incorrect")
+ print()
+ return false
+
+end
+
+--[[
+ The tvbrange:string() function can return invalid utf8 even when the input is valid.
+]]
+function tvbrange_returns_expected_value(test_data)
+
+ if test_data.tvbr_fn == nil then
+ print(" SKIP: no tvbrange function for this field type")
+ print()
+ return true
+ end
+
+ local tvbr_value, tvbr_fn_printable = test_data.tvbr_fn(test_data.input_tvbrange, test_data.encoding)
+ local pass = test_data.equality_function(tvbr_value, test_data.expect_tvbrange_value)
+ local incompatible = test_data.expect_tvbrange_value ~= test_data.expect_precomputed
+ local tvbr_value_printable = format_value_for_print(tvbr_value)
+ local expect_value_printable = format_value_for_print(test_data.expect_tvbrange_value, test_data.expect_tvbrange_value_printable)
+ if pass then
+ --if the outputs are equal, then the substitute is useable for both
+ tvbr_value_printable = format_value_for_print(tvbr_value, test_data.expect_tvbrange_value_printable)
+ end
+
+ print(string.format(" TEST: using tvbrange function %s", tvbr_fn_printable))
+ print(string.format(" EXPECT: tvbrange value %s", expect_value_printable))
+ print(string.format(" OUTPUT: tvbrange returned %s", tvbr_value_printable))
+ if incompatible then
+ print(" WARNING: the tvbr function is not compatible with the other implementations")
+ end
+
+ if pass then
+        print("    PASS: the tvbr function works as expected")
+ print()
+ return true
+ end
+
+    print("    FAIL: the tvbr function did not work as expected")
+ print()
+ return false
+end
+
+function add_packet_field_returns_correct_offset(test_data)
+
+ print(string.format(" EXPECT: offset %d", test_data.expect_offset))
+ print(string.format(" OUTPUT: add_packet_field returned offset %d", test_data.returned_offset))
+
+ if test_data.returned_offset == test_data.expect_offset then
+ print(" PASS: the returned offset is correct")
+ print()
+ return true
+ end
+
+ print(" FAIL: the returned offset is incorrect")
+ print()
+ return false
+end
+
+function add_packet_field_all_tests(tree, test_data)
+ print_test_data(test_data)
+ local ret = true
+ and add_packet_field_returns_precomputed_value(test_data)
+ and add_packet_field_then_value_field_returns_expected_value(test_data)
+ and tree_add_then_value_field_returns_expected_value(test_data)
+ and tvbrange_returns_expected_value(test_data)
+ and add_packet_field_returns_correct_offset(test_data)
+ return ret
+end
+
+function generate_test_data_for_case(tree, field_type, case, tvbr_fn, equality_function, use_offset)
+
+ local input = case.input
+ if case.hexlify then
+ input = hexlify_string(case.input)
+ end
+
+ local input_byte_length = string.len(input:gsub(" ", "")) / 2
+ local input_offset = 0
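+    -- when use_offset is set, prepend a dummy 0x77 byte so the field under test does not
+    -- start at offset 0, which also exercises the returned-offset arithmetic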
+ if use_offset then
+ input = "77 " .. input
+ input_offset = 1
+ end
+
+ local input_tvb = ByteArray.new(input):tvb()
+ local input_tvbrange
+
+ if case.fake_input_length == nil then
+ input_tvbrange = input_tvb(input_offset, input_byte_length)
+ else
+ input_tvbrange = input_tvb(input_offset, case.fake_input_length)
+ end
+
+ local t = field_data[field_type]
+ local add_pf_leaf, returned_value, returned_offset = tree:add_packet_field(t.packet_field, input_tvbrange, case.encoding)
+ local add_pf_field_value = recent_field_value(field_type)
+
+ local add_leaf = nil
+ local add_field_value = nil
+ local skip_tree_add_test_message = nil
+ local skip_tree_add_test = false
+
+ if case.encoding == ENC_ASCII + ENC_BIG_ENDIAN then
+ add_leaf = tree:add(t.packet_field, input_tvbrange)
+ add_field_value = recent_field_value(field_type)
+ elseif case.encoding == ENC_ASCII + ENC_LITTLE_ENDIAN then
+ add_leaf = tree:add_le(t.packet_field, input_tvbrange)
+ add_field_value = recent_field_value(field_type)
+ else
+ skip_tree_add_test = true
+ skip_tree_add_test_message = "tree:add() only uses ASCII encoding"
+ end
+
+ local expect_add_pf_field_value = case.output
+ if case.incompatible_add_pf_field then
+ expect_add_pf_field_value = case.expect_add_pf_field_value
+ end
+
+ local expect_add_field_value = case.output
+ if case.incompatible_add_field then
+ expect_add_field_value = case.expect_add_field_value
+ end
+
+ local expect_tvbrange_value = case.output
+ if case.incompatible_tvbrange then
+ expect_tvbrange_value = case.expect_tvbrange_value
+ end
+
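+    -- the expected offset is the position just past the bytes the field consumed;
+    -- variable-length cases (stringz, ISO 8601 strings) supply the consumed byte
+    -- count explicitly as case.input_length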
+ local expect_offset = input_byte_length + input_offset
+ if case.variable_input_length then
+ expect_offset = case.input_length + input_offset
+ end
+
+ return {
+ field_type = field_type,
+ hexlify = case.hexlify,
+ original_input = case.input,
+ input = input,
+ input_offset = input_offset,
+ input_tvbrange = input_tvbrange,
+ encoding = case.encoding,
+
+ returned_value = returned_value,
+ returned_offset = returned_offset,
+ returned_add_pf_field_value = add_pf_field_value,
+ returned_add_field_value = add_field_value,
+
+ tvbr_fn = tvbr_fn,
+ equality_function = equality_function,
+ expect_precomputed = case.output,
+ expect_add_pf_field_value = expect_add_pf_field_value,
+
+ expect_add_field_value = expect_add_field_value,
+ skip_tree_add_test = skip_tree_add_test,
+ skip_tree_add_test_message = skip_tree_add_test_message,
+
+ expect_tvbrange_value = expect_tvbrange_value,
+ expect_tvbrange_value_printable = case.expect_tvbrange_value_printable,
+ expect_offset = expect_offset
+ }
+end
+
+function run_test_cases_all_tests(tree, field_type, test_cases, tvbr_fn, equality_function)
+ local test_data
+ for _ , case in ipairs(test_cases) do
+ test_data = generate_test_data_for_case(tree, field_type, case, tvbr_fn, equality_function, true)
+ if not add_packet_field_all_tests(tree, test_data) then
+ return false
+ end
+
+ test_data = generate_test_data_for_case(tree, field_type, case, tvbr_fn, equality_function, false)
+ if not add_packet_field_all_tests(tree, test_data) then
+ return false
+ end
+ end
+
+ return true
+end
+
+function simple_integer_tests(tree)
+ local uint8_test_cases = {
+ {input = "ff", encoding = ENC_LITTLE_ENDIAN, output = 0xff},
+ {input = "00", encoding = ENC_LITTLE_ENDIAN, output = 0x00},
+ {input = "ff", encoding = ENC_BIG_ENDIAN, output = 0xff},
+ {input = "00", encoding = ENC_BIG_ENDIAN, output = 0x00},
+ }
+
+ local uint16_test_cases = {
+ {input = "ff 00", encoding = ENC_LITTLE_ENDIAN, output = 0x00ff},
+ {input = "00 ff", encoding = ENC_LITTLE_ENDIAN, output = 0xff00},
+ {input = "ff 00", encoding = ENC_BIG_ENDIAN, output = 0xff00},
+ {input = "00 ff", encoding = ENC_BIG_ENDIAN, output = 0x00ff},
+ }
+
+ local uint24_test_cases = {
+ {input = "ff 00 00", encoding = ENC_LITTLE_ENDIAN, output = 0x0000ff},
+ {input = "00 ff 00", encoding = ENC_LITTLE_ENDIAN, output = 0x00ff00},
+ {input = "00 00 ff", encoding = ENC_LITTLE_ENDIAN, output = 0xff0000},
+ {input = "ff 00 00", encoding = ENC_BIG_ENDIAN, output = 0xff0000},
+ {input = "00 ff 00", encoding = ENC_BIG_ENDIAN, output = 0x00ff00},
+ {input = "00 00 ff", encoding = ENC_BIG_ENDIAN, output = 0x0000ff},
+ }
+
+ local uint32_test_cases = {
+ {input = "ff 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = 0x000000ff},
+ {input = "00 ff 00 00", encoding = ENC_LITTLE_ENDIAN, output = 0x0000ff00},
+ {input = "00 00 ff 00", encoding = ENC_LITTLE_ENDIAN, output = 0x00ff0000},
+ {input = "00 00 00 ff", encoding = ENC_LITTLE_ENDIAN, output = 0xff000000},
+ {input = "ff 00 00 00", encoding = ENC_BIG_ENDIAN, output = 0xff000000},
+ {input = "00 ff 00 00", encoding = ENC_BIG_ENDIAN, output = 0x00ff0000},
+ {input = "00 00 ff 00", encoding = ENC_BIG_ENDIAN, output = 0x0000ff00},
+ {input = "00 00 00 ff", encoding = ENC_BIG_ENDIAN, output = 0x000000ff},
+ }
+
+ function tvbr_uint (tvbr, encoding)
+ if encoding == ENC_LITTLE_ENDIAN then
+ return tvbr:le_uint(), "le_uint()"
+ else
+ return tvbr:uint(), "uint()"
+ end
+ end
+
+ local int8_test_cases = {
+ {input = "ff", encoding = ENC_LITTLE_ENDIAN, output = -0x01},
+ {input = "00", encoding = ENC_LITTLE_ENDIAN, output = 0x00},
+ {input = "ff", encoding = ENC_BIG_ENDIAN, output = -0x01},
+ {input = "00", encoding = ENC_BIG_ENDIAN, output = 0x00},
+ }
+
+ local int16_test_cases = {
+ {input = "ff 00", encoding = ENC_LITTLE_ENDIAN, output = 0x00ff},
+ {input = "00 ff", encoding = ENC_LITTLE_ENDIAN, output = -0x0100},
+ {input = "ff 00", encoding = ENC_BIG_ENDIAN, output = -0x0100},
+ {input = "00 ff", encoding = ENC_BIG_ENDIAN, output = 0x00ff},
+ }
+
+ local int24_test_cases = {
+ {input = "ff 00 00", encoding = ENC_LITTLE_ENDIAN, output = 0x0000ff},
+ {input = "00 ff 00", encoding = ENC_LITTLE_ENDIAN, output = 0x00ff00},
+ {input = "00 00 ff", encoding = ENC_LITTLE_ENDIAN, output = -0x010000},
+ {input = "ff 00 00", encoding = ENC_BIG_ENDIAN, output = -0x010000},
+ {input = "00 ff 00", encoding = ENC_BIG_ENDIAN, output = 0x00ff00},
+ {input = "00 00 ff", encoding = ENC_BIG_ENDIAN, output = 0x0000ff},
+ }
+
+ local int32_test_cases = {
+ {input = "ff 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = 0x000000ff},
+ {input = "00 ff 00 00", encoding = ENC_LITTLE_ENDIAN, output = 0x0000ff00},
+ {input = "00 00 ff 00", encoding = ENC_LITTLE_ENDIAN, output = 0x00ff0000},
+ {input = "00 00 00 ff", encoding = ENC_LITTLE_ENDIAN, output = -0x01000000},
+ {input = "ff 00 00 00", encoding = ENC_BIG_ENDIAN, output = -0x01000000},
+ {input = "00 ff 00 00", encoding = ENC_BIG_ENDIAN, output = 0x00ff0000},
+ {input = "00 00 ff 00", encoding = ENC_BIG_ENDIAN, output = 0x0000ff00},
+ {input = "00 00 00 ff", encoding = ENC_BIG_ENDIAN, output = 0x000000ff},
+ }
+
+ function tvbr_int(tvbr, encoding)
+ if encoding == ENC_LITTLE_ENDIAN then
+ return tvbr:le_int(), "le_int()"
+ else
+ return tvbr:int(), "int()"
+ end
+ end
+
+ return true
+ and run_test_cases_all_tests(tree, "uint8", uint8_test_cases, tvbr_uint, general_equality_test)
+ and run_test_cases_all_tests(tree, "uint16", uint16_test_cases, tvbr_uint, general_equality_test)
+ and run_test_cases_all_tests(tree, "uint24", uint24_test_cases, tvbr_uint, general_equality_test)
+ and run_test_cases_all_tests(tree, "uint32", uint32_test_cases, tvbr_uint, general_equality_test)
+
+ and run_test_cases_all_tests(tree, "int8", int8_test_cases, tvbr_int, general_equality_test)
+ and run_test_cases_all_tests(tree, "int16", int16_test_cases, tvbr_int, general_equality_test)
+ and run_test_cases_all_tests(tree, "int24", int24_test_cases, tvbr_int, general_equality_test)
+ and run_test_cases_all_tests(tree, "int32", int32_test_cases, tvbr_int, general_equality_test)
+end
+
+function integer64_tests(tree)
+
+ local uint64_test_cases = {
+ {input = "ff 00 00 00 00 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = UInt64(0x000000ff, 0x00000000)},
+ {input = "00 ff 00 00 00 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = UInt64(0x0000ff00, 0x00000000)},
+ {input = "00 00 ff 00 00 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = UInt64(0x00ff0000, 0x00000000)},
+ {input = "00 00 00 ff 00 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = UInt64(0xff000000, 0x00000000)},
+ {input = "00 00 00 00 ff 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = UInt64(0x00000000, 0x000000ff)},
+ {input = "00 00 00 00 00 ff 00 00", encoding = ENC_LITTLE_ENDIAN, output = UInt64(0x00000000, 0x0000ff00)},
+ {input = "00 00 00 00 00 00 ff 00", encoding = ENC_LITTLE_ENDIAN, output = UInt64(0x00000000, 0x00ff0000)},
+ {input = "00 00 00 00 00 00 00 ff", encoding = ENC_LITTLE_ENDIAN, output = UInt64(0x00000000, 0xff000000)},
+ {input = "ff 00 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = UInt64(0x00000000, 0xff000000)},
+ {input = "00 ff 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = UInt64(0x00000000, 0x00ff0000)},
+ {input = "00 00 ff 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = UInt64(0x00000000, 0x0000ff00)},
+ {input = "00 00 00 ff 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = UInt64(0x00000000, 0x000000ff)},
+ {input = "00 00 00 00 ff 00 00 00", encoding = ENC_BIG_ENDIAN, output = UInt64(0xff000000, 0x00000000)},
+ {input = "00 00 00 00 00 ff 00 00", encoding = ENC_BIG_ENDIAN, output = UInt64(0x00ff0000, 0x00000000)},
+ {input = "00 00 00 00 00 00 ff 00", encoding = ENC_BIG_ENDIAN, output = UInt64(0x0000ff00, 0x00000000)},
+ {input = "00 00 00 00 00 00 00 ff", encoding = ENC_BIG_ENDIAN, output = UInt64(0x000000ff, 0x00000000)},
+ }
+
+ function tvbr_uint(tvbr, encoding)
+ if encoding == ENC_LITTLE_ENDIAN then
+ return tvbr:le_uint64(), "le_uint64()"
+ else
+ return tvbr:uint64(), "uint64()"
+ end
+ end
+
+ local int64_test_cases = {
+ {input = "ff 00 00 00 00 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = Int64(0x000000ff, 0x00000000)},
+ {input = "00 ff 00 00 00 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = Int64(0x0000ff00, 0x00000000)},
+ {input = "00 00 ff 00 00 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = Int64(0x00ff0000, 0x00000000)},
+ {input = "00 00 00 ff 00 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = Int64(0xff000000, 0x00000000)},
+ {input = "00 00 00 00 ff 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = Int64(0x00000000, 0x000000ff)},
+ {input = "00 00 00 00 00 ff 00 00", encoding = ENC_LITTLE_ENDIAN, output = Int64(0x00000000, 0x0000ff00)},
+ {input = "00 00 00 00 00 00 ff 00", encoding = ENC_LITTLE_ENDIAN, output = Int64(0x00000000, 0x00ff0000)},
+ {input = "00 00 00 00 00 00 00 ff", encoding = ENC_LITTLE_ENDIAN, output = Int64(0x00000000, 0xff000000)},
+ {input = "ff 00 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = Int64(0x00000000, 0xff000000)},
+ {input = "00 ff 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = Int64(0x00000000, 0x00ff0000)},
+ {input = "00 00 ff 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = Int64(0x00000000, 0x0000ff00)},
+ {input = "00 00 00 ff 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = Int64(0x00000000, 0x000000ff)},
+ {input = "00 00 00 00 ff 00 00 00", encoding = ENC_BIG_ENDIAN, output = Int64(0xff000000, 0x00000000)},
+ {input = "00 00 00 00 00 ff 00 00", encoding = ENC_BIG_ENDIAN, output = Int64(0x00ff0000, 0x00000000)},
+ {input = "00 00 00 00 00 00 ff 00", encoding = ENC_BIG_ENDIAN, output = Int64(0x0000ff00, 0x00000000)},
+ {input = "00 00 00 00 00 00 00 ff", encoding = ENC_BIG_ENDIAN, output = Int64(0x000000ff, 0x00000000)},
+ }
+
+ function tvbr_int(tvbr, encoding)
+ if encoding == ENC_LITTLE_ENDIAN then
+ return tvbr:le_int64(), "le_int64()"
+ else
+ return tvbr:int64(), "int64()"
+ end
+ end
+
+ return true
+ and run_test_cases_all_tests(tree, "uint64", uint64_test_cases, tvbr_uint, general_equality_test)
+ and run_test_cases_all_tests(tree, "int64", int64_test_cases, tvbr_int, general_equality_test)
+end
+
+function string_tests(tree)
+
+ local ABC_ascii = "41 42 43"
+ local ABCzD_ascii = "41 42 43 00 44"
+
+ local SHARK_16_little = "b5 30 e1 30"
+ local SHARKzSA_16_little = "b5 30 e1 30 00 00 b5 30"
+
+ local SHARK_16_big = "30 b5 30 e1"
+ local SHARKzSA_16_big = "30 b5 30 e1 00 00 30 b5"
+
+ local string_test_cases = {
+ {input = ABC_ascii, encoding = ENC_ASCII, output = "ABC"},
+
+ {input = ABCzD_ascii, encoding = ENC_ASCII, output = "ABC"},
+
+ {input = SHARK_16_little, encoding = ENC_ASCII, output = "�0�0"},
+
+ {input = SHARK_16_little, encoding = ENC_UTF_16 + ENC_LITTLE_ENDIAN, output = "サメ"},
+
+ {input = SHARKzSA_16_little, encoding = ENC_UTF_16 + ENC_LITTLE_ENDIAN, output = "サメ"},
+
+ {input = SHARK_16_big, encoding = ENC_UTF_16 + ENC_BIG_ENDIAN, output = "サメ"},
+
+ {input = SHARKzSA_16_big, encoding = ENC_UTF_16 + ENC_BIG_ENDIAN, output = "サメ"},
+ }
+
+ function tvbr_string(tvbr, encoding)
+ return tvbr:string(encoding), string.format("string(%s)", format_encoding_for_print(encoding))
+ end
+
+ --[[
+        stringz computes its own input length by scanning for the null terminator;
+        that length includes the null, which is 2 bytes for UTF-16
+ ]]--
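+    -- e.g. for ABCzD_ascii ("41 42 43 00 44") the consumed length is 4: "ABC" plus its null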
+ local stringz_tests = {
+
+ {input = ABCzD_ascii, encoding = ENC_ASCII, output = "ABC",
+ variable_input_length = true, input_length = 4
+ },
+
+ {input = SHARKzSA_16_little, encoding = ENC_UTF_16 + ENC_LITTLE_ENDIAN, output = "サメ",
+ variable_input_length = true, input_length = 6,
+ },
+
+ {input = SHARKzSA_16_big, encoding = ENC_UTF_16 + ENC_BIG_ENDIAN, output = "サメ",
+ variable_input_length = true, input_length = 6,
+ },
+ }
+
+ function tvbr_stringz(tvbr, encoding)
+ return tvbr:stringz(encoding), string.format("stringz(%s)", format_encoding_for_print(encoding))
+ end
+
+ local ustring_tests = {
+ {input = SHARK_16_big, encoding = ENC_UTF_16 + ENC_BIG_ENDIAN, output = "サメ"},
+ {input = SHARKzSA_16_big, encoding = ENC_UTF_16 + ENC_BIG_ENDIAN, output = "サメ"},
+ }
+
+ function tvbr_ustring(tvbr, encoding)
+ return tvbr:ustring(), "ustring()"
+ end
+
+ local le_ustring_tests = {
+ {input = SHARK_16_little, encoding = ENC_UTF_16 + ENC_LITTLE_ENDIAN, output = "サメ"},
+ {input = SHARKzSA_16_little, encoding = ENC_UTF_16 + ENC_LITTLE_ENDIAN, output = "サメ"},
+ }
+
+ function tvbr_le_ustring(tvbr, encoding)
+ return tvbr:le_ustring(), "le_ustring()"
+ end
+
+ local ustringz_tests = {
+ {input = SHARKzSA_16_big, encoding = ENC_UTF_16 + ENC_BIG_ENDIAN, output = "サメ",
+ variable_input_length = true, input_length = 6
+ },
+ }
+
+ function tvbr_ustringz(tvbr, encoding)
+ return tvbr:ustringz(), "ustringz()"
+ end
+
+ local le_ustringz_tests = {
+ {input = SHARKzSA_16_little, encoding = ENC_UTF_16 + ENC_LITTLE_ENDIAN, output = "サメ",
+ variable_input_length = true, input_length = 6
+ },
+ }
+
+ function tvbr_le_ustringz(tvbr, encoding)
+ return tvbr:le_ustringz(), "le_ustringz()"
+ end
+
+ return true
+ and run_test_cases_all_tests(tree, "string", string_test_cases, tvbr_string, general_equality_test)
+ and run_test_cases_all_tests(tree, "stringz", stringz_tests, tvbr_stringz, general_equality_test)
+ and run_test_cases_all_tests(tree, "string", ustring_tests, tvbr_ustring, general_equality_test)
+ and run_test_cases_all_tests(tree, "string", le_ustring_tests, tvbr_le_ustring, general_equality_test)
+ and run_test_cases_all_tests(tree, "stringz", ustringz_tests, tvbr_ustringz, general_equality_test)
+ and run_test_cases_all_tests(tree, "stringz", le_ustringz_tests, tvbr_le_ustringz, general_equality_test)
+end
+
+function bool_char_tests(tree)
+
+ local bool_tests = {
+ {input = "ff", encoding = ENC_BIG_ENDIAN, output = true},
+ {input = "00", encoding = ENC_BIG_ENDIAN, output = false},
+ {input = "01", encoding = ENC_BIG_ENDIAN, output = true},
+ {input = "ff", encoding = ENC_LITTLE_ENDIAN, output = true},
+ {input = "00", encoding = ENC_LITTLE_ENDIAN, output = false},
+ {input = "01", encoding = ENC_LITTLE_ENDIAN, output = true},
+ }
+
+ local char_tests = {
+ {input = "ff", encoding = ENC_BIG_ENDIAN, output = 0xff},
+ {input = "00", encoding = ENC_BIG_ENDIAN, output = 0x00},
+ {input = "30", encoding = ENC_BIG_ENDIAN, output = 0x30},
+ {input = "ff", encoding = ENC_LITTLE_ENDIAN, output = 0xff},
+ {input = "00", encoding = ENC_LITTLE_ENDIAN, output = 0x00},
+ {input = "30", encoding = ENC_LITTLE_ENDIAN, output = 0x30},
+ }
+
+ return true
+ and run_test_cases_all_tests(tree, "boolean", bool_tests, nil, general_equality_test)
+ and run_test_cases_all_tests(tree, "char", char_tests, nil, general_equality_test)
+end
+
+function float_tests(tree)
+
+ local be_float = {
+ {input = "3c 00 00 00", encoding = ENC_BIG_ENDIAN, output = 0.0078125},
+ {input = "bd a0 00 00", encoding = ENC_BIG_ENDIAN, output = -0.078125},
+ {input = "3f 48 00 00", encoding = ENC_BIG_ENDIAN, output = 0.78125},
+ {input = "c0 fa 00 00", encoding = ENC_BIG_ENDIAN, output = -7.8125},
+ {input = "42 9c 40 00", encoding = ENC_BIG_ENDIAN, output = 78.125},
+ {input = "c4 43 50 00", encoding = ENC_BIG_ENDIAN, output = -781.25},
+ {input = "45 f4 24 00", encoding = ENC_BIG_ENDIAN, output = 7812.5},
+ {input = "c7 98 96 80", encoding = ENC_BIG_ENDIAN, output = -78125.0},
+ {input = "49 3e bc 20", encoding = ENC_BIG_ENDIAN, output = 781250.0},
+ {input = "ca ee 6b 28", encoding = ENC_BIG_ENDIAN, output = -7812500.0},
+ {input = "00 00 00 00", encoding = ENC_BIG_ENDIAN, output = 0.0},
+ {input = "80 00 00 00", encoding = ENC_BIG_ENDIAN, output = -0.0},
+ {input = "7f c0 00 00", encoding = ENC_BIG_ENDIAN, output = 0/0},
+ {input = "7f 80 00 00", encoding = ENC_BIG_ENDIAN, output = 1/0},
+ {input = "ff 80 00 00", encoding = ENC_BIG_ENDIAN, output = -1/0},
+ }
+
+ local le_float = {
+ {input = "00 00 00 3c", encoding = ENC_LITTLE_ENDIAN, output = 0.0078125},
+ {input = "00 00 a0 bd", encoding = ENC_LITTLE_ENDIAN, output = -0.078125},
+ {input = "00 00 48 3f", encoding = ENC_LITTLE_ENDIAN, output = 0.78125},
+ {input = "00 00 fa c0", encoding = ENC_LITTLE_ENDIAN, output = -7.8125},
+ {input = "00 40 9c 42", encoding = ENC_LITTLE_ENDIAN, output = 78.125},
+ {input = "00 50 43 c4", encoding = ENC_LITTLE_ENDIAN, output = -781.25},
+ {input = "00 24 f4 45", encoding = ENC_LITTLE_ENDIAN, output = 7812.5},
+ {input = "80 96 98 c7", encoding = ENC_LITTLE_ENDIAN, output = -78125.0},
+ {input = "20 bc 3e 49", encoding = ENC_LITTLE_ENDIAN, output = 781250.0},
+ {input = "28 6b ee ca", encoding = ENC_LITTLE_ENDIAN, output = -7812500.0},
+ {input = "00 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = 0.0},
+ {input = "00 00 00 80", encoding = ENC_LITTLE_ENDIAN, output = -0.0},
+ {input = "00 00 c0 7f", encoding = ENC_LITTLE_ENDIAN, output = 0/0},
+ {input = "00 00 80 7f", encoding = ENC_LITTLE_ENDIAN, output = 1/0},
+ {input = "00 00 80 ff", encoding = ENC_LITTLE_ENDIAN, output = -1/0},
+ }
+
+ local be_double = {
+ {input = "3f 80 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = 0.0078125},
+ {input = "bf e9 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = -0.78125},
+ {input = "40 88 6a 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = 781.25},
+ {input = "c0 f3 12 d0 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = -78125.0},
+ {input = "41 92 a0 5f 20 00 00 00", encoding = ENC_BIG_ENDIAN, output = 78125000.0},
+ {input = "c1 fd 1a 94 a2 00 00 00", encoding = ENC_BIG_ENDIAN, output = -7812500000.0},
+ {input = "42 9c 6b f5 26 34 00 00", encoding = ENC_BIG_ENDIAN, output = 7812500000000.0},
+ {input = "c3 06 34 57 85 d8 a0 00", encoding = ENC_BIG_ENDIAN, output = -781250000000000.0},
+ {input = "43 a5 af 1d 78 b5 8c 40", encoding = ENC_BIG_ENDIAN, output = 7.8125e+17},
+ {input = "c4 10 f0 cf 06 4d d5 92", encoding = ENC_BIG_ENDIAN, output = -7.8125e+19},
+ {input = "00 00 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = 0.0},
+ {input = "80 00 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = -0.0},
+ {input = "7f f8 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = 0/0},
+ {input = "7f f0 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = 1/0},
+ {input = "ff f0 00 00 00 00 00 00", encoding = ENC_BIG_ENDIAN, output = -1/0},
+ }
+
+ local le_double = {
+ {input = "00 00 00 00 00 00 80 3f", encoding = ENC_LITTLE_ENDIAN, output = 0.0078125},
+ {input = "00 00 00 00 00 00 e9 bf", encoding = ENC_LITTLE_ENDIAN, output = -0.78125},
+ {input = "00 00 00 00 00 6a 88 40", encoding = ENC_LITTLE_ENDIAN, output = 781.25},
+ {input = "00 00 00 00 d0 12 f3 c0", encoding = ENC_LITTLE_ENDIAN, output = -78125.0},
+ {input = "00 00 00 20 5f a0 92 41", encoding = ENC_LITTLE_ENDIAN, output = 78125000.0},
+ {input = "00 00 00 a2 94 1a fd c1", encoding = ENC_LITTLE_ENDIAN, output = -7812500000.0},
+ {input = "00 00 34 26 f5 6b 9c 42", encoding = ENC_LITTLE_ENDIAN, output = 7812500000000.0},
+ {input = "00 a0 d8 85 57 34 06 c3", encoding = ENC_LITTLE_ENDIAN, output = -781250000000000.0},
+ {input = "40 8c b5 78 1d af a5 43", encoding = ENC_LITTLE_ENDIAN, output = 7.8125e+17},
+ {input = "92 d5 4d 06 cf f0 10 c4", encoding = ENC_LITTLE_ENDIAN, output = -7.8125e+19},
+ {input = "00 00 00 00 00 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = 0.0},
+ {input = "00 00 00 00 00 00 00 80", encoding = ENC_LITTLE_ENDIAN, output = -0.0},
+ {input = "00 00 00 00 00 00 f8 7f", encoding = ENC_LITTLE_ENDIAN, output = 0/0},
+ {input = "00 00 00 00 00 00 f0 7f", encoding = ENC_LITTLE_ENDIAN, output = 1/0},
+ {input = "00 00 00 00 00 00 f0 ff", encoding = ENC_LITTLE_ENDIAN, output = -1/0},
+ }
+
+ function tvbr_float(tvbr, encoding)
+ return tvbr:float(), "float()"
+ end
+
+ function tvbr_le_float(tvbr, encoding)
+ return tvbr:le_float(), "le_float()"
+ end
+
+ return true
+ and run_test_cases_all_tests(tree, "float", be_float, tvbr_float, float_equality_test)
+ and run_test_cases_all_tests(tree, "double", be_double, tvbr_float, float_equality_test)
+ and run_test_cases_all_tests(tree, "float", le_float, tvbr_le_float, float_equality_test)
+ and run_test_cases_all_tests(tree, "double", le_double, tvbr_le_float, float_equality_test)
+end
+
+function address_tests(tree)
+
+ --INCOMPATIBILITY: value fields always assume big-endian encoding for IPv4 addresses
+ local ipv4_test_cases = {
+ {input = "01 00 00 00", encoding = ENC_LITTLE_ENDIAN, output = Address.ip("0.0.0.1"),
+ incompatible_add_pf_field = true, expect_add_pf_field_value = Address.ip("1.0.0.0"),
+ incompatible_add_field = true, expect_add_field_value = Address.ip("1.0.0.0")
+ },
+ {input = "00 02 00 00", encoding = ENC_LITTLE_ENDIAN, output = Address.ip("0.0.2.0"),
+ incompatible_add_pf_field = true, expect_add_pf_field_value = Address.ip("0.2.0.0"),
+ incompatible_add_field = true, expect_add_field_value = Address.ip("0.2.0.0")
+ },
+ {input = "00 00 03 00", encoding = ENC_LITTLE_ENDIAN, output = Address.ip("0.3.0.0"),
+ incompatible_add_pf_field = true, expect_add_pf_field_value = Address.ip("0.0.3.0"),
+ incompatible_add_field = true, expect_add_field_value = Address.ip("0.0.3.0")
+ },
+ {input = "00 00 00 04", encoding = ENC_LITTLE_ENDIAN, output = Address.ip("4.0.0.0"),
+ incompatible_add_pf_field = true, expect_add_pf_field_value = Address.ip("0.0.0.4"),
+ incompatible_add_field = true, expect_add_field_value = Address.ip("0.0.0.4")
+ },
+ {input = "01 00 00 00", encoding = ENC_BIG_ENDIAN, output = Address.ip("1.0.0.0")},
+ {input = "00 02 00 00", encoding = ENC_BIG_ENDIAN, output = Address.ip("0.2.0.0")},
+ {input = "00 00 03 00", encoding = ENC_BIG_ENDIAN, output = Address.ip("0.0.3.0")},
+ {input = "00 00 00 04", encoding = ENC_BIG_ENDIAN, output = Address.ip("0.0.0.4")},
+ }
+
+ function tvbr_ipv4 (tvbr, encoding)
+ if encoding == ENC_LITTLE_ENDIAN then
+ return tvbr:le_ipv4(), "le_ipv4()"
+ else
+ return tvbr:ipv4(), "ipv4()"
+ end
+ end
+
+ local ipv6_test_cases = {
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 0000 0000 0000 0000 0000 00ff",
+ output = Address.ipv6("0000:0000:0000:0000:0000:0000:0000:00ff")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 0000 0000 0000 0000 0000 ff00",
+ output = Address.ipv6("0000:0000:0000:0000:0000:0000:0000:ff00")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 0000 0000 0000 0000 00ff 0000",
+ output = Address.ipv6("0000:0000:0000:0000:0000:0000:00ff:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 0000 0000 0000 0000 ff00 0000",
+ output = Address.ipv6("0000:0000:0000:0000:0000:0000:ff00:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 0000 0000 0000 00ff 0000 0000",
+ output = Address.ipv6("0000:0000:0000:0000:0000:00ff:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 0000 0000 0000 ff00 0000 0000",
+ output = Address.ipv6("0000:0000:0000:0000:0000:ff00:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 0000 0000 00ff 0000 0000 0000",
+ output = Address.ipv6("0000:0000:0000:0000:00ff:0000:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 0000 0000 ff00 0000 0000 0000",
+ output = Address.ipv6("0000:0000:0000:0000:ff00:0000:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 0000 00ff 0000 0000 0000 0000",
+ output = Address.ipv6("0000:0000:0000:00ff:0000:0000:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 0000 ff00 0000 0000 0000 0000",
+ output = Address.ipv6("0000:0000:0000:ff00:0000:0000:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 00ff 0000 0000 0000 0000 0000",
+ output = Address.ipv6("0000:0000:00ff:0000:0000:0000:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 0000 ff00 0000 0000 0000 0000 0000",
+ output = Address.ipv6("0000:0000:ff00:0000:0000:0000:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 00ff 0000 0000 0000 0000 0000 0000",
+ output = Address.ipv6("0000:00ff:0000:0000:0000:0000:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "0000 ff00 0000 0000 0000 0000 0000 0000",
+ output = Address.ipv6("0000:ff00:0000:0000:0000:0000:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "00ff 0000 0000 0000 0000 0000 0000 0000",
+ output = Address.ipv6("00ff:0000:0000:0000:0000:0000:0000:0000")},
+ {encoding = ENC_BIG_ENDIAN, input = "ff00 0000 0000 0000 0000 0000 0000 0000",
+ output = Address.ipv6("ff00:0000:0000:0000:0000:0000:0000:0000")},
+ }
+
+ function tvbr_ipv6 (tvbr, encoding)
+ return tvbr:ipv6(), "ipv6()"
+ end
+
+ local ether_test_cases = {
+ {input = "ff 00 00 00 00 00", encoding = 0, output = Address.ether("ff:00:00:00:00:00")},
+ {input = "00 ff 00 00 00 00", encoding = 0, output = Address.ether("00:ff:00:00:00:00")},
+ {input = "00 00 ff 00 00 00", encoding = 0, output = Address.ether("00:00:ff:00:00:00")},
+ {input = "00 00 00 ff 00 00", encoding = 0, output = Address.ether("00:00:00:ff:00:00")},
+ {input = "00 00 00 00 ff 00", encoding = 0, output = Address.ether("00:00:00:00:ff:00")},
+ {input = "00 00 00 00 00 ff", encoding = 0, output = Address.ether("00:00:00:00:00:ff")},
+ }
+
+ function tvbr_ether (tvbr, encoding)
+ return tvbr:ether(), "ether()"
+ end
+
+ return true
+ and run_test_cases_all_tests(tree, "ipv4", ipv4_test_cases, tvbr_ipv4, general_equality_test)
+ and run_test_cases_all_tests(tree, "ipv6", ipv6_test_cases, tvbr_ipv6, general_equality_test)
+ and run_test_cases_all_tests(tree, "ether", ether_test_cases, tvbr_ether, general_equality_test)
+end
+
+function time_tests(tree)
+
+ local time_cases = {
+ {input ="00 01 02 03", encoding = ENC_BIG_ENDIAN, output = NSTime(0x00010203,0)},
+ {input ="03 02 01 00", encoding = ENC_LITTLE_ENDIAN, output = NSTime(0x00010203,0)},
+ {input ="00 01 02 03 04 05 06 07", encoding = ENC_BIG_ENDIAN, output = NSTime(0x00010203, 0x04050607)},
+ {input ="03 02 01 00 07 06 05 04", encoding = ENC_LITTLE_ENDIAN, output = NSTime(0x00010203, 0x04050607)},
+ }
+
+ local string_cases = {
+ {input = "1994-11-05T13:15:30Z", encoding = ENC_ISO_8601_DATE_TIME, output = NSTime(784041330, 0),
+ hexlify=true},
+ {input = "1994-11-05T13:15:30Z12345", encoding = ENC_ISO_8601_DATE_TIME, output = NSTime(784041330, 0),
+ hexlify=true, variable_input_length = true, input_length = 20},
+ }
+
+ function tvbr_nstime(tvbr, encoding)
+ if encoding == ENC_LITTLE_ENDIAN then
+ return tvbr:le_nstime(), "le_nstime()"
+ else
+ return tvbr:nstime(encoding), string.format("nstime(%s)", format_encoding_for_print(encoding))
+ end
+ end
+
+ return true
+ and run_test_cases_all_tests(tree, "relative_time", time_cases, tvbr_nstime, general_equality_test)
+ and run_test_cases_all_tests(tree, "absolute_time", time_cases, tvbr_nstime, general_equality_test)
+ and run_test_cases_all_tests(tree, "absolute_time", string_cases, tvbr_nstime, general_equality_test)
+end
+
+function bytearray_tests(tree)
+
+ local bytes_tests = {
+ {input = "00 01 02 03 ff", encoding = 0, output = ByteArray.new("00 01 02 03 ff")}
+ }
+
+ function tvbr_bytes(tvbr, encoding)
+ return tvbr:bytes(), "bytes()"
+ end
+
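+    --[[
+        For uint_bytes the tvbrange passed in covers only the leading length prefix
+        (1, 2 or 4 bytes below, via fake_input_length); the field is then expected to
+        consume that many additional data bytes, which is why the expected offset is
+        computed from the full input rather than from the range length.
+    ]]--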
+ local varbytes_tests = {
+ {input = "04 00 01 02 ff", encoding = ENC_BIG_ENDIAN,
+ output = ByteArray.new("00 01 02 ff"), fake_input_length = 1},
+ {input = "00 04 00 01 02 ff", encoding = ENC_BIG_ENDIAN,
+ output = ByteArray.new("00 01 02 ff"), fake_input_length = 2},
+ {input = "00 00 00 04 00 01 02 ff", encoding = ENC_BIG_ENDIAN,
+ output = ByteArray.new("00 01 02 ff"), fake_input_length = 4},
+ }
+
+ return true
+ and run_test_cases_all_tests(tree, "bytes", bytes_tests, tvbr_bytes, general_equality_test)
+ and run_test_cases_all_tests(tree, "oid", bytes_tests, tvbr_bytes, general_equality_test)
+ and run_test_cases_all_tests(tree, "rel_oid", bytes_tests, tvbr_bytes, general_equality_test)
+ and run_test_cases_all_tests(tree, "system_id", bytes_tests, tvbr_bytes, general_equality_test)
+ and run_test_cases_all_tests(tree, "uint_bytes", varbytes_tests, nil, general_equality_test)
+end
+
+function run_all_tests(tree)
+ return true
+ and simple_integer_tests(tree)
+ and integer64_tests(tree)
+ and string_tests(tree)
+ and bool_char_tests(tree)
+ and float_tests(tree)
+ and address_tests(tree)
+ and time_tests(tree)
+ and bytearray_tests(tree)
+end
+
+local has_run = false
+function myproto.dissector(tvb, pkt, root)
+ if has_run then
+ return
+ end
+ has_run = true
+ local tree = root:add(myproto, tvb(0))
+ if run_all_tests(tree) then
+ print("All tests passed!")
+ print()
+ end
+end
+
+DissectorTable.get("udp.port"):add(65333, myproto)
diff --git a/test/lua/byte_array.lua b/test/lua/byte_array.lua
new file mode 100644
index 0000000..8e4631b
--- /dev/null
+++ b/test/lua/byte_array.lua
@@ -0,0 +1,215 @@
+-- test script for ByteArray integer functions
+
+local testlib = require("testlib")
+
+local INT = "int"
+local UINT = "uint"
+local INT64 = "int64"
+local UINT64 = "uint64"
+local LE_INT = "le_int"
+local LE_UINT = "le_uint"
+local LE_INT64 = "le_int64"
+local LE_UINT64 = "le_uint64"
+
+-- expected number of runs per type
+local taptests = {
+ [INT]=14,
+ [UINT]=14,
+ [INT64]=15,
+ [UINT64]=15,
+ [LE_INT]=14,
+ [LE_UINT]=14,
+ [LE_INT64]=15,
+ [LE_UINT64]=15
+}
+testlib.init(taptests)
+
+local empty = ByteArray.new("")
+local be_data = ByteArray.new("FF 00 00 00 00 00 00 00")
+local le_data = ByteArray.new("00 00 00 00 00 00 00 FF")
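+-- be_data holds 0xFF in the most significant byte when read big-endian;
+-- le_data holds 0xFF in the most significant byte when read little-endian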
+
+-- the following are so we can use pcall (which needs a function to call)
+
+local function ByteArray_int(array,offset,length)
+ local value = array:int(offset,length)
+end
+
+local function ByteArray_uint(array,offset,length)
+ local value = array:uint(offset,length)
+end
+
+local function ByteArray_int64(array,offset,length)
+ local value = array:int64(offset,length)
+end
+
+local function ByteArray_uint64(array,offset,length)
+ local value = array:uint64(offset,length)
+end
+
+local function ByteArray_le_int(array,offset,length)
+ local value = array:le_int(offset,length)
+end
+
+local function ByteArray_le_uint(array,offset,length)
+ local value = array:le_uint(offset,length)
+end
+
+local function ByteArray_le_int64(array,offset,length)
+ local value = array:le_int64(offset,length)
+end
+
+local function ByteArray_le_uint64(array,offset,length)
+ local value = array:le_uint64(offset,length)
+end
+
+------------- test script ------------
+
+testlib.testing(INT,"negative tests")
+testlib.test(INT,"ByteArray:int-0",not pcall(ByteArray_int, empty))
+testlib.test(INT,"ByteArray:int-1",not pcall(ByteArray_int, be_data))
+testlib.test(INT,"ByteArray:int-2",not pcall(ByteArray_int, be_data, -1))
+testlib.test(INT,"ByteArray:int-3",not pcall(ByteArray_int, be_data, 0))
+testlib.test(INT,"ByteArray:int-4",not pcall(ByteArray_int, be_data, 0, -1))
+testlib.test(INT,"ByteArray:int-5",not pcall(ByteArray_int, be_data, 0, 0))
+testlib.test(INT,"ByteArray:int-6",not pcall(ByteArray_int, be_data, 0, 5))
+testlib.test(INT,"ByteArray:int-7",not pcall(ByteArray_int, be_data, 7, 2))
+testlib.test(INT,"ByteArray:int-8",not pcall(ByteArray_int, be_data, 8, 1))
+
+testlib.testing(INT,"positive tests")
+testlib.test(INT,"ByteArray:int-9", be_data:int(0, 1) == -1)
+testlib.test(INT,"ByteArray:int-10", be_data:int(0, 2) == -256)
+testlib.test(INT,"ByteArray:int-11", be_data:int(0, 3) == -65536)
+testlib.test(INT,"ByteArray:int-12", be_data:int(0, 4) == -16777216)
+testlib.test(INT,"ByteArray:int-13", be_data:subset(2, 2):int() == 0)
+
+testlib.testing(UINT,"negative tests")
+testlib.test(UINT,"ByteArray:uint-0",not pcall(ByteArray_uint, empty))
+testlib.test(UINT,"ByteArray:uint-1",not pcall(ByteArray_uint, be_data))
+testlib.test(UINT,"ByteArray:uint-2",not pcall(ByteArray_uint, be_data, -1))
+testlib.test(UINT,"ByteArray:uint-3",not pcall(ByteArray_uint, be_data, 0))
+testlib.test(UINT,"ByteArray:uint-4",not pcall(ByteArray_uint, be_data, 0, -1))
+testlib.test(UINT,"ByteArray:uint-5",not pcall(ByteArray_uint, be_data, 0, 0))
+testlib.test(UINT,"ByteArray:uint-6",not pcall(ByteArray_uint, be_data, 0, 5))
+testlib.test(UINT,"ByteArray:uint-7",not pcall(ByteArray_uint, be_data, 7, 2))
+testlib.test(UINT,"ByteArray:uint-8",not pcall(ByteArray_uint, be_data, 8, 1))
+
+testlib.testing(UINT,"positive tests")
+testlib.test(UINT,"ByteArray:uint-9", be_data:uint(0, 1) == 255)
+testlib.test(UINT,"ByteArray:uint-10", be_data:uint(0, 2) == 65280)
+testlib.test(UINT,"ByteArray:uint-11", be_data:uint(0, 3) == 16711680)
+testlib.test(UINT,"ByteArray:uint-12", be_data:uint(0, 4) == 4278190080)
+testlib.test(UINT,"ByteArray:uint-13", be_data:subset(2, 2):uint() == 0)
+
+testlib.testing(INT64,"negative tests")
+testlib.test(INT64,"ByteArray:int64-0",not pcall(ByteArray_int64, empty))
+testlib.test(INT64,"ByteArray:int64-1",not pcall(ByteArray_int64, be_data, -1))
+testlib.test(INT64,"ByteArray:int64-2",not pcall(ByteArray_int64, be_data, 0, 0))
+testlib.test(INT64,"ByteArray:int64-3",not pcall(ByteArray_int64, be_data, 0, 9))
+testlib.test(INT64,"ByteArray:int64-4",not pcall(ByteArray_int64, be_data, 7, 2))
+testlib.test(INT64,"ByteArray:int64-5",not pcall(ByteArray_int64, be_data, 8, 1))
+
+testlib.testing(INT64,"positive tests")
+testlib.test(INT64,"ByteArray:int64-6", be_data:int64(0, 1):tonumber() == -1)
+testlib.test(INT64,"ByteArray:int64-7", be_data:int64(0, 2):tonumber() == -256)
+testlib.test(INT64,"ByteArray:int64-8", be_data:int64(0, 3):tonumber() == -65536)
+testlib.test(INT64,"ByteArray:int64-9", be_data:int64(0, 4):tonumber() == -16777216)
+testlib.test(INT64,"ByteArray:int64-10", be_data:int64(0, 5):tonumber() == -4294967296)
+testlib.test(INT64,"ByteArray:int64-11", be_data:int64(0, 6):tonumber() == -1099511627776)
+testlib.test(INT64,"ByteArray:int64-12", be_data:int64(0, 7):tonumber() == -281474976710656)
+testlib.test(INT64,"ByteArray:int64-13", be_data:int64():tonumber() == -72057594037927936)
+testlib.test(INT64,"ByteArray:int64-14", be_data:subset(2, 2):int64():tonumber() == 0)
+
+testlib.testing(UINT64,"negative tests")
+testlib.test(UINT64,"ByteArray:uint64-0",not pcall(ByteArray_uint64, empty))
+testlib.test(UINT64,"ByteArray:uint64-1",not pcall(ByteArray_uint64, be_data, -1))
+testlib.test(UINT64,"ByteArray:uint64-2",not pcall(ByteArray_uint64, be_data, 0, 0))
+testlib.test(UINT64,"ByteArray:uint64-3",not pcall(ByteArray_uint64, be_data, 0, 9))
+testlib.test(UINT64,"ByteArray:uint64-4",not pcall(ByteArray_uint64, be_data, 7, 2))
+testlib.test(UINT64,"ByteArray:uint64-5",not pcall(ByteArray_uint64, be_data, 8, 1))
+
+testlib.testing(UINT64,"positive tests")
+testlib.test(UINT64,"ByteArray:uint64-6", be_data:uint64(0, 1):tonumber() == 255)
+testlib.test(UINT64,"ByteArray:uint64-7", be_data:uint64(0, 2):tonumber() == 65280)
+testlib.test(UINT64,"ByteArray:uint64-8", be_data:uint64(0, 3):tonumber() == 16711680)
+testlib.test(UINT64,"ByteArray:uint64-9", be_data:uint64(0, 4):tonumber() == 4278190080)
+testlib.test(UINT64,"ByteArray:uint64-10", be_data:uint64(0, 5):tonumber() == 1095216660480)
+testlib.test(UINT64,"ByteArray:uint64-11", be_data:uint64(0, 6):tonumber() == 280375465082880)
+testlib.test(UINT64,"ByteArray:uint64-12", be_data:uint64(0, 7):tonumber() == 71776119061217280)
+testlib.test(UINT64,"ByteArray:uint64-13", be_data:uint64():tonumber() == 18374686479671623680)
+testlib.test(UINT64,"ByteArray:uint64-14", be_data:subset(2, 2):uint64():tonumber() == 0)
+
+testlib.testing(LE_INT,"negative tests")
+testlib.test(LE_INT,"ByteArray:le_int-0",not pcall(ByteArray_le_int, empty))
+testlib.test(LE_INT,"ByteArray:le_int-1",not pcall(ByteArray_le_int, le_data))
+testlib.test(LE_INT,"ByteArray:le_int-2",not pcall(ByteArray_le_int, le_data, -1))
+testlib.test(LE_INT,"ByteArray:le_int-3",not pcall(ByteArray_le_int, le_data, 0))
+testlib.test(LE_INT,"ByteArray:le_int-4",not pcall(ByteArray_le_int, le_data, 0, -1))
+testlib.test(LE_INT,"ByteArray:le_int-5",not pcall(ByteArray_le_int, le_data, 0, 0))
+testlib.test(LE_INT,"ByteArray:le_int-6",not pcall(ByteArray_le_int, le_data, 0, 5))
+testlib.test(LE_INT,"ByteArray:le_int-7",not pcall(ByteArray_le_int, le_data, 7, 2))
+testlib.test(LE_INT,"ByteArray:le_int-8",not pcall(ByteArray_le_int, le_data, 8, 1))
+
+testlib.testing(LE_INT,"positive tests")
+testlib.test(LE_INT,"ByteArray:le_int-9", le_data:le_int(7) == -1)
+testlib.test(LE_INT,"ByteArray:le_int-10", le_data:le_int(6, 2) == -256)
+testlib.test(LE_INT,"ByteArray:le_int-11", le_data:le_int(5, 3) == -65536)
+testlib.test(LE_INT,"ByteArray:le_int-12", le_data:le_int(4, 4) == -16777216)
+testlib.test(LE_INT,"ByteArray:le_int-13", be_data:subset(2, 2):le_int() == 0)
+
+testlib.testing(LE_UINT,"negative tests")
+testlib.test(LE_UINT,"ByteArray:le_uint-0",not pcall(ByteArray_le_uint, empty))
+testlib.test(LE_UINT,"ByteArray:le_uint-1",not pcall(ByteArray_le_uint, le_data))
+testlib.test(LE_UINT,"ByteArray:le_uint-2",not pcall(ByteArray_le_uint, le_data, -1))
+testlib.test(LE_UINT,"ByteArray:le_uint-3",not pcall(ByteArray_le_uint, le_data, 0))
+testlib.test(LE_UINT,"ByteArray:le_uint-4",not pcall(ByteArray_le_uint, le_data, 0, -1))
+testlib.test(LE_UINT,"ByteArray:le_uint-5",not pcall(ByteArray_le_uint, le_data, 0, 0))
+testlib.test(LE_UINT,"ByteArray:le_uint-6",not pcall(ByteArray_le_uint, le_data, 0, 5))
+testlib.test(LE_UINT,"ByteArray:le_uint-7",not pcall(ByteArray_le_uint, le_data, 7, 2))
+testlib.test(LE_UINT,"ByteArray:le_uint-8",not pcall(ByteArray_le_uint, le_data, 8, 1))
+
+testlib.testing(LE_UINT,"positive tests")
+testlib.test(LE_UINT,"ByteArray:le_uint-9", le_data:le_uint(7) == 255)
+testlib.test(LE_UINT,"ByteArray:le_uint-10", le_data:le_uint(6, 2) == 65280)
+testlib.test(LE_UINT,"ByteArray:le_uint-11", le_data:le_uint(5, 3) == 16711680)
+testlib.test(LE_UINT,"ByteArray:le_uint-12", le_data:le_uint(4, 4) == 4278190080)
+testlib.test(LE_UINT,"ByteArray:le_uint-13", be_data:subset(2, 2):le_uint() == 0)
+
+testlib.testing(LE_INT64,"negative tests")
+testlib.test(LE_INT64,"ByteArray:le_int64-0",not pcall(ByteArray_le_int64, empty))
+testlib.test(LE_INT64,"ByteArray:le_int64-1",not pcall(ByteArray_le_int64, le_data, -1))
+testlib.test(LE_INT64,"ByteArray:le_int64-2",not pcall(ByteArray_le_int64, le_data, 0, 0))
+testlib.test(LE_INT64,"ByteArray:le_int64-3",not pcall(ByteArray_le_int64, le_data, 0, 9))
+testlib.test(LE_INT64,"ByteArray:le_int64-4",not pcall(ByteArray_le_int64, le_data, 7, 2))
+testlib.test(LE_INT64,"ByteArray:le_int64-5",not pcall(ByteArray_le_int64, le_data, 8, 1))
+
+testlib.testing(LE_INT64,"positive tests")
+testlib.test(LE_INT64,"ByteArray:le_int64-6", le_data:le_int64(7):tonumber() == -1)
+testlib.test(LE_INT64,"ByteArray:le_int64-7", le_data:le_int64(6, 2):tonumber() == -256)
+testlib.test(LE_INT64,"ByteArray:le_int64-8", le_data:le_int64(5, 3):tonumber() == -65536)
+testlib.test(LE_INT64,"ByteArray:le_int64-9", le_data:le_int64(4, 4):tonumber() == -16777216)
+testlib.test(LE_INT64,"ByteArray:le_int64-10", le_data:le_int64(3, 5):tonumber() == -4294967296)
+testlib.test(LE_INT64,"ByteArray:le_int64-11", le_data:le_int64(2, 6):tonumber() == -1099511627776)
+testlib.test(LE_INT64,"ByteArray:le_int64-12", le_data:le_int64(1, 7):tonumber() == -281474976710656)
+testlib.test(LE_INT64,"ByteArray:le_int64-13", le_data:le_int64():tonumber() == -72057594037927936)
+testlib.test(LE_INT64,"ByteArray:le_int64-14", le_data:subset(0, 2):le_int64():tonumber() == 0)
+
+testlib.testing(LE_UINT64,"negative tests")
+testlib.test(LE_UINT64,"ByteArray:le_uint64-0",not pcall(ByteArray_le_uint64, empty))
+testlib.test(LE_UINT64,"ByteArray:le_uint64-1",not pcall(ByteArray_le_uint64, le_data, -1))
+testlib.test(LE_UINT64,"ByteArray:le_uint64-2",not pcall(ByteArray_le_uint64, le_data, 0, 0))
+testlib.test(LE_UINT64,"ByteArray:le_uint64-3",not pcall(ByteArray_le_uint64, le_data, 0, 9))
+testlib.test(LE_UINT64,"ByteArray:le_uint64-4",not pcall(ByteArray_le_uint64, le_data, 7, 2))
+testlib.test(LE_UINT64,"ByteArray:le_uint64-5",not pcall(ByteArray_le_uint64, le_data, 8, 1))
+
+testlib.testing(LE_UINT64,"positive tests")
+testlib.test(LE_UINT64,"ByteArray:le_uint64-6", le_data:le_uint64(7):tonumber() == 255)
+testlib.test(LE_UINT64,"ByteArray:le_uint64-7", le_data:le_uint64(6, 2):tonumber() == 65280)
+testlib.test(LE_UINT64,"ByteArray:le_uint64-8", le_data:le_uint64(5, 3):tonumber() == 16711680)
+testlib.test(LE_UINT64,"ByteArray:le_uint64-9", le_data:le_uint64(4, 4):tonumber() == 4278190080)
+testlib.test(LE_UINT64,"ByteArray:le_uint64-10", le_data:le_uint64(3, 5):tonumber() == 1095216660480)
+testlib.test(LE_UINT64,"ByteArray:le_uint64-11", le_data:le_uint64(2, 6):tonumber() == 280375465082880)
+testlib.test(LE_UINT64,"ByteArray:le_uint64-12", le_data:le_uint64(1, 7):tonumber() == 71776119061217280)
+testlib.test(LE_UINT64,"ByteArray:le_uint64-13", le_data:le_uint64():tonumber() == 18374686479671623680)
+testlib.test(LE_UINT64,"ByteArray:le_uint64-14", le_data:subset(0, 2):le_uint64():tonumber() == 0)
+
+testlib.getResults()
\ No newline at end of file
diff --git a/test/lua/dir.lua b/test/lua/dir.lua
new file mode 100644
index 0000000..c9ff8ea
--- /dev/null
+++ b/test/lua/dir.lua
@@ -0,0 +1,195 @@
+-- test script for wslua Dir functions
+
+local testlib = require("testlib")
+local OTHER = "other"
+testlib.init( { [OTHER] = 0 } )
+
+------------- helper funcs ------------
+
+-- the following are so we can use pcall (which needs a function to call)
+local function callDirFuncBase(name, t)
+ t.result = Dir[name]()
+ return true
+end
+
+local function callDirFunc(name, val, t)
+ t.result = Dir[name](val)
+ return true
+end
+
+local function makeFile(filename)
+ local f = io.open(filename, "w")
+ if not f then
+        error("failed to make file "..filename.." in directory\n"..
+ "make sure to delete 'temp' directory before running again")
+ end
+ f:write("fooobarrloo")
+ f:close()
+ return true
+end
+
+--------------------------
+
+-- for our called function results
+local t = {}
+
+testlib.testing("Dir basics")
+
+testlib.test(OTHER,"global", _G.Dir ~= nil)
+testlib.test(OTHER,"global", type(Dir.make) == 'function')
+testlib.test(OTHER,"global", type(Dir.remove) == 'function')
+testlib.test(OTHER,"global", type(Dir.remove_all) == 'function')
+testlib.test(OTHER,"global", type(Dir.open) == 'function')
+testlib.test(OTHER,"global", type(Dir.close) == 'function')
+testlib.test(OTHER,"global", type(Dir.exists) == 'function')
+testlib.test(OTHER,"global", type(Dir.personal_config_path) == 'function')
+testlib.test(OTHER,"global", type(Dir.global_config_path) == 'function')
+testlib.test(OTHER,"global", type(Dir.personal_plugins_path) == 'function')
+testlib.test(OTHER,"global", type(Dir.global_plugins_path) == 'function')
+
+testlib.testing("Dir paths/filenames")
+
+testlib.test(OTHER,"Dir.__FILE__", __FILE__ ~= nil)
+testlib.test(OTHER,"Dir.__DIR__", __DIR__ ~= nil)
+testlib.test(OTHER,"Dir.exists", pcall(callDirFunc, "exists", "temp", t))
+testlib.test(OTHER,"Dir.personal_config_path", pcall(callDirFuncBase, "personal_config_path", t))
+testlib.test(OTHER,"Dir.global_config_path", pcall(callDirFuncBase, "global_config_path", t))
+testlib.test(OTHER,"Dir.personal_plugins_path", pcall(callDirFuncBase, "personal_plugins_path", t))
+testlib.test(OTHER,"Dir.global_plugins_path", pcall(callDirFuncBase, "global_plugins_path", t))
+
+-- Users expect trailing slashes for DATA_DIR and USER_DIR (bug 14619).
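+-- package.config's first character is the platform's directory separator ('/' or '\').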
+local dirsep = package.config:sub(1,1)
+testlib.test(OTHER,"DATA_DIR", string.sub(DATA_DIR, -1) == dirsep)
+testlib.test(OTHER,"USER_DIR", string.sub(USER_DIR, -1) == dirsep)
+
+print("\nFor your information, I got the following info:\n")
+print("__FILE__ = '" .. __FILE__ .. "'")
+print("__DIR__ = '" .. __DIR__ .. "'")
+print("personal_config_path = '" .. Dir.personal_config_path() .. "'")
+print("global_config_path = '" .. Dir.global_config_path() .. "'")
+print("personal_plugins_path = '" .. Dir.personal_plugins_path() .. "'")
+print("global_plugins_path = '" .. Dir.global_plugins_path() .. "'")
+print("\n")
+
+testlib.testing("Directory manipulation")
+
+testlib.test(OTHER,"Dir.exists", pcall(callDirFunc, "exists", "temp", t))
+
+if t.result == true or t.result == false then
+ error("this testsuite requires there be no 'temp' directory or file; please remove it")
+end
+
+testlib.testing("Dir.make")
+
+testlib.test(OTHER,"Dir.make", pcall(callDirFunc, "make", "temp", t) and t.result == true)
+testlib.test(OTHER,"Dir.exists", pcall(callDirFunc, "exists", "temp", t) and t.result == true)
+-- make the same dir, should give false
+testlib.test(OTHER,"Dir.make", pcall(callDirFunc, "make", "temp", t) and t.result == false)
+
+testlib.testing("Dir.remove")
+
+testlib.test(OTHER,"Dir.remove", pcall(callDirFunc, "remove", "temp", t) and t.result == true)
+testlib.test(OTHER,"Dir.exists", pcall(callDirFunc, "exists", "temp", t) and t.result == nil)
+testlib.test(OTHER,"Dir.remove", pcall(callDirFunc, "remove", "temp", t) and t.result == false)
+
+Dir.make("temp")
+makeFile("temp/file.txt")
+
+-- will return nil because temp has a file
+testlib.test(OTHER,"Dir.remove", pcall(callDirFunc, "remove", "temp", t) and t.result == nil)
+
+testlib.testing("Dir.remove_all")
+
+testlib.test(OTHER,"Dir.remove_all", pcall(callDirFunc, "remove_all", "temp", t) and t.result == true)
+testlib.test(OTHER,"Dir.remove_all", pcall(callDirFunc, "remove_all", "temp", t) and t.result == false)
+
+Dir.make("temp")
+makeFile("temp/file1.txt")
+makeFile("temp/file2.txt")
+makeFile("temp/file3.txt")
+testlib.test(OTHER,"Dir.remove_all", pcall(callDirFunc, "remove_all", "temp", t) and t.result == true)
+testlib.test(OTHER,"Dir.remove_all", pcall(callDirFunc, "remove_all", "temp", t) and t.result == false)
+
+testlib.testing("Dir.open")
+
+Dir.make("temp")
+makeFile("temp/file1.txt")
+makeFile("temp/file2.txt")
+makeFile("temp/file3.txt")
+testlib.test(OTHER,"Dir.open", pcall(callDirFunc, "open", "temp", t))
+testlib.test(OTHER,"Dir.open", type(t.result) == 'userdata')
+testlib.test(OTHER,"Dir.open", typeof(t.result) == 'Dir')
+
+io.stdout:write("calling Dir object...")
+local dir = t.result
+local files = {}
+files[dir()] = true
+io.stdout:write("passed\n")
+files[dir()] = true
+files[dir()] = true
+
+testlib.test(OTHER,"Dir.call", files["file1.txt"])
+testlib.test(OTHER,"Dir.call", files["file2.txt"])
+testlib.test(OTHER,"Dir.call", files["file3.txt"])
+testlib.test(OTHER,"Dir.call", dir() == nil)
+testlib.test(OTHER,"Dir.call", dir() == nil)
+
+testlib.testing("Dir.close")
+
+testlib.test(OTHER,"Dir.close", pcall(callDirFunc, "close", dir, t))
+testlib.test(OTHER,"Dir.close", pcall(callDirFunc, "close", dir, t))
+
+testlib.testing("Negative testing 1")
+-- now try breaking it
+testlib.test(OTHER,"Dir.open", pcall(callDirFunc, "open", "temp", t))
+dir = t.result
+-- call dir() now
+files = {}
+files[dir()] = true
+
+Dir.remove_all("temp")
+
+-- call it again
+files[dir()] = true
+files[dir()] = true
+testlib.test(OTHER,"Dir.call", files["file1.txt"])
+testlib.test(OTHER,"Dir.call", files["file2.txt"])
+testlib.test(OTHER,"Dir.call", files["file3.txt"])
+testlib.test(OTHER,"Dir.close", pcall(callDirFunc, "close", dir, t))
+
+testlib.testing("Negative testing 2")
+-- do it again, but this time don't do dir() until after removing the files
+Dir.make("temp")
+makeFile("temp/file1.txt")
+makeFile("temp/file2.txt")
+makeFile("temp/file3.txt")
+
+testlib.test(OTHER,"Dir.open", pcall(callDirFunc, "open", "temp", t))
+dir = t.result
+
+Dir.remove_all("temp")
+-- now do it
+file = dir()
+testlib.test(OTHER,"Dir.call", file == nil)
+testlib.test(OTHER,"Dir.close", pcall(callDirFunc, "close", dir, t))
+
+
+-- negative tests
+testlib.testing("Negative testing 3")
+
+-- invalid args
+testlib.test(OTHER,"Dir.make", not pcall(callDirFunc, "make", {}, t))
+testlib.test(OTHER,"Dir.make", not pcall(callDirFunc, "make", nil, t))
+testlib.test(OTHER,"Dir.remove", not pcall(callDirFunc, "remove", {}, t))
+testlib.test(OTHER,"Dir.remove", not pcall(callDirFunc, "remove", nil, t))
+testlib.test(OTHER,"Dir.remove_all", not pcall(callDirFunc, "remove_all", {}, t))
+testlib.test(OTHER,"Dir.remove_all", not pcall(callDirFunc, "remove_all", nil, t))
+testlib.test(OTHER,"Dir.open", not pcall(callDirFunc, "open", {}, t))
+testlib.test(OTHER,"Dir.open", not pcall(callDirFunc, "open", nil, t))
+testlib.test(OTHER,"Dir.close", not pcall(callDirFunc, "close", "dir", t))
+testlib.test(OTHER,"Dir.close", not pcall(callDirFunc, "close", nil, t))
+
+
+print("\n-----------------------------\n")
+
+testlib.getResults()
diff --git a/test/lua/dissectFPM.lua b/test/lua/dissectFPM.lua
new file mode 100644
index 0000000..da52d74
--- /dev/null
+++ b/test/lua/dissectFPM.lua
@@ -0,0 +1,452 @@
+----------------------------------------
+--
+-- author: Hadriel Kaplan <hadriel@128technology.com>
+-- Copyright (c) 2015, Hadriel Kaplan
+-- This code is in the Public Domain, or the BSD (3 clause) license
+-- if Public Domain does not apply in your country.
+--
+-- Version: 1.0
+--
+------------------------------------------
+--[[
+ This code is a plugin for Wireshark, to dissect Quagga FPM Netlink
+ protocol messages over TCP.
+
+ This script is used for testing, so it does some odd things:
+ * it dissects the FPM in two ways, controlled by a pref setting:
+ 1) using the desegment_offset/desegment_len method
+ 2) using the dissect_tcp_pdus() method
+ * it removes any existing FPM dissector; there isn't one right now
+ but there likely will be in the future.
+
+ Wireshark has a "Netlink" protocol dissector, but it currently expects
+ to be running on a Linux cooked-mode SLL header and link type. That's
+ because Netlink has traditionally been used between the Linux kernel
+ and user-space apps. But the open-source Quagga, zebra, and the
+ commercial ZebOS routing products also send Netlink messages over TCP
+ to other processes or even outside the box, to a "Forwarding Plane Manager"
+ (FPM) that controls forwarding-plane devices (typically hardware).
+
+ The Netlink message is encapsulated within an FPM header, which identifies
+ an FPM message version (currently 1), the type of message it contains
+ (namely a Netlink message), and its length.
+
+ So we have:
+ struct fpm_msg_hdr_t
+ {
+ uint8_t version;
+ uint8_t msg_type;
+ uint16_t msg_len;
+ }
+ followed by a Netlink message.
+]]----------------------------------------
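+
+----------------------------------------
+-- purely as an illustration of the 4-byte header layout described above (this helper is
+-- NOT used by the dissector below), the header could be unpacked from a raw byte string
+-- with plain Lua like so:
+local function parse_fpm_hdr_example(raw)
+    local version  = raw:byte(1)
+    local msg_type = raw:byte(2)
+    -- msg_len is a big-endian uint16 spanning bytes 3-4
+    local msg_len  = raw:byte(3) * 256 + raw:byte(4)
+    return version, msg_type, msg_len
+end
+-- e.g. parse_fpm_hdr_example("\1\1\0\20") --> 1, 1, 20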
+
+
+----------------------------------------
+-- do not modify this table
+local debug_level = {
+ DISABLED = 0,
+ LEVEL_1 = 1,
+ LEVEL_2 = 2
+}
+
+-- set this DEBUG to debug_level.LEVEL_1 to enable printing debug_level info
+-- set it to debug_level.LEVEL_2 to enable really verbose printing
+-- note: this will be overridden by user's preference settings
+local DEBUG = debug_level.LEVEL_1
+
+local default_settings =
+{
+ debug_level = DEBUG,
+ enabled = true, -- whether this dissector is enabled or not
+ port = 2620,
+ max_msg_len = 4096,
+    desegment       = true, -- whether to do TCP desegmentation or not
+ dissect_tcp = false, -- whether to use the dissect_tcp_pdus method or not
+ subdissect = true, -- whether to call sub-dissector or not
+ subdiss_type = wtap.NETLINK, -- the encap we get the subdissector for
+}
+
+local dprint = function() end
+local dprint2 = function() end
+local function reset_debug_level()
+ if default_settings.debug_level > debug_level.DISABLED then
+ dprint = function(...)
+ print(table.concat({"Lua:", ...}," "))
+ end
+
+ if default_settings.debug_level > debug_level.LEVEL_1 then
+ dprint2 = dprint
+ end
+ end
+end
+-- call it now
+reset_debug_level()
+
+
+----------------------------------------
+-- creates a Proto object, but doesn't register it yet
+local fpmProto = Proto("fpm", "FPM Header")
+
+
+----------------------------------------
+-- a function to convert tables of enumerated types to valstring tables
+-- i.e., from { "name" = number } to { number = "name" }
+local function makeValString(enumTable)
+ local t = {}
+ for name,num in pairs(enumTable) do
+ t[num] = name
+ end
+ return t
+end
+
+local MsgType = {
+ NONE = 0,
+ NETLINK = 1,
+}
+local msgtype_valstr = makeValString(MsgType)
+
+
+----------------------------------------
+-- a table of all of our Protocol's fields
+local hdr_fields =
+{
+ version = ProtoField.uint8 ("fpm.version", "Version", base.DEC),
+ msg_type = ProtoField.uint8 ("fpm.type", "Type", base.DEC, msgtype_valstr),
+ msg_len = ProtoField.uint16("fpm.length", "Length", base.DEC),
+}
+
+-- create a flat array table of the above that can be registered
+local pfields = {}
+
+-- recursive function to flatten the table into pfields
+local function flattenTable(tbl)
+ for k,v in pairs(tbl) do
+ if type(v) == 'table' then
+ flattenTable(v)
+ else
+ pfields[#pfields+1] = v
+ end
+ end
+end
+-- call it
+flattenTable(hdr_fields)
+
+-- register them
+fpmProto.fields = pfields
+
+dprint2("fpmProto ProtoFields registered")
+
+
+----------------------------------------
+-- some forward "declarations" of helper functions we use in the dissector
+local createSLL
+
+-- due to a bug in wireshark, we need to keep newly created tvb's for longer
+-- than the duration of the dissect function
+local tvbs = {}
+
+function fpmProto.init()
+ tvbs = {}
+end
+
+
+local FPM_MSG_HDR_LEN = 4
+
+----------------------------------------
+-- the following function is used for the new dissect_tcp_pdus method
+-- this one returns the length of the full message
+local function get_fpm_length(tvbuf, pktinfo, offset)
+ dprint2("FPM get_fpm_length function called")
+ local lengthVal = tvbuf:range(offset + 2, 2):uint()
+
+ if lengthVal > default_settings.max_msg_len then
+ -- too many bytes, invalid message
+ dprint("FPM message length is too long: ", lengthVal)
+ lengthVal = tvbuf:len()
+ end
+
+ return lengthVal
+end
+
+-- the following is the dissection function called for
+-- the new dissect_tcp_pdus method
+local function dissect_fpm_pdu(tvbuf, pktinfo, root)
+ dprint2("FPM dissect_fpm_pdu function called")
+
+ local lengthTvbr = tvbuf:range(2, 2)
+ local lengthVal = lengthTvbr:uint()
+
+ -- set the protocol column to show our protocol name
+ pktinfo.cols.protocol:set("FPM")
+
+ -- We start by adding our protocol to the dissection display tree.
+    -- this function receives a tvb that starts at the beginning of the PDU,
+    -- so the header begins at offset 0
+    local tree = root:add(fpmProto, tvbuf:range(0, lengthVal))
+
+ local versionTvbr = tvbuf:range(0, 1)
+ local versionVal = versionTvbr:uint()
+ tree:add(hdr_fields.version, versionTvbr)
+
+ local msgTypeTvbr = tvbuf:range(1, 1)
+ local msgTypeVal = msgTypeTvbr:uint()
+ tree:add(hdr_fields.msg_type, msgTypeTvbr)
+
+ tree:add(hdr_fields.msg_len, lengthTvbr)
+
+ local result
+ if (versionVal == 1) and (msgTypeVal == MsgType.NETLINK) then
+ -- it carries a Netlink message, so we're going to create
+ -- a fake Linux SLL header for the built-in Netlink dissector
+ local payload = tvbuf:raw(FPM_MSG_HDR_LEN, lengthVal - FPM_MSG_HDR_LEN)
+ result = createSLL(payload)
+ end
+
+ -- looks good, go dissect it
+ if result then
+ -- ok now the hard part - try calling a sub-dissector?
+ -- only if settings/prefs told us to of course...
+ if default_settings.subdissect then
+ dprint2("FPM trying sub-dissector for wtap encap type:", default_settings.subdiss_type)
+
+ -- due to a bug in wireshark, we need to keep newly created tvb's for longer
+ -- than the duration of the dissect function
+ tvbs[#tvbs+1] = ByteArray.new(result, true):tvb("Netlink Message")
+ DissectorTable.get("wtap_encap"):try(default_settings.subdiss_type, tvbs[#tvbs], pktinfo, root)
+
+ -- local tvb = ByteArray.new(result, true):tvb("Netlink Message")
+ -- DissectorTable.get("wtap_encap"):try(default_settings.subdiss_type, tvb, pktinfo, root)
+ dprint2("FPM returning from sub-dissector")
+ end
+ else
+ dprint("FPM header not correctly dissected")
+ end
+
+ return lengthVal, 0
+end
+
+
+----------------------------------------
+-- the following function is used for dissecting using the
+-- old desegment_offset/desegment_len method
+-- it's a separate function because we run over TCP and thus might
+-- need to parse multiple messages in a single segment
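+-- it returns two values: the number of bytes consumed (0 if it could not finish a message),
+-- and a desegmentation hint the caller can use when more bytes are needed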
+local function dissect(tvbuf, pktinfo, root, offset, origlen)
+ dprint2("FPM dissect function called")
+
+ local pktlen = origlen - offset
+
+ if pktlen < FPM_MSG_HDR_LEN then
+ -- we need more bytes
+ pktinfo.desegment_offset = offset
+ pktinfo.desegment_len = DESEGMENT_ONE_MORE_SEGMENT
+ return 0, DESEGMENT_ONE_MORE_SEGMENT
+ end
+
+ local lengthTvbr = tvbuf:range(offset + 2, 2)
+ local lengthVal = lengthTvbr:uint()
+
+ if lengthVal > default_settings.max_msg_len then
+ -- too many bytes, invalid message
+ dprint("FPM message length is too long: ", lengthVal)
+ return pktlen, 0
+ end
+
+ if pktlen < lengthVal then
+ dprint2("Need more bytes to desegment FPM")
+ pktinfo.desegment_offset = offset
+ pktinfo.desegment_len = (lengthVal - pktlen)
+ return 0, -(lengthVal - pktlen)
+ end
+
+ -- set the protocol column to show our protocol name
+ pktinfo.cols.protocol:set("FPM")
+
+ -- We start by adding our protocol to the dissection display tree.
+ local tree = root:add(fpmProto, tvbuf:range(offset, lengthVal))
+
+ local versionTvbr = tvbuf:range(offset, 1)
+ local versionVal = versionTvbr:uint()
+ tree:add(hdr_fields.version, versionTvbr)
+
+ local msgTypeTvbr = tvbuf:range(offset + 1, 1)
+ local msgTypeVal = msgTypeTvbr:uint()
+ tree:add(hdr_fields.msg_type, msgTypeTvbr)
+
+ tree:add(hdr_fields.msg_len, lengthTvbr)
+
+ local result
+ if (versionVal == 1) and (msgTypeVal == MsgType.NETLINK) then
+ -- it carries a Netlink message, so we're going to create
+ -- a fake Linux SLL header for the built-in Netlink dissector
+ local payload = tvbuf:raw(offset + FPM_MSG_HDR_LEN, lengthVal - FPM_MSG_HDR_LEN)
+ result = createSLL(payload)
+ end
+
+ -- looks good, go dissect it
+ if result then
+ -- ok now the hard part - try calling a sub-dissector?
+ -- only if settings/prefs told us to of course...
+ if default_settings.subdissect then
+ dprint2("FPM trying sub-dissector for wtap encap type:", default_settings.subdiss_type)
+
+ -- due to a bug in wireshark, we need to keep newly created tvb's for longer
+ -- than the duration of the dissect function
+ tvbs[#tvbs+1] = ByteArray.new(result, true):tvb("Netlink Message")
+ DissectorTable.get("wtap_encap"):try(default_settings.subdiss_type, tvbs[#tvbs], pktinfo, root)
+
+ -- local tvb = ByteArray.new(result, true):tvb("Netlink Message")
+ -- DissectorTable.get("wtap_encap"):try(default_settings.subdiss_type, tvb, pktinfo, root)
+ dprint2("FPM returning from sub-dissector")
+ end
+ else
+ dprint("FPM header not correctly dissected")
+ end
+
+ return lengthVal, 0
+end
+
+
+----------------------------------------
+-- The following creates the callback function for the dissector.
+-- It's the same as doing "fpmProto.dissector = function (tvbuf,pkt,root)"
+-- The 'tvbuf' is a Tvb object, 'pktinfo' is a Pinfo object, and 'root' is a TreeItem object.
+-- Whenever Wireshark dissects a packet that our Proto is hooked into, it will call
+-- this function and pass it these arguments for the packet it's dissecting.
+function fpmProto.dissector(tvbuf, pktinfo, root)
+ dprint2("fpmProto.dissector called")
+
+ local bytes_consumed = 0
+
+ if default_settings.dissect_tcp then
+ dprint2("using new dissect_tcp_pdus method")
+ dissect_tcp_pdus(tvbuf, root, FPM_MSG_HDR_LEN, get_fpm_length, dissect_fpm_pdu, default_settings.desegment)
+ bytes_consumed = tvbuf:len()
+ else
+ dprint2("using old desegment_offset/desegment_len method")
+ -- get the length of the packet buffer (Tvb).
+ local pktlen = tvbuf:len()
+ local offset, bytes_needed = 0, 0
+
+ tvbs = {}
+ while bytes_consumed < pktlen do
+ offset, bytes_needed = dissect(tvbuf, pktinfo, root, bytes_consumed, pktlen)
+ if offset == 0 then
+ if bytes_consumed > 0 then
+ return bytes_consumed
+ else
+ return bytes_needed
+ end
+ end
+ bytes_consumed = bytes_consumed + offset
+ end
+ end
+
+ return bytes_consumed
+end
+
+
+----------------------------------------
+-- we want to have our protocol dissection invoked for a specific TCP port,
+-- so get the TCP dissector table and add our protocol to it
+-- but first save any existing dissector for that port, if there is one, so it can be restored later
+local old_dissector = DissectorTable.get("tcp.port"):get_dissector(default_settings.port)
+if old_dissector then
+ dprint("Retrieved existing dissector")
+end
+
+local function enableDissector()
+ DissectorTable.get("tcp.port"):set(default_settings.port, fpmProto)
+end
+-- call it now
+enableDissector()
+
+local function disableDissector()
+ if old_dissector then
+ DissectorTable.get("tcp.port"):set(default_settings.port, old_dissector)
+ end
+end
+
+
+--------------------------------------------------------------------------------
+-- preferences handling stuff
+--------------------------------------------------------------------------------
+
+local debug_pref_enum = {
+ { 1, "Disabled", debug_level.DISABLED },
+ { 2, "Level 1", debug_level.LEVEL_1 },
+ { 3, "Level 2", debug_level.LEVEL_2 },
+}
+
+----------------------------------------
+-- register our preferences
+fpmProto.prefs.enabled = Pref.bool("Dissector enabled", default_settings.enabled,
+ "Whether the FPM dissector is enabled or not")
+
+
+fpmProto.prefs.desegment = Pref.bool("Reassemble FPM messages spanning multiple TCP segments",
+ default_settings.desegment,
+ "Whether the FPM dissector should reassemble"..
+ " messages spanning multiple TCP segments."..
+ " To use this option, you must also enable"..
+ " \"Allow subdissectors to reassemble TCP"..
+ " streams\" in the TCP protocol settings.")
+
+fpmProto.prefs.dissect_tcp = Pref.bool("Use dissect_tcp_pdus", default_settings.dissect_tcp,
+ "Whether the FPM dissector should use the new" ..
+ " dissect_tcp_pdus model or not")
+
+fpmProto.prefs.subdissect = Pref.bool("Enable sub-dissectors", default_settings.subdissect,
+ "Whether the FPM packet's content" ..
+ " should be dissected or not")
+
+fpmProto.prefs.debug = Pref.enum("Debug", default_settings.debug_level,
+ "The debug printing level", debug_pref_enum)
+
+----------------------------------------
+-- a function for handling prefs being changed
+function fpmProto.prefs_changed()
+ dprint2("prefs_changed called")
+
+ default_settings.dissect_tcp = fpmProto.prefs.dissect_tcp
+
+ default_settings.subdissect = fpmProto.prefs.subdissect
+
+ default_settings.debug_level = fpmProto.prefs.debug
+ reset_debug_level()
+
+ if default_settings.enabled ~= fpmProto.prefs.enabled then
+ default_settings.enabled = fpmProto.prefs.enabled
+ if default_settings.enabled then
+ enableDissector()
+ else
+ disableDissector()
+ end
+ -- have to reload the capture file for this type of change
+ reload()
+ end
+
+end
+
+dprint2("pcapfile Prefs registered")
+
+
+----------------------------------------
+-- the hatype field of the SLL must be 824 decimal, in big-endian encoding (0x0338)
+local ARPHRD_NETLINK = "\003\056"
+local WS_NETLINK_ROUTE = "\000\000"
+local function emptyBytes(num)
+ return string.rep("\000", num)
+end
+
+createSLL = function (payload)
+ dprint2("FPM createSLL function called")
+ local sllmsg =
+ {
+ emptyBytes(2), -- Unused 2B
+ ARPHRD_NETLINK, -- netlink type
+ emptyBytes(10), -- Unused 10B
+ WS_NETLINK_ROUTE, -- Route type
+ payload -- the Netlink message
+ }
+ return table.concat(sllmsg)
+end
diff --git a/test/lua/dissector.lua b/test/lua/dissector.lua
new file mode 100644
index 0000000..836aa7b
--- /dev/null
+++ b/test/lua/dissector.lua
@@ -0,0 +1,659 @@
+----------------------------------------
+-- script-name: dns_dissector.lua
+--
+-- author: Hadriel Kaplan <hadrielk at yahoo dot com>
+-- Copyright (c) 2014, Hadriel Kaplan
+-- This code is in the Public Domain, or the BSD (3 clause) license if Public Domain does not apply
+-- in your country.
+--
+-- Version: 2.1
+--
+-- Changes since 2.0:
+-- * fixed a bug with default settings
+-- * added ability for command-line to override defaults
+--
+-- Changes since 1.0:
+-- * made it use the new ProtoExpert class model for expert info
+-- * add a protocol column with the proto name
+-- * added heuristic dissector support
+-- * added preferences settings
+-- * removed byteArray2String(), and uses the new ByteArray:raw() method instead
+--
+-- BACKGROUND:
+-- This is an example Lua script for a protocol dissector. The purpose of this script is two-fold:
+-- * To provide a reference tutorial for others writing Wireshark dissectors in Lua
+-- * To test various functions being called in various ways, so this script can be used in the test-suites
+-- I've tried to meet both of those goals, but it wasn't easy. No doubt some folks will wonder why some
+-- functions are called some way, or differently than previous invocations of the same function. I'm trying to
+-- to show both that it can be done numerous ways, but also I'm trying to test those numerous ways, and my more
+-- immediate need is for test coverage rather than tutorial guide. (the Lua API is sorely lacking in test scripts)
+--
+-- OVERVIEW:
+-- This script creates an elementary dissector for DNS. It's neither comprehensive nor error-free with regards
+-- to the DNS protocol. That's OK. The goal isn't to fully dissect DNS properly - Wireshark already has a good
+-- DNS dissector built-in. We don't need another one. We also have other example Lua scripts, but I don't think
+-- they do a good job of explaining things, and the nice thing about this one is getting capture files to
+-- run it against is trivial. (plus I uploaded one)
+--
+-- HOW TO RUN THIS SCRIPT:
+-- Wireshark and Tshark support multiple ways of loading Lua scripts: through a dofile() call in init.lua,
+-- through the file being in either the global or personal plugins directories, or via the command line.
+-- See the Wireshark User's Guide chapter on Lua (https://www.wireshark.org/docs/wsdg_html_chunked/wsluarm_modules.html).
+-- Once the script is loaded, it creates a new protocol named "MyDNS" (or "MYDNS" in some places). If you have
+-- a capture file with DNS packets in it, simply select one in the Packet List pane, right-click on it, and
+-- select "Decode As ...", and then in the dialog box that shows up scroll down the list of protocols to one
+-- called "MYDNS", select that and click the "ok" or "apply" button. Voila`, you're now decoding DNS packets
+-- using the simplistic dissector in this script. Another way is to download the capture file made for
+-- this script, and open that - since the DNS packets in it use UDP port 65333 (instead of the default 53),
+-- and since the MyDNS protocol in this script has been set to automatically decode UDP port 65333, it will
+-- automagically do it without doing "Decode As ...".
+--
+----------------------------------------
+-- do not modify this table
+local debug_level = {
+ DISABLED = 0,
+ LEVEL_1 = 1,
+ LEVEL_2 = 2
+}
+
+-- set this DEBUG to debug_level.LEVEL_1 to enable printing debug_level info
+-- set it to debug_level.LEVEL_2 to enable really verbose printing
+-- note: this will be overridden by user's preference settings
+local DEBUG = debug_level.LEVEL_1
+
+local default_settings =
+{
+ debug_level = DEBUG,
+ port = 65333,
+ heur_enabled = true,
+ heur_regmode = 1,
+}
+
+-- for testing purposes, we want to be able to pass in changes to the defaults
+-- from the command line; because you can't set lua preferences from the command
+-- line using the '-o' switch (the preferences don't exist until this script is
+-- loaded, so the command line thinks they're invalid preferences being set)
+-- so we pass them in as command arguments instead, and handle it here:
+local args={...} -- get passed-in args
+if args and #args > 0 then
+ for _, arg in ipairs(args) do
+ local name, value = arg:match("(.+)=(.+)")
+ if name and value then
+ if tonumber(value) then
+ value = tonumber(value)
+ elseif value == "true" or value == "TRUE" then
+ value = true
+ elseif value == "false" or value == "FALSE" then
+ value = false
+ elseif value == "DISABLED" then
+ value = debug_level.DISABLED
+ elseif value == "LEVEL_1" then
+ value = debug_level.LEVEL_1
+ elseif value == "LEVEL_2" then
+ value = debug_level.LEVEL_2
+ else
+ error("invalid commandline argument value")
+ end
+ else
+ error("invalid commandline argument syntax")
+ end
+
+ default_settings[name] = value
+ end
+end
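+-- for example, from the command line this script's defaults can typically be overridden
+-- with something like (the lua_script1 number refers to the first script loaded via -X):
+--   tshark -r <capture> -X lua_script:dissector.lua -X lua_script1:heur_regmode=3 -X lua_script1:port=65333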
+
+local dprint = function() end
+local dprint2 = function() end
+local function reset_debug_level()
+ if default_settings.debug_level > debug_level.DISABLED then
+ dprint = function(...)
+ print(table.concat({"Lua:", ...}," "))
+ end
+
+ if default_settings.debug_level > debug_level.LEVEL_1 then
+ dprint2 = dprint
+ end
+ end
+end
+-- call it now
+reset_debug_level()
+
+dprint2("Wireshark version = ", get_version())
+dprint2("Lua version = ", _VERSION)
+
+----------------------------------------
+-- Unfortunately, the older Wireshark/Tshark versions have bugs, and part of the point
+-- of this script is to test those bugs are now fixed. So we need to check the version
+-- and error out if it's too old.
+local major, minor, micro = get_version():match("(%d+)%.(%d+)%.(%d+)")
+if major and tonumber(major) <= 1 and ((tonumber(minor) <= 10) or (tonumber(minor) == 11 and tonumber(micro) < 3)) then
+ error( "Sorry, but your Wireshark/Tshark version ("..get_version()..") is too old for this script!\n"..
+ "This script needs Wireshark/Tshark version 1.11.3 or higher.\n" )
+end
+
+-- more sanity checking
+-- verify we have the ProtoExpert class in wireshark, as that's the newest thing this file uses
+assert(ProtoExpert.new, "Wireshark does not have the ProtoExpert class, so it's too old - get the latest 1.11.3 or higher")
+
+----------------------------------------
+
+
+----------------------------------------
+-- creates a Proto object, but doesn't register it yet
+local dns = Proto("mydns","MyDNS Protocol")
+
+----------------------------------------
+-- multiple ways to do the same thing: create a protocol field (but not register it yet)
+-- the abbreviation should always have "<myproto>." before the specific abbreviation, to avoid collisions
+local pf_trasaction_id = ProtoField.new ("Transaction ID", "mydns.trans_id", ftypes.UINT16)
+local pf_flags = ProtoField.new ("Flags", "mydns.flags", ftypes.UINT16, nil, base.HEX)
+local pf_num_questions = ProtoField.uint16("mydns.num_questions", "Number of Questions")
+local pf_num_answers = ProtoField.uint16("mydns.num_answers", "Number of Answer RRs")
+local pf_num_authority_rr = ProtoField.uint16("mydns.num_authority_rr", "Number of Authority RRs")
+local pf_num_additional_rr = ProtoField.uint16("mydns.num_additional_rr", "Number of Additional RRs")
+
+-- within the flags field, we want to parse/show the bits separately
+-- note the "base" argument becomes the size of the bitmask'ed field when ftypes.BOOLEAN is used
+-- the "mask" argument is which bits we want to use for this field (e.g., base=16 and mask=0x8000 means we want the top bit of a 16-bit field)
+-- again the following shows different ways of doing the same thing basically
+local pf_flag_response = ProtoField.new ("Response", "mydns.flags.response", ftypes.BOOLEAN, {"this is a response","this is a query"}, 16, 0x8000, "is the message a response?")
+local pf_flag_opcode = ProtoField.new ("Opcode", "mydns.flags.opcode", ftypes.UINT16, nil, base.DEC, 0x7800, "operation code")
+local pf_flag_authoritative = ProtoField.new ("Authoritative", "mydns.flags.authoritative", ftypes.BOOLEAN, nil, 16, 0x0400, "is the response authoritative?")
+local pf_flag_truncated = ProtoField.bool ("mydns.flags.truncated", "Truncated", 16, nil, 0x0200, "is the message truncated?")
+local pf_flag_recursion_desired = ProtoField.bool ("mydns.flags.recursion_desired", "Recursion desired", 16, {"yes","no"}, 0x0100, "do the query recursivley?")
+local pf_flag_recursion_available = ProtoField.bool ("mydns.flags.recursion_available", "Recursion available", 16, nil, 0x0080, "does the server support recursion?")
+local pf_flag_z = ProtoField.uint16("mydns.flags.z", "World War Z - Reserved for future use", base.HEX, nil, 0x0040, "when is it the future?")
+local pf_flag_authenticated = ProtoField.bool ("mydns.flags.authenticated", "Authenticated", 16, {"yes","no"}, 0x0020, "did the server DNSSEC authenticate?")
+local pf_flag_checking_disabled = ProtoField.bool ("mydns.flags.checking_disabled", "Checking disabled", 16, nil, 0x0010)
+
+-- no, these aren't all the DNS response codes - this is just an example
+local rcodes = {
+ [0] = "No Error",
+ [1] = "Format Error",
+ [2] = "Server Failure",
+ [3] = "Non-Existent Domain",
+ [9] = "Server Not Authoritative for zone"
+}
+-- the above rcodes table is used in this next ProtoField
+local pf_flag_rcode = ProtoField.uint16("mydns.flags.rcode", "Response code", base.DEC, rcodes, 0x000F)
+local pf_query = ProtoField.new("Query", "mydns.query", ftypes.BYTES)
+local pf_query_name = ProtoField.new("Name", "mydns.query.name", ftypes.STRING)
+local pf_query_name_len = ProtoField.new("Name Length", "mydns.query.name.len", ftypes.UINT8)
+local pf_query_label_count = ProtoField.new("Label Count", "mydns.query.label.count", ftypes.UINT8)
+local rrtypes = { [1] = "A (IPv4 host address)", [2] = "NS (authoritative name server)", [28] = "AAAA (for geeks only)" }
+local pf_query_type = ProtoField.uint16("mydns.query.type", "Type", base.DEC, rrtypes)
+-- again, not all class types are listed here
+local classes = {
+ [0] = "Reserved",
+ [1] = "IN (Internet)",
+ [2] = "The 1%",
+ [5] = "First class",
+ [6] = "Business class",
+ [65535] = "Cattle class"
+}
+local pf_query_class = ProtoField.uint16("mydns.query.class", "Class", base.DEC, classes, nil, "keep it classy folks")
+
+----------------------------------------
+-- this actually registers the ProtoFields above, into our new Protocol
+-- in a real script I wouldn't do it this way; I'd build a table of fields programmatically
+-- and then set dns.fields to it, so as to avoid forgetting a field
+dns.fields = { pf_trasaction_id, pf_flags,
+ pf_num_questions, pf_num_answers, pf_num_authority_rr, pf_num_additional_rr,
+ pf_flag_response, pf_flag_opcode, pf_flag_authoritative,
+ pf_flag_truncated, pf_flag_recursion_desired, pf_flag_recursion_available,
+ pf_flag_z, pf_flag_authenticated, pf_flag_checking_disabled, pf_flag_rcode,
+ pf_query, pf_query_name, pf_query_name_len, pf_query_label_count, pf_query_type, pf_query_class }
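+-- (as a rough sketch of the programmatic approach mentioned above - not used here, since
+-- running it as well would try to register the fields twice:
+--     local my_fields = {}
+--     local function addfield(pf) my_fields[#my_fields+1] = pf ; return pf end
+--     local pf_flags = addfield(ProtoField.new("Flags", "mydns.flags", ftypes.UINT16, nil, base.HEX))
+--     -- ...create every other field the same way...
+--     dns.fields = my_fields
+-- that way it's impossible to forget to put a field you created into the table)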
+
+----------------------------------------
+-- create some expert info fields (this is new functionality in 1.11.3)
+-- Expert info fields are very similar to proto fields: they're tied to our protocol,
+-- they're created in a similar way, and registered by setting a 'experts' field to
+-- a table of them just as proto fields were put into the 'dns.fields' above
+-- The old way of creating expert info was to just add it to the tree, but that
+-- didn't let the expert info be filterable in wireshark, whereas this way does
+local ef_query = ProtoExpert.new("mydns.query.expert", "DNS query message",
+ expert.group.REQUEST_CODE, expert.severity.CHAT)
+local ef_response = ProtoExpert.new("mydns.response.expert", "DNS response message",
+ expert.group.RESPONSE_CODE, expert.severity.CHAT)
+local ef_ultimate = ProtoExpert.new("mydns.response.ultimate.expert", "DNS answer to life, the universe, and everything",
+ expert.group.COMMENTS_GROUP, expert.severity.NOTE)
+-- some error expert infos
+local ef_too_short = ProtoExpert.new("mydns.too_short.expert", "DNS message too short",
+ expert.group.MALFORMED, expert.severity.ERROR)
+local ef_bad_query = ProtoExpert.new("mydns.query.missing.expert", "DNS query missing or malformed",
+ expert.group.MALFORMED, expert.severity.WARN)
+
+-- register them
+dns.experts = { ef_query, ef_too_short, ef_bad_query, ef_response, ef_ultimate }
+
+----------------------------------------
+-- we don't just want to display our protocol's fields, we want to access the value of some of them too!
+-- There are several ways to do that. One is to just parse the buffer contents in Lua code to find
+-- the values. But since ProtoFields actually do the parsing for us, and can be retrieved using Field
+-- objects, it's kinda cool to do it that way. So let's create some Fields to extract the values.
+-- The following creates the Field objects, but they're not 'registered' until after this script is loaded.
+-- Also, these lines can't be before the 'dns.fields = ...' line above, because the Field.new() here is
+-- referencing fields we're creating, and they're not "created" until that line above.
+-- Furthermore, you cannot put these 'Field.new()' lines inside the dissector function.
+-- Before Wireshark version 1.11, you couldn't even do this concept (of using fields you just created).
+local questions_field = Field.new("mydns.num_questions")
+local query_type_field = Field.new("mydns.query.type")
+local query_class_field = Field.new("mydns.query.class")
+local response_field = Field.new("mydns.flags.response")
+
+-- here's a little helper function to access the response_field value later.
+-- Like any Field retrieval, you can't retrieve a field's value until its value has been
+-- set, which won't happen until we actually use our ProtoFields in TreeItem:add() calls.
+-- So this isResponse() function can't be used until after the pf_flag_response ProtoField
+-- has been used inside the dissector.
+-- Note that calling the Field object returns a FieldInfo object, and calling that
+-- returns the value of the field - in this case a boolean true/false, since we set the
+-- "mydns.flags.response" ProtoField to ftype.BOOLEAN way earlier when we created the
+-- pf_flag_response ProtoField. Clear as mud?
+--
+-- A shorter version of this function would be:
+-- local function isResponse() return response_field()() end
+-- but I thought the below is easier to understand.
+local function isResponse()
+ local response_fieldinfo = response_field()
+ return response_fieldinfo()
+end
+
+--------------------------------------------------------------------------------
+-- preferences handling stuff
+--------------------------------------------------------------------------------
+
+-- a "enum" table for our enum pref, as required by Pref.enum()
+-- having the "index" number makes ZERO sense, and is completely illogical
+-- but it's what the code has expected it to be for a long time. Ugh.
+local debug_pref_enum = {
+ { 1, "Disabled", debug_level.DISABLED },
+ { 2, "Level 1", debug_level.LEVEL_1 },
+ { 3, "Level 2", debug_level.LEVEL_2 },
+}
+
+dns.prefs.debug = Pref.enum("Debug", default_settings.debug_level,
+ "The debug printing level", debug_pref_enum)
+
+dns.prefs.port = Pref.uint("Port number", default_settings.port,
+ "The UDP port number for MyDNS")
+
+dns.prefs.heur = Pref.bool("Heuristic enabled", default_settings.heur_enabled,
+ "Whether heuristic dissection is enabled or not")
+
+----------------------------------------
+-- a function for handling prefs being changed
+function dns.prefs_changed()
+ dprint2("prefs_changed called")
+
+ default_settings.debug_level = dns.prefs.debug
+ reset_debug_level()
+
+ default_settings.heur_enabled = dns.prefs.heur
+
+ if default_settings.port ~= dns.prefs.port then
+ -- remove old one, if not 0
+ if default_settings.port ~= 0 then
+ dprint2("removing MyDNS from port",default_settings.port)
+ DissectorTable.get("udp.port"):remove(default_settings.port, dns)
+ end
+ -- set our new default
+ default_settings.port = dns.prefs.port
+ -- add new one, if not 0
+ if default_settings.port ~= 0 then
+ dprint2("adding MyDNS to port",default_settings.port)
+ DissectorTable.get("udp.port"):add(default_settings.port, dns)
+ end
+ end
+
+end
+
+dprint2("MyDNS Prefs registered")
+
+
+----------------------------------------
+---- some constants for later use ----
+-- the DNS header size
+local DNS_HDR_LEN = 12
+
+-- the smallest possible DNS query field size
+-- has to be at least a label null terminator, 2-bytes type and 2-bytes class
+local MIN_QUERY_LEN = 5
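+-- (e.g., a query for the root zone "." is just the 1-byte null label plus 2-byte type and 2-byte class: 1+2+2 = 5)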
+
+----------------------------------------
+-- some forward "declarations" of helper functions we use in the dissector
+-- I don't usually use this trick, but I think it makes this script easier to read/grok,
+-- since we don't have to wade through the helper definitions before the dissector.
+local getQueryName
+
+
+----------------------------------------
+-- The following creates the callback function for the dissector.
+-- It's the same as doing "dns.dissector = function (tvbuf,pkt,root)"
+-- The 'tvbuf' is a Tvb object, 'pktinfo' is a Pinfo object, and 'root' is a TreeItem object.
+-- Whenever Wireshark dissects a packet that our Proto is hooked into, it will call
+-- this function and pass it these arguments for the packet it's dissecting.
+function dns.dissector(tvbuf,pktinfo,root)
+ dprint2("dns.dissector called")
+
+ -- set the protocol column to show our protocol name
+ pktinfo.cols.protocol:set("MYDNS")
+
+ -- We want to check that the packet size is rational during dissection, so let's get the length of the
+ -- packet buffer (Tvb).
+ -- Because DNS has no additional payload data other than itself, and it rides on UDP without padding,
+ -- we can use tvb:len() or tvb:reported_len() here; but I prefer tvb:reported_length_remaining() as it's safer.
+ local pktlen = tvbuf:reported_length_remaining()
+
+ -- We start by adding our protocol to the dissection display tree.
+ -- A call to tree:add() returns the child created, so we can add more "under" it using that return value.
+ -- The second argument is how much of the buffer/packet this added tree item covers/represents - in this
+ -- case (DNS protocol) that's the remainder of the packet.
+ local tree = root:add(dns, tvbuf:range(0,pktlen))
+
+ -- now let's check it's not too short
+ if pktlen < DNS_HDR_LEN then
+ -- since we're going to add this protocol to a specific UDP port, we're going to
+ -- assume packets in this port are our protocol, so the packet being too short is an error
+ -- the old way: tree:add_expert_info(PI_MALFORMED, PI_ERROR, "packet too short")
+ -- the correct way now:
+ tree:add_proto_expert_info(ef_too_short)
+ dprint("packet length",pktlen,"too short")
+ return
+ end
+
+ -- Now let's add our transaction id under our dns protocol tree we just created.
+ -- The transaction id starts at offset 0, for 2 bytes length.
+ tree:add(pf_trasaction_id, tvbuf:range(0,2))
+
+ -- We'd like to put the transaction id number in the GUI row for this packet, in its
+ -- INFO column/cell. First we need the transaction id value, though. Since we just
+ -- dissected it with the previous code line, we could now get it using a Field's
+ -- FieldInfo extractor, but instead we'll get it directly from the TvbRange just
+ -- to show how to do that. We'll use Field/FieldInfo extractors later on...
+ local transid = tvbuf:range(0,2):uint()
+ pktinfo.cols.info:set("(".. transid ..")")
+
+ -- now let's add the flags, which are all in the packet bytes at offset 2 of length 2
+ -- instead of calling this again and again, let's just use a variable
+ local flagrange = tvbuf:range(2,2)
+
+ -- for our flags field, we want a sub-tree
+ local flag_tree = tree:add(pf_flags, flagrange)
+ -- I'm indenting this for clarity, because it's adding to the flag's child-tree
+
+ -- let's add the type of message (query vs. response)
+ local query_flag_tree = flag_tree:add(pf_flag_response, flagrange)
+
+ -- let's also add an expert info about it
+ if isResponse() then
+ query_flag_tree:add_proto_expert_info(ef_response, "It's a response!")
+ if transid == 42 then
+ tree:add_tvb_expert_info(ef_ultimate, tvbuf:range(0,2))
+ end
+ else
+ query_flag_tree:add_proto_expert_info(ef_query)
+ end
+
+ -- we now know if it's a response or query, so let's put that in the
+ -- GUI packet row, in the INFO column cell
+ -- this line of code uses a Lua trick for doing something similar to
+ -- the C/C++ 'test ? true : false' shorthand
+ pktinfo.cols.info:prepend(isResponse() and "Response " or "Query ")
+
+ flag_tree:add(pf_flag_opcode, flagrange)
+
+ if isResponse() then
+ flag_tree:add(pf_flag_authoritative, flagrange)
+ end
+
+ flag_tree:add(pf_flag_truncated, flagrange)
+
+ if isResponse() then
+ flag_tree:add(pf_flag_recursion_available, flagrange)
+ else
+ flag_tree:add(pf_flag_recursion_desired, flagrange)
+ end
+
+ flag_tree:add(pf_flag_z, flagrange)
+
+ if isResponse() then
+ flag_tree:add(pf_flag_authenticated, flagrange)
+ flag_tree:add(pf_flag_rcode, flagrange)
+ end
+
+ flag_tree:add(pf_flag_checking_disabled, flagrange)
+
+ -- now add more to the main mydns tree
+ tree:add(pf_num_questions, tvbuf:range(4,2))
+ tree:add(pf_num_answers, tvbuf:range(6,2))
+ -- another way to get a TvbRange is just to call the Tvb like this
+ tree:add(pf_num_authority_rr, tvbuf(8,2))
+ -- or if we're crazy, we can create a sub-TvbRange, from a sub-TvbRange of the TvbRange
+ tree:add(pf_num_additional_rr, tvbuf:range(10,2):range()())
+
+ local num_queries = questions_field()()
+ local pos = DNS_HDR_LEN
+
+ if num_queries > 0 then
+ -- let's create a sub-tree, using a plain text description (not a field from the packet)
+ local queries_tree = tree:add("Queries")
+
+ local pktlen_remaining = pktlen - pos
+
+ -- multiple questions in one query hasn't been used for a long time, but just in case, let's loop
+ while num_queries > 0 and pktlen_remaining > 0 do
+ if pktlen_remaining < MIN_QUERY_LEN then
+ -- old way: queries_tree:add_expert_info(PI_MALFORMED, PI_ERROR, "query field missing or too short")
+ queries_tree:add_proto_expert_info(ef_bad_query)
+ return
+ end
+
+ -- we don't know how long this query field in total is, so we have to parse it first before
+ -- adding it to the tree, because we want to identify the correct bytes it covers
+ local label_count, name, name_len = getQueryName(tvbuf:range(pos,pktlen_remaining))
+ if not label_count then
+ queries_tree:add_expert_info(PI_MALFORMED, PI_ERROR, name)
+ return
+ end
+
+ -- now add the first query to the 'Queries' child tree we just created
+ -- we're going to change the string generated by this later, after we figure out the subsequent fields.
+ -- the whole query field is the query name field length we just got, plus 2-byte type and 2-byte class.
+ local q_tree = queries_tree:add(pf_query, tvbuf:range(pos, name_len + 4))
+
+ q_tree:add(pf_query_name, tvbuf:range(pos, name_len), name)
+ pos = pos + name_len
+
+ pktinfo.cols.info:append(" "..name)
+
+ -- the following tree items are generated by us, not encoded in the packet per se, so mark them as such
+ q_tree:add(pf_query_name_len, name_len):set_generated()
+ q_tree:add(pf_query_label_count, label_count):set_generated()
+
+ q_tree:add(pf_query_type, tvbuf:range(pos, 2))
+ q_tree:add(pf_query_class, tvbuf:range(pos + 2, 2))
+ pos = pos + 4
+
+ -- now change the query text
+ -- calling a Field returns a multival of one FieldInfo object for
+ -- each value, so we select() only the most recent one
+ q_tree:set_text(name..": type "..select(-1, query_type_field()).display
+ ..", class "..select(-1, query_class_field()).display)
+
+ pktlen_remaining = pktlen_remaining - (name_len + 4)
+ num_queries = num_queries - 1
+ end -- end of while loop
+
+ if num_queries > 0 then
+ -- we didn't process them all
+ queries_tree:add_expert_info(PI_MALFORMED, PI_ERROR, num_queries .. " query field(s) missing")
+ return
+ end
+ end
+
+ -- parsing answers, authority RRs, and additional RRs is up to you!
+
+ dprint2("dns.dissector returning",pos)
+
+ -- tell wireshark how much of tvbuff we dissected
+ return pos
+end
+
+----------------------------------------
+-- we want to have our protocol dissection invoked for a specific UDP port,
+-- so get the udp dissector table and add our protocol to it
+DissectorTable.get("udp.port"):add(default_settings.port, dns)
+
+----------------------------------------
+-- we also want to add the heuristic dissector, for any UDP protocol
+-- first we need a heuristic dissection function
+-- this is that function - when wireshark invokes this, it will pass in the same
+-- things it passes in to the "dissector" function, but we only want to actually
+-- dissect it if it's for us, and we need to return true if it's for us, or else false
+-- figuring out if it's for us or not is not easy
+-- we need to try as hard as possible, or else we'll think it's for us when it's
+-- not and block other heuristic dissectors from getting their chance
+--
+-- in practice, you'd never set a dissector like this to be heuristic, because there
+-- just isn't enough information to safely detect if it's DNS or not
+-- but I'm doing it to show how it would be done
+--
+-- Note: this heuristic stuff is new in 1.11.3
+local function heur_dissect_dns(tvbuf,pktinfo,root)
+ dprint2("heur_dissect_dns called")
+
+ -- if our preferences tell us not to do this, return false
+ if not default_settings.heur_enabled then
+ return false
+ end
+
+ if tvbuf:len() < DNS_HDR_LEN then
+ dprint("heur_dissect_dns: tvb shorter than DNS_HDR_LEN of:",DNS_HDR_LEN)
+ return false
+ end
+
+ local tvbr = tvbuf:range(0,DNS_HDR_LEN)
+
+ -- the first 2 bytes are transaction id, which can be anything so no point in checking those
+ -- the next 2 bytes contain flags, a couple of which have some values we can check against
+
+ -- the opcode has to be 0, 1, 2, 4 or 5
+ -- the opcode field starts at bit offset 17 (in C-indexing), for 4 bits in length
+ local check = tvbr:bitfield(17,4)
+ if check == 3 or check > 5 then
+ dprint("heur_dissect_dns: invalid opcode:",check)
+ return false
+ end
+
+ -- the rcode has to be 0-10, 16-22 (we're ignoring private use rcodes here)
+ -- the rcode field starts at bit offset 28 (in C-indexing), for 4 bits in length
+ check = tvbr:bitfield(28,4)
+ if check > 22 or (check > 10 and check < 16) then
+ dprint("heur_dissect_dns: invalid rcode:",check)
+ return false
+ end
+
+ dprint2("heur_dissect_dns checking questions/answers")
+
+ -- now let's verify the number of questions/answers are reasonable
+ check = tvbr:range(4,2):uint() -- num questions
+ if check > 100 then return false end
+ check = tvbr:range(6,2):uint() -- num answers
+ if check > 100 then return false end
+ check = tvbr:range(8,2):uint() -- num authority
+ if check > 100 then return false end
+ check = tvbr:range(10,2):uint() -- num additional
+ if check > 100 then return false end
+
+ dprint2("heur_dissect_dns: everything looks good calling the real dissector")
+
+ -- don't do this line in your script - I'm just doing it so our test-suite can
+ -- verify this script
+ root:add("Heuristic dissector used"):set_generated()
+
+ -- ok, looks like it's ours, so go dissect it
+ -- note: calling the dissector directly like this is new in 1.11.3
+ -- also note that calling a Dissector object, as this does, means we don't
+ -- get back the return value of the dissector function we created previously
+ -- so it might be better to just call the function directly instead of doing
+ -- this, but this script is used for testing and this tests the call() function
+ dns.dissector(tvbuf,pktinfo,root)
+
+ -- since this is over a transport protocol, such as UDP, we can set the
+ -- conversation to make it sticky for our dissector, so that all future
+ -- packets to/from the same address:port pair will just call our dissector
+ -- function directly instead of this heuristic function
+ -- this is a new attribute of pinfo in 1.11.3
+ pktinfo.conversation = dns
+
+ return true
+end
+
+-- now register that heuristic dissector into the udp heuristic list
+if default_settings.heur_regmode == 1 then
+ -- this is the "normal" way to register a heuristic: using a lua function
+ dns:register_heuristic("udp",heur_dissect_dns)
+elseif default_settings.heur_regmode == 2 then
+ -- this is to test the fix for bug 10695:
+ dns:register_heuristic("udp",dns.dissector)
+elseif default_settings.heur_regmode == 3 then
+ -- and this too is to test the fix for bug 10695:
+ dns:register_heuristic("udp", function (...) return dns.dissector(...); end )
+end
+
+-- We're done!
+-- our protocol (Proto) gets automatically registered after this script finishes loading
+----------------------------------------
+
+----------------------------------------
+-- DNS query names are not just null-terminated strings; they're actually a sequence of
+-- 'labels', with a length octet before each one. So "foobar.com" is actually the
+-- string "\06foobar\03com\00". We could create a ProtoField for label_length and label_name
+-- or whatever, but since this is an example script I'll show how to do it in raw code.
+-- This function is given the TvbRange object from the dissector() function, and needs to
+-- parse it.
+-- On success, it returns three things: the number of labels, the name string, and how
+-- many bytes it covered of the buffer.
+-- On failure, it returns nil and the error message.
+getQueryName = function (tvbr)
+ local label_count = 0
+ local name = ""
+ local name_len = 0
+
+ local len_remaining = tvbr:len()
+ if len_remaining < 2 then
+ -- it's too short
+ return nil, "invalid name"
+ end
+
+ local barray = tvbr:bytes() -- gets a ByteArray of the TvbRange
+ local pos = 0 -- unlike Lua, ByteArray uses 0-based indexing
+
+ repeat
+ local label_len = barray:get_index(pos)
+ if label_len >= len_remaining then
+ return nil, "invalid label length of "..label_len
+ end
+ pos = pos + 1 -- move past label length octet
+ if label_len > 0 then
+ -- append the label and a dot to name string
+ -- note: this uses the new method of ByteArray:raw(), added in 1.11.3
+ name = name .. barray:raw(pos, label_len) .. "."
+ label_count = label_count + 1
+ pos = pos + label_len -- move past label
+ end
+ name_len = name_len + label_len + 1
+ len_remaining = len_remaining - (label_len + 1) -- subtract label and its length octet
+ until label_len == 0
+
+ -- we appended an extra dot, so get rid of it
+ name = name:sub(1, -2)
+
+ if name == "" then
+ -- this is the root zone (.)
+ name = "<Root>"
+ end
+
+ return label_count, name, name_len
+end
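+
+----------------------------------------
+-- purely as a standalone illustration of the label format described above (this helper is
+-- NOT used by the dissector), the same walk over a plain Lua string, instead of a
+-- ByteArray, would look roughly like this:
+local function labels_to_name_example(s)
+    local name, pos = "", 1
+    while true do
+        local len = s:byte(pos)
+        if len == 0 then break end
+        name = name .. s:sub(pos + 1, pos + len) .. "."
+        pos = pos + len + 1
+    end
+    return name:sub(1, -2)
+end
+-- e.g. labels_to_name_example("\6foobar\3com\0") --> "foobar.com"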
diff --git a/test/lua/field.lua b/test/lua/field.lua
new file mode 100644
index 0000000..f049b81
--- /dev/null
+++ b/test/lua/field.lua
@@ -0,0 +1,165 @@
+-- test script for wslua Field/FieldInfo functions
+-- use with dhcp.pcap in test/captures directory
+local testlib = require("testlib")
+
+local FRAME = "frame"
+local PER_FRAME = "per-frame"
+local OTHER = "other"
+
+local n_frames = 1
+testlib.init({
+ [FRAME] = n_frames,
+ [PER_FRAME] = n_frames*43,
+ [OTHER] = 16,
+})
+
+------------- helper funcs ------------
+
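+-- converts a hex string such as "0011223344ff" into colon form: "00:11:22:33:44:ff"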
+local function toMacAddr(addrhex)
+ return addrhex:gsub("..","%0:"):sub(1,-2)
+end
+
+-- the following are so we can use pcall (which needs a function to call)
+local function makeField(name)
+ local foo = Field.new(name)
+ return true
+end
+
+local function makeFieldInfo(field)
+ local foo = field()
+ return true
+end
+
+local function setFieldInfo(finfo,name,value)
+ finfo[name] = value
+ return true
+end
+
+local function getFieldInfo(finfo,name)
+ local foo = finfo[name]
+ return true
+end
+
+--------------------------
+
+testlib.testing(OTHER, "Field")
+
+testlib.test(OTHER,"Field.new-0",pcall(makeField,"ip.src"))
+testlib.test(OTHER,"Field.new-1",not pcall(makeField,"FooBARhowdy"))
+testlib.test(OTHER,"Field.new-2",not pcall(makeField))
+testlib.test(OTHER,"Field.new-3",not pcall(makeField,""))
+testlib.test(OTHER,"Field.new-4",not pcall(makeField,"IP.SRC"))
+
+-- declare some field extractors
+local f_frame_encap_type = Field.new("frame.encap_type")
+local f_frame_proto = Field.new("frame.protocols")
+local f_eth_src = Field.new("eth.src")
+local f_eth_dst = Field.new("eth.dst")
+local f_eth_mac = Field.new("eth.addr")
+local f_ip_src = Field.new("ip.src")
+local f_ip_dst = Field.new("ip.dst")
+local f_udp_srcport = Field.new("udp.srcport")
+local f_udp_dstport = Field.new("udp.dstport")
+local f_dhcp_hw = Field.new("dhcp.hw.mac_addr")
+local f_dhcp_opt = Field.new("dhcp.option.type")
+
+testlib.test(OTHER,"Field__tostring-1", tostring(f_frame_proto) == "frame.protocols")
+
+testlib.test(OTHER,"Field.name-1", f_frame_proto.name == "frame.protocols")
+testlib.test(OTHER,"Field.name-2", f_eth_src.name == "eth.src")
+
+testlib.test(OTHER,"Field.display-1", f_frame_proto.display == "Protocols in frame")
+testlib.test(OTHER,"Field.display-2", f_eth_src.display == "Source")
+
+testlib.test(OTHER,"Field.type-1", f_frame_proto.type == ftypes.STRING)
+testlib.test(OTHER,"Field.type-2", f_eth_src.type == ftypes.ETHER)
+testlib.test(OTHER,"Field.type-3", f_ip_src.type == ftypes.IPv4)
+testlib.test(OTHER,"Field.type-4", f_udp_srcport.type == ftypes.UINT16)
+testlib.test(OTHER,"Field.type-5", f_dhcp_opt.type == ftypes.UINT8)
+
+-- make sure can't create a FieldInfo outside tap
+testlib.test(OTHER,"Field__call-1",not pcall(makeFieldInfo,f_eth_src))
+
+local tap = Listener.new()
+
+--------------------------
+
+function tap.packet(pinfo,tvb)
+ testlib.countPacket(FRAME)
+
+ testlib.testing(FRAME,"Field")
+ testlib.test(PER_FRAME,"Field__tostring-2", tostring(f_frame_proto) == "frame.protocols")
+
+ -- make sure can't create a Field inside tap
+ testlib.test(PER_FRAME,"Field.new-5",not pcall(makeField,"ip.src"))
+
+ testlib.test(PER_FRAME,"Field__call-2",pcall(makeFieldInfo,f_eth_src))
+
+ testlib.test(PER_FRAME,"Field.name-3", f_frame_proto.name == "frame.protocols")
+ testlib.test(PER_FRAME,"Field.name-4", f_eth_src.name == "eth.src")
+
+ testlib.test(PER_FRAME,"Field.display-3", f_frame_proto.display == "Protocols in frame")
+ testlib.test(PER_FRAME,"Field.display-4", f_eth_src.display == "Source")
+
+ testlib.test(PER_FRAME,"Field.type-6", f_frame_proto.type == ftypes.STRING)
+ testlib.test(PER_FRAME,"Field.type-7", f_eth_src.type == ftypes.ETHER)
+ testlib.test(PER_FRAME,"Field.type-8", f_ip_src.type == ftypes.IPv4)
+ testlib.test(PER_FRAME,"Field.type-9", f_udp_srcport.type == ftypes.UINT16)
+ testlib.test(PER_FRAME,"Field.type-10", f_dhcp_opt.type == ftypes.UINT8)
+
+ testlib.testing(FRAME,"FieldInfo")
+
+ local finfo_udp_srcport = f_udp_srcport()
+ testlib.test(PER_FRAME,"FieldInfo.name-1", finfo_udp_srcport.name == "udp.srcport")
+ testlib.test(PER_FRAME,"FieldInfo.type-1", finfo_udp_srcport.type == ftypes.UINT16)
+ testlib.test(PER_FRAME,"FieldInfo.little_endian-1", finfo_udp_srcport.little_endian == false)
+ testlib.test(PER_FRAME,"FieldInfo.big_endian-1", finfo_udp_srcport.big_endian == true)
+ testlib.test(PER_FRAME,"FieldInfo.is_url-1", finfo_udp_srcport.is_url == false)
+ testlib.test(PER_FRAME,"FieldInfo.offset-1", finfo_udp_srcport.offset == 34)
+ testlib.test(PER_FRAME,"FieldInfo.source-1", finfo_udp_srcport.source == tvb)
+
+ -- check ether addr
+ local fi_eth_src = f_eth_src()
+ testlib.test(PER_FRAME,"FieldInfo.type-2", fi_eth_src.type == ftypes.ETHER)
+ testlib.test(PER_FRAME,"FieldInfo.range-0",pcall(getFieldInfo,fi_eth_src,"range"))
+ local eth_macs = { f_eth_mac() }
+ local eth_src1 = tostring(f_eth_src().range)
+ local eth_src2 = tostring(tvb:range(6,6))
+ local eth_src3 = tostring(eth_macs[2].tvb)
+
+ testlib.test(PER_FRAME,"FieldInfo.range-1", eth_src1 == eth_src2)
+ testlib.test(PER_FRAME,"FieldInfo.range-2", eth_src1 == eth_src3)
+ testlib.test(PER_FRAME,"FieldInfo.range-3",not pcall(setFieldInfo,fi_eth_src,"range",3))
+ testlib.test(PER_FRAME,"FieldInfo.range-4", tostring(f_frame_encap_type().range) == "<EMPTY>")
+
+ testlib.test(PER_FRAME,"FieldInfo.generated-1", f_frame_proto().generated == true)
+ testlib.test(PER_FRAME,"FieldInfo.generated-2", eth_macs[2].generated == false)
+ testlib.test(PER_FRAME,"FieldInfo.generated-3",not pcall(setFieldInfo,fi_eth_src,"generated",3))
+
+ testlib.test(PER_FRAME,"FieldInfo.name-1", fi_eth_src.name == "eth.src")
+ testlib.test(PER_FRAME,"FieldInfo.name-2",not pcall(setFieldInfo,fi_eth_src,"name","3"))
+
+ testlib.test(PER_FRAME,"FieldInfo.label-1", fi_eth_src.label == tostring(fi_eth_src))
+ testlib.test(PER_FRAME,"FieldInfo.label-2", fi_eth_src.label == toMacAddr(eth_src1))
+ testlib.test(PER_FRAME,"FieldInfo.label-3",not pcall(setFieldInfo,fi_eth_src,"label","3"))
+
+ testlib.test(PER_FRAME,"FieldInfo.display-1", select(1, string.find(fi_eth_src.display, toMacAddr(eth_src1))) ~= nil)
+ testlib.test(PER_FRAME,"FieldInfo.display-2",not pcall(setFieldInfo,fi_eth_src,"display","3"))
+
+ testlib.test(PER_FRAME,"FieldInfo.eq-1", eth_macs[2] == select(2, f_eth_mac()))
+ testlib.test(PER_FRAME,"FieldInfo.eq-2", eth_macs[1] ~= fi_eth_src)
+ testlib.test(PER_FRAME,"FieldInfo.eq-3", eth_macs[1] == f_eth_dst())
+
+ testlib.test(PER_FRAME,"FieldInfo.offset-1", eth_macs[1].offset == 0)
+ testlib.test(PER_FRAME,"FieldInfo.offset-2", -fi_eth_src == 6)
+ testlib.test(PER_FRAME,"FieldInfo.offset-3",not pcall(setFieldInfo,fi_eth_src,"offset","3"))
+
+ testlib.test(PER_FRAME,"FieldInfo.len-1", fi_eth_src.len == 6)
+ testlib.test(PER_FRAME,"FieldInfo.len-2",not pcall(setFieldInfo,fi_eth_src,"len",6))
+
+ testlib.pass(FRAME)
+end
+
+function tap.draw()
+ testlib.getResults()
+end
diff --git a/test/lua/field_setup.lua b/test/lua/field_setup.lua
new file mode 100644
index 0000000..d73b13a
--- /dev/null
+++ b/test/lua/field_setup.lua
@@ -0,0 +1,108 @@
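+-- field_setup(proto, prefix): for the given Proto and field-name prefix, creates a
+-- ProtoField and a matching Field extractor for each supported ftype, registers the
+-- ProtoFields on the Proto, and returns a map keyed by type name with
+-- {packet_field, value_field} pairs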
+function field_setup(proto, prefix)
+
+ local pf_boolean = ProtoField.new("Boolean", prefix..".boolean", ftypes.BOOLEAN)
+ local pf_char = ProtoField.new("Char", prefix..".char", ftypes.CHAR)
+ local pf_uint8 = ProtoField.new("Uint8", prefix..".uint8", ftypes.UINT8)
+ local pf_uint16 = ProtoField.new("Uint16", prefix..".uint16", ftypes.UINT16)
+ local pf_uint24 = ProtoField.new("Uint24", prefix..".uint24", ftypes.UINT24)
+ local pf_uint32 = ProtoField.new("Uint32", prefix..".uint32", ftypes.UINT32)
+ local pf_uint64 = ProtoField.new("Uint64", prefix..".uint64", ftypes.UINT64)
+ local pf_int8 = ProtoField.new("Int8", prefix..".int8", ftypes.INT8)
+ local pf_int16 = ProtoField.new("Int16", prefix..".int16", ftypes.INT16)
+ local pf_int24 = ProtoField.new("Int24", prefix..".int24", ftypes.INT24)
+ local pf_int32 = ProtoField.new("Int32", prefix..".int32", ftypes.INT32)
+ local pf_int64 = ProtoField.new("Int64", prefix..".int64", ftypes.INT64)
+ local pf_float = ProtoField.new("Float", prefix..".float", ftypes.FLOAT)
+ local pf_double = ProtoField.new("Double", prefix..".double", ftypes.DOUBLE)
+ local pf_absolute_time = ProtoField.new("Absolute_Time", prefix..".absolute_time", ftypes.ABSOLUTE_TIME)
+ local pf_relative_time = ProtoField.new("Relative_Time", prefix..".relative_time", ftypes.RELATIVE_TIME)
+ local pf_string = ProtoField.new("String", prefix..".string", ftypes.STRING)
+ local pf_stringz = ProtoField.new("Stringz", prefix..".stringz", ftypes.STRINGZ)
+ local pf_ether = ProtoField.new("Ether", prefix..".ether", ftypes.ETHER)
+ local pf_bytes = ProtoField.new("Bytes", prefix..".bytes", ftypes.BYTES)
+ local pf_uint_bytes = ProtoField.new("Uint_Bytes", prefix..".uint_bytes", ftypes.UINT_BYTES)
+ local pf_ipv4 = ProtoField.new("Ipv4", prefix..".ipv4", ftypes.IPv4)
+ local pf_ipv6 = ProtoField.new("Ipv6", prefix..".ipv6", ftypes.IPv6)
+ local pf_ipxnet = ProtoField.new("Ipxnet", prefix..".ipxnet", ftypes.IPXNET)
+ local pf_framenum = ProtoField.new("Framenum", prefix..".framenum", ftypes.FRAMENUM)
+ local pf_guid = ProtoField.new("Guid", prefix..".guid", ftypes.GUID)
+ local pf_oid = ProtoField.new("Oid", prefix..".oid", ftypes.OID)
+ local pf_rel_oid = ProtoField.new("Rel_Oid", prefix..".rel_oid", ftypes.REL_OID)
+ local pf_system_id = ProtoField.new("System_Id", prefix..".system_id", ftypes.SYSTEM_ID)
+ local pf_eui64 = ProtoField.new("Eui64", prefix..".eui64", ftypes.EUI64)
+
+ proto.fields = {
+ pf_boolean, pf_char, pf_uint8, pf_uint16, pf_uint24, pf_uint32, pf_uint64, pf_int8,
+ pf_int16, pf_int24, pf_int32, pf_int64, pf_float, pf_double, pf_absolute_time, pf_relative_time,
+ pf_string, pf_stringz, pf_ether, pf_bytes, pf_uint_bytes, pf_ipv4, pf_ipv6, pf_ipxnet,
+ pf_framenum, pf_guid, pf_oid, pf_rel_oid, pf_system_id, pf_eui64,
+ }
+
+ local vf_boolean = Field.new(prefix..".boolean")
+ local vf_char = Field.new(prefix..".char")
+ local vf_uint8 = Field.new(prefix..".uint8")
+ local vf_uint16 = Field.new(prefix..".uint16")
+ local vf_uint24 = Field.new(prefix..".uint24")
+ local vf_uint32 = Field.new(prefix..".uint32")
+ local vf_uint64 = Field.new(prefix..".uint64")
+ local vf_int8 = Field.new(prefix..".int8")
+ local vf_int16 = Field.new(prefix..".int16")
+ local vf_int24 = Field.new(prefix..".int24")
+ local vf_int32 = Field.new(prefix..".int32")
+ local vf_int64 = Field.new(prefix..".int64")
+ local vf_float = Field.new(prefix..".float")
+ local vf_double = Field.new(prefix..".double")
+ local vf_absolute_time = Field.new(prefix..".absolute_time")
+ local vf_relative_time = Field.new(prefix..".relative_time")
+ local vf_string = Field.new(prefix..".string")
+ local vf_stringz = Field.new(prefix..".stringz")
+ local vf_ether = Field.new(prefix..".ether")
+ local vf_bytes = Field.new(prefix..".bytes")
+ local vf_uint_bytes = Field.new(prefix..".uint_bytes")
+ local vf_ipv4 = Field.new(prefix..".ipv4")
+ local vf_ipv6 = Field.new(prefix..".ipv6")
+ local vf_ipxnet = Field.new(prefix..".ipxnet")
+ local vf_framenum = Field.new(prefix..".framenum")
+ local vf_guid = Field.new(prefix..".guid")
+ local vf_oid = Field.new(prefix..".oid")
+ local vf_rel_oid = Field.new(prefix..".rel_oid")
+ local vf_system_id = Field.new(prefix..".system_id")
+ local vf_eui64 = Field.new(prefix..".eui64")
+
+ local fieldmap = {
+ ["boolean"] = {packet_field = pf_boolean, value_field = vf_boolean},
+ ["char"] = {packet_field = pf_char, value_field = vf_char},
+ ["uint8"] = {packet_field = pf_uint8, value_field = vf_uint8},
+ ["uint16"] = {packet_field = pf_uint16, value_field = vf_uint16},
+ ["uint24"] = {packet_field = pf_uint24, value_field = vf_uint24},
+ ["uint32"] = {packet_field = pf_uint32, value_field = vf_uint32},
+ ["uint64"] = {packet_field = pf_uint64, value_field = vf_uint64},
+ ["int8"] = {packet_field = pf_int8, value_field = vf_int8},
+ ["int16"] = {packet_field = pf_int16, value_field = vf_int16},
+ ["int24"] = {packet_field = pf_int24, value_field = vf_int24},
+ ["int32"] = {packet_field = pf_int32, value_field = vf_int32},
+ ["int64"] = {packet_field = pf_int64, value_field = vf_int64},
+ ["float"] = {packet_field = pf_float, value_field = vf_float},
+ ["double"] = {packet_field = pf_double, value_field = vf_double},
+ ["absolute_time"] = {packet_field = pf_absolute_time, value_field = vf_absolute_time},
+ ["relative_time"] = {packet_field = pf_relative_time, value_field = vf_relative_time},
+ ["string"] = {packet_field = pf_string, value_field = vf_string},
+ ["stringz"] = {packet_field = pf_stringz, value_field = vf_stringz},
+ ["ether"] = {packet_field = pf_ether, value_field = vf_ether},
+ ["bytes"] = {packet_field = pf_bytes, value_field = vf_bytes},
+ ["uint_bytes"] = {packet_field = pf_uint_bytes, value_field = vf_uint_bytes},
+ ["ipv4"] = {packet_field = pf_ipv4, value_field = vf_ipv4},
+ ["ipv6"] = {packet_field = pf_ipv6, value_field = vf_ipv6},
+ ["ipxnet"] = {packet_field = pf_ipxnet, value_field = vf_ipxnet},
+ ["framenum"] = {packet_field = pf_framenum, value_field = vf_framenum},
+ ["guid"] = {packet_field = pf_guid, value_field = vf_guid},
+ ["oid"] = {packet_field = pf_oid, value_field = vf_oid},
+ ["rel_oid"] = {packet_field = pf_rel_oid, value_field = vf_rel_oid},
+ ["system_id"] = {packet_field = pf_system_id, value_field = vf_system_id},
+ ["eui64"] = {packet_field = pf_eui64, value_field = vf_eui64},
+ }
+
+ return fieldmap
+end
+
+return field_setup
diff --git a/test/lua/globals_2.2.txt b/test/lua/globals_2.2.txt
new file mode 100644
index 0000000..4e32457
--- /dev/null
+++ b/test/lua/globals_2.2.txt
@@ -0,0 +1,1221 @@
+-- Wireshark version: 1.12.6
+{
+ ["Address"] = {
+ ["__typeof"] = "Address",
+ ["ip"] = '<function 1>',
+ ["ipv4"] = '<function 1>',
+ ['<metatable>'] = {
+ ["__eq"] = '<function 2>',
+ ["__index"] = '<filtered>',
+ ["__le"] = '<function 4>',
+ ["__lt"] = '<function 5>',
+ ["__methods"] = '<table 2>',
+ ["__tostring"] = '<function 6>',
+ ["__typeof"] = "Address"
+ }
+ },
+ ["ByteArray"] = {
+ ["__typeof"] = "ByteArray",
+ ["append"] = '<function 7>',
+ ["base64_decode"] = '<function 8>',
+ ["get_index"] = '<function 9>',
+ ["len"] = '<function 10>',
+ ["new"] = '<function 11>',
+ ["prepend"] = '<function 12>',
+ ["raw"] = '<function 13>',
+ ["set_index"] = '<function 14>',
+ ["set_size"] = '<function 15>',
+ ["subset"] = '<function 16>',
+ ["tohex"] = '<function 17>',
+ ["tvb"] = '<function 18>',
+ ['<metatable>'] = {
+ ["__call"] = '<function 16>',
+ ["__concat"] = '<function 19>',
+ ["__eq"] = '<function 20>',
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 3>',
+ ["__tostring"] = '<function 22>',
+ ["__typeof"] = "ByteArray"
+ }
+ },
+ ["Column"] = {
+ ["__typeof"] = "Column",
+ ["append"] = '<function 23>',
+ ["clear"] = '<function 24>',
+ ["clear_fence"] = '<function 25>',
+ ["fence"] = '<function 26>',
+ ["prepend"] = '<function 27>',
+ ["preppend"] = '<function 27>',
+ ["set"] = '<function 28>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 4>',
+ ["__tostring"] = '<function 30>',
+ ["__typeof"] = "Column"
+ }
+ },
+ ["DATA_DIR"] = '<filtered>',
+ ["DESEGMENT_ONE_MORE_SEGMENT"] = 268435455,
+ ["Dir"] = {
+ ["__typeof"] = "Dir",
+ ["close"] = '<function 31>',
+ ["exists"] = '<function 32>',
+ ["global_config_path"] = '<function 33>',
+ ["global_plugins_path"] = '<function 34>',
+ ["make"] = '<function 35>',
+ ["open"] = '<function 36>',
+ ["personal_config_path"] = '<function 37>',
+ ["personal_plugins_path"] = '<function 38>',
+ ["remove"] = '<function 39>',
+ ["remove_all"] = '<function 40>',
+ ['<metatable>'] = {
+ ["__call"] = '<function 41>',
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 5>',
+ ["__typeof"] = "Dir"
+ }
+ },
+ ["Dissector"] = {
+ ["__typeof"] = "Dissector",
+ ["call"] = '<function 43>',
+ ["get"] = '<function 44>',
+ ["list"] = '<function 45>',
+ ['<metatable>'] = {
+ ["__call"] = '<function 46>',
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 6>',
+ ["__tostring"] = '<function 48>',
+ ["__typeof"] = "Dissector"
+ }
+ },
+ ["DissectorTable"] = {
+ ["__typeof"] = "DissectorTable",
+ ["add"] = '<function 49>',
+ ["get"] = '<function 50>',
+ ["get_dissector"] = '<function 51>',
+ ["heuristic_list"] = '<function 52>',
+ ["list"] = '<function 53>',
+ ["new"] = '<function 54>',
+ ["remove"] = '<function 55>',
+ ["remove_all"] = '<function 56>',
+ ["set"] = '<function 57>',
+ ["try"] = '<function 58>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 7>',
+ ["__tostring"] = '<function 60>',
+ ["__typeof"] = "DissectorTable"
+ }
+ },
+ ["Dumper"] = {
+ ["__typeof"] = "Dumper",
+ ["close"] = '<function 61>',
+ ["dump"] = '<function 62>',
+ ["dump_current"] = '<function 63>',
+ ["flush"] = '<function 64>',
+ ["new"] = '<function 65>',
+ ["new_for_current"] = '<function 66>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 8>',
+ ["__typeof"] = "Dumper"
+ }
+ },
+ ["ENC_3GPP_TS_23_038_7BITS"] = 44,
+ ["ENC_ASCII"] = 0,
+ ["ENC_ASCII_7BITS"] = 52,
+ ["ENC_BIG_ENDIAN"] = 0,
+ ["ENC_CHARENCODING_MASK"] = 2147483646,
+ ["ENC_CP437"] = 50,
+ ["ENC_EBCDIC"] = 46,
+ ["ENC_ISO_8601_DATE"] = 65536,
+ ["ENC_ISO_8601_DATE_TIME"] = 196608,
+ ["ENC_ISO_8601_DATE_TIME_BASIC"] = 1048576,
+ ["ENC_ISO_8601_TIME"] = 131072,
+ ["ENC_ISO_8859_1"] = 10,
+ ["ENC_ISO_8859_10"] = 28,
+ ["ENC_ISO_8859_11"] = 30,
+ ["ENC_ISO_8859_13"] = 34,
+ ["ENC_ISO_8859_14"] = 36,
+ ["ENC_ISO_8859_15"] = 38,
+ ["ENC_ISO_8859_16"] = 40,
+ ["ENC_ISO_8859_2"] = 12,
+ ["ENC_ISO_8859_3"] = 14,
+ ["ENC_ISO_8859_4"] = 16,
+ ["ENC_ISO_8859_5"] = 18,
+ ["ENC_ISO_8859_6"] = 20,
+ ["ENC_ISO_8859_7"] = 22,
+ ["ENC_ISO_8859_8"] = 24,
+ ["ENC_ISO_8859_9"] = 26,
+ ["ENC_LITTLE_ENDIAN"] = 2147483648,
+ ["ENC_MAC_ROMAN"] = 48,
+ ["ENC_NA"] = 0,
+ ["ENC_NUM_PREF"] = 2097152,
+ ["ENC_RFC_1123"] = 524288,
+ ["ENC_RFC_822"] = 262144,
+ ["ENC_SEP_COLON"] = 131072,
+ ["ENC_SEP_DASH"] = 262144,
+ ["ENC_SEP_DOT"] = 524288,
+ ["ENC_SEP_MASK"] = 2031616,
+ ["ENC_SEP_NONE"] = 65536,
+ ["ENC_SEP_SPACE"] = 1048576,
+ ["ENC_STRING"] = 50331648,
+ ["ENC_STR_HEX"] = 33554432,
+ ["ENC_STR_MASK"] = 65534,
+ ["ENC_STR_NUM"] = 16777216,
+ ["ENC_STR_TIME_MASK"] = 983040,
+ ["ENC_TIME_NTP"] = 2,
+ ["ENC_TIME_TIMESPEC"] = 0,
+ ["ENC_TIME_TOD"] = 4,
+ ["ENC_UCS_2"] = 6,
+ ["ENC_UCS_4"] = 8,
+ ["ENC_UTF_16"] = 4,
+ ["ENC_UTF_8"] = 2,
+ ["ENC_WINDOWS_1250"] = 42,
+ ["Field"] = {
+ ["__typeof"] = "Field",
+ ["list"] = '<function 68>',
+ ["new"] = '<function 69>',
+ ['<metatable>'] = {
+ ["__call"] = '<function 70>',
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 9>',
+ ["__tostring"] = '<function 72>',
+ ["__typeof"] = "Field"
+ }
+ },
+ ["File"] = {
+ ["__typeof"] = "File",
+ ["lines"] = '<function 73>',
+ ["read"] = '<function 74>',
+ ["seek"] = '<function 75>',
+ ["write"] = '<function 76>',
+ ['<metatable>'] = {
+ ["__getters"] = {
+ ["__typeof"] = "getter",
+ ["compressed"] = '<function 78>'
+ },
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 10>',
+ ["__newindex"] = '<function 79>',
+ ["__setters"] = {
+ ["__typeof"] = "setter"
+ },
+ ["__tostring"] = '<function 80>',
+ ["__typeof"] = "File"
+ }
+ },
+ ["FileHandler"] = {
+ ["__typeof"] = "FileHandler",
+ ["new"] = '<function 81>',
+ ['<metatable>'] = {
+ ["__getters"] = {
+ ["__typeof"] = "getter",
+ ["extensions"] = '<function 83>',
+ ["supported_comment_types"] = '<function 84>',
+ ["type"] = '<function 85>',
+ ["writes_name_resolution"] = '<function 86>',
+ ["writing_must_seek"] = '<function 87>'
+ },
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 11>',
+ ["__newindex"] = '<function 88>',
+ ["__setters"] = {
+ ["__typeof"] = "setter",
+ ["can_write_encap"] = '<function 89>',
+ ["extensions"] = '<function 90>',
+ ["read"] = '<function 91>',
+ ["read_close"] = '<function 92>',
+ ["read_open"] = '<function 93>',
+ ["seek_read"] = '<function 94>',
+ ["seq_read_close"] = '<function 95>',
+ ["supported_comment_types"] = '<function 96>',
+ ["write"] = '<function 97>',
+ ["write_close"] = '<function 98>',
+ ["write_open"] = '<function 99>',
+ ["writes_name_resolution"] = '<function 100>',
+ ["writing_must_seek"] = '<function 101>'
+ },
+ ["__tostring"] = '<function 102>',
+ ["__typeof"] = "FileHandler"
+ }
+ },
+ ["FrameInfo"] = {
+ ["__typeof"] = "FrameInfo",
+ ["read_data"] = '<function 103>',
+ ['<metatable>'] = {
+ ["__getters"] = {
+ ["__typeof"] = "getter",
+ ["captured_length"] = '<function 105>',
+ ["comment"] = '<function 106>',
+ ["data"] = '<function 107>',
+ ["encap"] = '<function 108>',
+ ["flags"] = '<function 109>',
+ ["original_length"] = '<function 110>',
+ ["rec_type"] = '<function 111>',
+ ["time"] = '<function 112>'
+ },
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 12>',
+ ["__newindex"] = '<function 113>',
+ ["__setters"] = {
+ ["__typeof"] = "setter",
+ ["captured_length"] = '<function 114>',
+ ["comment"] = '<function 115>',
+ ["data"] = '<function 116>',
+ ["encap"] = '<function 117>',
+ ["flags"] = '<function 118>',
+ ["original_length"] = '<function 119>',
+ ["rec_type"] = '<function 120>',
+ ["time"] = '<function 121>'
+ },
+ ["__tostring"] = '<function 122>',
+ ["__typeof"] = "FrameInfo"
+ }
+ },
+ ["FrameInfoConst"] = {
+ ["__typeof"] = "FrameInfoConst",
+ ["write_data"] = '<function 123>',
+ ['<metatable>'] = {
+ ["__getters"] = {
+ ["__typeof"] = "getter",
+ ["captured_length"] = '<function 125>',
+ ["comment"] = '<function 126>',
+ ["data"] = '<function 127>',
+ ["encap"] = '<function 128>',
+ ["flags"] = '<function 129>',
+ ["original_length"] = '<function 130>',
+ ["rec_type"] = '<function 131>',
+ ["time"] = '<function 132>'
+ },
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 13>',
+ ["__newindex"] = '<function 133>',
+ ["__setters"] = {
+ ["__typeof"] = "setter"
+ },
+ ["__tostring"] = '<function 134>',
+ ["__typeof"] = "FrameInfoConst"
+ }
+ },
+ ["GUI_ENABLED"] = false,
+ ["H225_ALERTING"] = 3,
+ ["H225_CALL_PROCEDING"] = 1,
+ ["H225_CONNECT"] = 2,
+ ["H225_CS"] = 1,
+ ["H225_EMPTY"] = 8,
+ ["H225_FACILITY"] = 6,
+ ["H225_INFORMATION"] = 4,
+ ["H225_NOTIFY"] = 12,
+ ["H225_OTHER"] = 13,
+ ["H225_OTHERS"] = 2,
+ ["H225_PROGRESS"] = 7,
+ ["H225_RAS"] = 0,
+ ["H225_RELEASE_COMPLET"] = 5,
+ ["H225_SETUP"] = 0,
+ ["H225_SETUP_ACK"] = 11,
+ ["H225_STATUS"] = 9,
+ ["H225_STATUS_INQUIRY"] = 10,
+ ["Int64"] = {
+ ["__typeof"] = "Int64",
+ ["arshift"] = '<function 145>',
+ ["band"] = '<function 146>',
+ ["bnot"] = '<function 147>',
+ ["bor"] = '<function 148>',
+ ["bswap"] = '<function 149>',
+ ["bxor"] = '<function 150>',
+ ["decode"] = '<function 151>',
+ ["encode"] = '<function 152>',
+ ["fromhex"] = '<function 153>',
+ ["higher"] = '<function 154>',
+ ["lower"] = '<function 155>',
+ ["lshift"] = '<function 156>',
+ ["max"] = '<function 157>',
+ ["min"] = '<function 158>',
+ ["new"] = '<function 159>',
+ ["rol"] = '<function 160>',
+ ["ror"] = '<function 161>',
+ ["rshift"] = '<function 162>',
+ ["tohex"] = '<function 163>',
+ ["tonumber"] = '<function 164>',
+ ['<metatable>'] = {
+ ["__add"] = '<function 165>',
+ ["__call"] = '<function 166>',
+ ["__concat"] = '<function 167>',
+ ["__div"] = '<function 168>',
+ ["__eq"] = '<function 169>',
+ ["__index"] = '<filtered>',
+ ["__le"] = '<function 171>',
+ ["__lt"] = '<function 172>',
+ ["__methods"] = '<table 14>',
+ ["__mod"] = '<function 173>',
+ ["__mul"] = '<function 174>',
+ ["__pow"] = '<function 175>',
+ ["__sub"] = '<function 176>',
+ ["__tostring"] = '<function 177>',
+ ["__typeof"] = "Int64",
+ ["__unm"] = '<function 178>'
+ }
+ },
+ ["Listener"] = {
+ ["__typeof"] = "Listener",
+ ["list"] = '<function 179>',
+ ["new"] = '<function 180>',
+ ["remove"] = '<function 181>',
+ ['<metatable>'] = {
+ ["__getters"] = {
+ ["__typeof"] = "getter"
+ },
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 15>',
+ ["__newindex"] = '<function 183>',
+ ["__setters"] = {
+ ["__typeof"] = "setter",
+ ["draw"] = '<function 184>',
+ ["packet"] = '<function 185>',
+ ["reset"] = '<function 186>'
+ },
+ ["__tostring"] = '<function 187>',
+ ["__typeof"] = "Listener"
+ }
+ },
+ ["MENU_PACKET_ANALYZE_UNSORTED"] = 0,
+ ["MENU_ANALYZE_CONVERSATION_FILTER"] = 1,
+ ["MENU_STAT_UNSORTED"] = 2,
+ ["MENU_STAT_GENERIC"] = 3,
+ ["MENU_STAT_CONVERSATION_LIST"] = 4,
+ ["MENU_STAT_ENDPOINT_LIST"] = 5,
+ ["MENU_STAT_RESPONSE_TIME"] = 6,
+ ["MENU_STAT_RSERPOOL"] = 7,
+ ["MENU_STAT_TELEPHONY"] = 8,
+ ["MENU_STAT_TELEPHONY_ANSI"] = 9,
+ ["MENU_STAT_TELEPHONY_GSM"] = 10,
+ ["MENU_STAT_TELEPHONY_LTE"] = 11,
+ ["MENU_STAT_TELEPHONY_MTP3"] = 12,
+ ["MENU_STAT_TELEPHONY_SCTP"] = 13,
+ ["MENU_TOOLS_UNSORTED"] = 14,
+ ["MENU_LOG_ANALYZE_UNSORTED"] = 15,
+ ["MENU_LOG_STAT_UNSORTED"] = 16,
+ ["NSTime"] = '<filtered>',
+ ["PI_ASSUMPTION"] = 218103808,
+ ["PI_CHAT"] = 2097152,
+ ["PI_CHECKSUM"] = 16777216,
+ ["PI_COMMENT"] = 1048576,
+ ["PI_COMMENTS_GROUP"] = 184549376,
+ ["PI_DEBUG"] = 134217728,
+ ["PI_DECRYPTION"] = 201326592,
+ ["PI_DEPRECATED"] = 234881024,
+ ["PI_ERROR"] = 8388608,
+ ["PI_GROUP_MASK"] = 4278190080,
+ ["PI_MALFORMED"] = 117440512,
+ ["PI_NOTE"] = 4194304,
+ ["PI_PROTOCOL"] = 150994944,
+ ["PI_REASSEMBLE"] = 100663296,
+ ["PI_REQUEST_CODE"] = 67108864,
+ ["PI_RESPONSE_CODE"] = 50331648,
+ ["PI_SECURITY"] = 167772160,
+ ["PI_SEQUENCE"] = 33554432,
+ ["PI_SEVERITY_MASK"] = 15728640,
+ ["PI_UNDECODED"] = 83886080,
+ ["PI_WARN"] = 6291456,
+ ["Pref"] = {
+ ["__typeof"] = "Pref",
+ ["bool"] = '<function 188>',
+ ["enum"] = '<function 189>',
+ ["range"] = '<function 190>',
+ ["statictext"] = '<function 191>',
+ ["string"] = '<function 192>',
+ ["uint"] = '<function 193>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 16>',
+ ["__typeof"] = "Pref"
+ }
+ },
+ ["ProgDlg"] = {
+ ["__typeof"] = "ProgDlg",
+ ["close"] = '<function 195>',
+ ["new"] = '<function 196>',
+ ["stopped"] = '<function 197>',
+ ["update"] = '<function 198>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 17>',
+ ["__tostring"] = '<function 200>',
+ ["__typeof"] = "ProgDlg"
+ }
+ },
+ ["Proto"] = '<filtered>',
+ ["ProtoExpert"] = {
+ ["__typeof"] = "ProtoExpert",
+ ["new"] = '<function 201>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 18>',
+ ["__tostring"] = '<function 203>',
+ ["__typeof"] = "ProtoExpert"
+ }
+ },
+ ["ProtoField"] = {
+ ["__typeof"] = "ProtoField",
+ ["absolute_time"] = '<function 204>',
+ ["bool"] = '<function 205>',
+ ["bytes"] = '<function 206>',
+ ["double"] = '<function 207>',
+ ["ether"] = '<function 208>',
+ ["float"] = '<function 209>',
+ ["framenum"] = '<function 210>',
+ ["guid"] = '<function 211>',
+ ["int16"] = '<function 212>',
+ ["int24"] = '<function 213>',
+ ["int32"] = '<function 214>',
+ ["int64"] = '<function 215>',
+ ["int8"] = '<function 216>',
+ ["ipv4"] = '<function 217>',
+ ["ipv6"] = '<function 218>',
+ ["ipx"] = '<function 219>',
+ ["new"] = '<function 220>',
+ ["oid"] = '<function 221>',
+ ["rel_oid"] = '<function 222>',
+ ["relative_time"] = '<function 223>',
+ ["string"] = '<function 224>',
+ ["stringz"] = '<function 225>',
+ ["systemid"] = '<function 226>',
+ ["ubytes"] = '<function 227>',
+ ["uint16"] = '<function 228>',
+ ["uint24"] = '<function 229>',
+ ["uint32"] = '<function 230>',
+ ["uint64"] = '<function 231>',
+ ["uint8"] = '<function 232>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 19>',
+ ["__tostring"] = '<function 234>',
+ ["__typeof"] = "ProtoField"
+ }
+ },
+ ["PseudoHeader"] = {
+ ["__typeof"] = "PseudoHeader",
+ ["atm"] = '<function 235>',
+ ["eth"] = '<function 236>',
+ ["mtp2"] = '<function 237>',
+ ["none"] = '<function 238>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 20>',
+ ["__typeof"] = "PseudoHeader"
+ }
+ },
+ ["Struct"] = {
+ ["__typeof"] = "Struct",
+ ["fromhex"] = '<function 240>',
+ ["pack"] = '<function 241>',
+ ["size"] = '<function 242>',
+ ["tohex"] = '<function 243>',
+ ["unpack"] = '<function 244>',
+ ["values"] = '<function 245>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 21>',
+ ["__typeof"] = "Struct"
+ }
+ },
+ ["TextWindow"] = {
+ ["__typeof"] = "TextWindow",
+ ["add_button"] = '<function 247>',
+ ["append"] = '<function 248>',
+ ["clear"] = '<function 249>',
+ ["get_text"] = '<function 250>',
+ ["new"] = '<function 251>',
+ ["prepend"] = '<function 252>',
+ ["set"] = '<function 253>',
+ ["set_atclose"] = '<function 254>',
+ ["set_editable"] = '<function 255>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 22>',
+ ["__tostring"] = '<function 250>',
+ ["__typeof"] = "TextWindow"
+ }
+ },
+ ["TreeItem"] = {
+ ["__typeof"] = "TreeItem",
+ ["add"] = '<function 257>',
+ ["add_expert_info"] = '<function 258>',
+ ["add_le"] = '<function 259>',
+ ["add_packet_field"] = '<function 260>',
+ ["add_proto_expert_info"] = '<function 261>',
+ ["add_tvb_expert_info"] = '<function 262>',
+ ["append_text"] = '<function 263>',
+ ["prepend_text"] = '<function 264>',
+ ["set_generated"] = '<function 265>',
+ ["set_hidden"] = '<function 266>',
+ ["set_len"] = '<function 267>',
+ ["set_text"] = '<function 268>',
+ ['<metatable>'] = {
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 23>',
+ ["__typeof"] = "TreeItem"
+ }
+ },
+ ["Tvb"] = {
+ ["__typeof"] = "Tvb",
+ ["len"] = '<function 270>',
+ ["offset"] = '<function 271>',
+ ["range"] = '<function 272>',
+ ["raw"] = '<function 273>',
+ ["reported_len"] = '<function 274>',
+ ["reported_length_remaining"] = '<function 275>',
+ ['<metatable>'] = {
+ ["__call"] = '<function 272>',
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 24>',
+ ["__tostring"] = '<function 277>',
+ ["__typeof"] = "Tvb"
+ }
+ },
+ ["TvbRange"] = {
+ ["__typeof"] = "TvbRange",
+ ["bitfield"] = '<function 278>',
+ ["bytes"] = '<function 279>',
+ ["ether"] = '<function 280>',
+ ["float"] = '<function 281>',
+ ["int"] = '<function 282>',
+ ["int64"] = '<function 283>',
+ ["ipv4"] = '<function 284>',
+ ["le_float"] = '<function 285>',
+ ["le_int"] = '<function 286>',
+ ["le_int64"] = '<function 287>',
+ ["le_ipv4"] = '<function 288>',
+ ["le_nstime"] = '<function 289>',
+ ["le_uint"] = '<function 290>',
+ ["le_uint64"] = '<function 291>',
+ ["le_ustring"] = '<function 292>',
+ ["le_ustringz"] = '<function 293>',
+ ["len"] = '<function 294>',
+ ["nstime"] = '<function 295>',
+ ["offset"] = '<function 296>',
+ ["range"] = '<function 297>',
+ ["raw"] = '<function 298>',
+ ["string"] = '<function 299>',
+ ["stringz"] = '<function 300>',
+ ["strsize"] = '<function 301>',
+ ["tvb"] = '<function 302>',
+ ["uint"] = '<function 303>',
+ ["uint64"] = '<function 304>',
+ ["uncompress"] = '<function 305>',
+ ["ustring"] = '<function 306>',
+ ["ustringz"] = '<function 307>',
+ ['<metatable>'] = {
+ ["__call"] = '<function 297>',
+ ["__concat"] = '<function 167>',
+ ["__index"] = '<filtered>',
+ ["__methods"] = '<table 25>',
+ ["__tostring"] = '<function 309>',
+ ["__typeof"] = "TvbRange"
+ }
+ },
+ ["UInt64"] = {
+ ["__typeof"] = "UInt64",
+ ["arshift"] = '<function 310>',
+ ["band"] = '<function 311>',
+ ["bnot"] = '<function 312>',
+ ["bor"] = '<function 313>',
+ ["bswap"] = '<function 314>',
+ ["bxor"] = '<function 315>',
+ ["decode"] = '<function 316>',
+ ["encode"] = '<function 317>',
+ ["fromhex"] = '<function 318>',
+ ["higher"] = '<function 319>',
+ ["lower"] = '<function 320>',
+ ["lshift"] = '<function 321>',
+ ["max"] = '<function 322>',
+ ["min"] = '<function 323>',
+ ["new"] = '<function 324>',
+ ["rol"] = '<function 325>',
+ ["ror"] = '<function 326>',
+ ["rshift"] = '<function 327>',
+ ["tohex"] = '<function 328>',
+ ["tonumber"] = '<function 329>',
+ ['<metatable>'] = {
+ ["__add"] = '<function 330>',
+ ["__call"] = '<function 331>',
+ ["__concat"] = '<function 167>',
+ ["__div"] = '<function 332>',
+ ["__eq"] = '<function 333>',
+ ["__index"] = '<filtered>',
+ ["__le"] = '<function 335>',
+ ["__lt"] = '<function 336>',
+ ["__methods"] = '<table 26>',
+ ["__mod"] = '<function 337>',
+ ["__mul"] = '<function 338>',
+ ["__pow"] = '<function 339>',
+ ["__sub"] = '<function 340>',
+ ["__tostring"] = '<function 341>',
+ ["__typeof"] = "UInt64",
+ ["__unm"] = '<function 342>'
+ }
+ },
+ ["USER_DIR"] = '<filtered>',
+ ["_G"] = '<table 1>',
+ ["_VERSION"] = '<filtered>',
+ ["all_field_infos"] = '<function 343>',
+ ["apply_filter"] = '<function 344>',
+ ["assert"] = '<function 345>',
+ ["base"] = {
+ ["CUSTOM"] = 6,
+ ["DEC"] = 1,
+ ["DEC_HEX"] = 4,
+ ["HEX"] = 2,
+ ["HEX_DEC"] = 5,
+ ["NONE"] = 0,
+ ["OCT"] = 3
+ },
+ ["bit"] = {
+ ["arshift"] = '<function 346>',
+ ["band"] = '<function 347>',
+ ["bnot"] = '<function 348>',
+ ["bor"] = '<function 349>',
+ ["bswap"] = '<function 350>',
+ ["bxor"] = '<function 351>',
+ ["lshift"] = '<function 352>',
+ ["rol"] = '<function 353>',
+ ["ror"] = '<function 354>',
+ ["rshift"] = '<function 355>',
+ ["tobit"] = '<function 356>',
+ ["tohex"] = '<function 357>'
+ },
+ ["bit32"] = {
+ ["arshift"] = '<function 358>',
+ ["band"] = '<function 359>',
+ ["bnot"] = '<function 360>',
+ ["bor"] = '<function 361>',
+ ["btest"] = '<function 362>',
+ ["bxor"] = '<function 363>',
+ ["extract"] = '<function 364>',
+ ["lrotate"] = '<function 365>',
+ ["lshift"] = '<function 366>',
+ ["replace"] = '<function 367>',
+ ["rrotate"] = '<function 368>',
+ ["rshift"] = '<function 369>'
+ },
+ ["browser_open_data_file"] = '<function 370>',
+ ["browser_open_url"] = '<function 371>',
+ ["collectgarbage"] = '<function 372>',
+ ["copy_to_clipboard"] = '<function 373>',
+ ["coroutine"] = {
+ ["create"] = '<function 374>',
+ ["resume"] = '<function 375>',
+ ["running"] = '<function 376>',
+ ["status"] = '<function 377>',
+ ["wrap"] = '<function 378>',
+ ["yield"] = '<function 379>'
+ },
+ ["datafile_path"] = '<function 33>',
+ ["deregister_filehandler"] = '<function 382>',
+ ["dofile"] = '<function 383>',
+ ["error"] = '<function 384>',
+ ["expert"] = {
+ ["group"] = {
+ ["CHECKSUM"] = 16777216,
+ ["COMMENTS_GROUP"] = 184549376,
+ ["DEBUG"] = 134217728,
+ ["MALFORMED"] = 117440512,
+ ["PROTOCOL"] = 150994944,
+ ["REASSEMBLE"] = 100663296,
+ ["REQUEST_CODE"] = 67108864,
+ ["RESPONSE_CODE"] = 50331648,
+ ["SECURITY"] = 167772160,
+ ["SEQUENCE"] = 33554432,
+ ["UNDECODED"] = 83886080
+ },
+ ["severity"] = {
+ ["CHAT"] = 2097152,
+ ["COMMENT"] = 1048576,
+ ["ERROR"] = 8388608,
+ ["NOTE"] = 4194304,
+ ["WARN"] = 6291456
+ }
+ },
+ ["file_exists"] = '<function 385>',
+ ["format_date"] = '<function 386>',
+ ["format_time"] = '<function 387>',
+ ["ftypes"] = {
+ ["ABSOLUTE_TIME"] = 15,
+ ["AX25"] = 31,
+ ["BOOLEAN"] = 2,
+ ["BYTES"] = 21,
+ ["DOUBLE"] = 14,
+ ["ETHER"] = 20,
+ ["EUI64"] = 30,
+ ["FLOAT"] = 13,
+ ["FRAMENUM"] = 26,
+ ["GUID"] = 28,
+ ["INT16"] = 9,
+ ["INT24"] = 10,
+ ["INT32"] = 11,
+ ["INT64"] = 12,
+ ["INT8"] = 8,
+ ["IPXNET"] = 25,
+ ["IPv4"] = 23,
+ ["IPv6"] = 24,
+ ["NONE"] = 0,
+ ["OID"] = 29,
+ ["PROTOCOL"] = 1,
+ ["RELATIVE_TIME"] = 16,
+ ["REL_OID"] = 33,
+ ["STRING"] = 17,
+ ["STRINGZ"] = 18,
+ ["STRINGZPAD"] = 35,
+ ["SYSTEM_ID"] = 34,
+ ["UINT16"] = 4,
+ ["UINT24"] = 5,
+ ["UINT32"] = 6,
+ ["UINT64"] = 7,
+ ["UINT8"] = 3,
+ ["UINT_BYTES"] = 22,
+ ["UINT_STRING"] = 19,
+ ["VINES"] = 32
+ },
+ ["get_filter"] = '<function 388>',
+ ["get_version"] = '<function 389>',
+ ["getmetatable"] = '<function 390>',
+ ["gui_enabled"] = '<function 391>',
+ ["h225_cs_type"] = {
+ [1] = "H225_CALL_PROCEDING",
+ [2] = "H225_CONNECT",
+ [3] = "H225_ALERTING",
+ [4] = "H225_INFORMATION",
+ [5] = "H225_RELEASE_COMPLET",
+ [6] = "H225_FACILITY",
+ [7] = "H225_PROGRESS",
+ [8] = "H225_EMPTY",
+ [9] = "H225_STATUS",
+ [10] = "H225_STATUS_INQUIRY",
+ [11] = "H225_SETUP_ACK",
+ [12] = "H225_NOTIFY",
+ [13] = "H225_OTHER",
+ [0] = "H225_SETUP"
+ },
+ ["h225_msg_type"] = {
+ [1] = "H225_CS",
+ [2] = "H225_OTHERS",
+ [0] = "H225_RAS"
+ },
+ ["init_routines"] = {},
+ ["io"] = {
+ ["close"] = '<function 393>',
+ ["flush"] = '<function 394>',
+ ["input"] = '<function 395>',
+ ["lines"] = '<function 396>',
+ ["open"] = '<function 397>',
+ ["output"] = '<function 398>',
+ ["popen"] = '<function 399>',
+ ["read"] = '<function 400>',
+ ["stderr"] = '<userdata 1>',
+ ["stdin"] = '<userdata 2>',
+ ["stdout"] = '<userdata 3>',
+ ["tmpfile"] = '<function 401>',
+ ["type"] = '<function 402>',
+ ["write"] = '<function 403>'
+ },
+ ["ipairs"] = '<function 404>',
+ ["load"] = '<function 405>',
+ ["loadfile"] = '<function 406>',
+ ["loadstring"] = '<function 405>',
+ ["math"] = {
+ ["abs"] = '<function 407>',
+ ["acos"] = '<function 408>',
+ ["asin"] = '<function 409>',
+ ["atan"] = '<function 410>',
+ ["atan2"] = '<function 411>',
+ ["ceil"] = '<function 412>',
+ ["cos"] = '<function 413>',
+ ["cosh"] = '<function 414>',
+ ["deg"] = '<function 415>',
+ ["exp"] = '<function 416>',
+ ["floor"] = '<function 417>',
+ ["fmod"] = '<function 418>',
+ ["frexp"] = '<function 419>',
+ ["huge"] = '<number inf>',
+ ["ldexp"] = '<function 420>',
+ ["log"] = '<function 421>',
+ ["log10"] = '<function 422>',
+ ["max"] = '<function 423>',
+ ["min"] = '<function 424>',
+ ["modf"] = '<function 425>',
+ ["pi"] = 3.1415926535898,
+ ["pow"] = '<function 426>',
+ ["rad"] = '<function 427>',
+ ["random"] = '<function 428>',
+ ["randomseed"] = '<function 429>',
+ ["sin"] = '<function 430>',
+ ["sinh"] = '<function 431>',
+ ["sqrt"] = '<function 432>',
+ ["tan"] = '<function 433>',
+ ["tanh"] = '<function 434>'
+ },
+ ["module"] = '<function 436>',
+ ["new_dialog"] = '<function 437>',
+ ["next"] = '<function 438>',
+ ["open_capture_file"] = '<function 439>',
+ ["os"] = {
+ ["clock"] = '<function 440>',
+ ["date"] = '<function 441>',
+ ["difftime"] = '<function 442>',
+ ["execute"] = '<function 443>',
+ ["exit"] = '<function 444>',
+ ["getenv"] = '<function 445>',
+ ["remove"] = '<function 446>',
+ ["rename"] = '<function 447>',
+ ["setlocale"] = '<function 448>',
+ ["time"] = '<function 449>',
+ ["tmpname"] = '<function 450>'
+ },
+ ["package"] = {
+ ["config"] = '<filtered>',
+ ["cpath"] = '<filtered>',
+ ["loaded"] = '<filtered>',
+ ["loaders"] = {
+ [1] = '<function 451>',
+ [2] = '<function 452>',
+ [3] = '<function 453>',
+ [4] = '<function 454>' },
+ ["loadlib"] = '<function 455>',
+ ["path"] = '<filtered>',
+ ["preload"] = {},
+ ["searchers"] = '<table 33>',
+ ["searchpath"] = '<function 457>',
+ ["seeall"] = '<function 458>'
+ },
+ ["pairs"] = '<function 459>',
+ ["pcall"] = '<function 460>',
+ ["persconffile_path"] = '<function 37>',
+ ["prefs_changed"] = {},
+ ["print"] = '<function 461>',
+ ["rawequal"] = '<function 462>',
+ ["rawget"] = '<function 463>',
+ ["rawlen"] = '<function 464>',
+ ["rawset"] = '<function 465>',
+ ["register_filehandler"] = '<function 466>',
+ ["register_menu"] = '<function 467>',
+ ["register_postdissector"] = '<function 468>',
+ ["register_stat_cmd_arg"] = '<function 469>',
+ ["reload"] = '<function 470>',
+ ["report_failure"] = '<function 471>',
+ ["require"] = '<function 472>',
+ ["retap_packets"] = '<function 473>',
+ ["rex_pcre2"] = {
+ ["_VERSION"] = "Lrexlib 2.9.1 (for PCRE2)"
+ },
+ ["running_superuser"] = '<filtered>',
+ ["select"] = '<function 474>',
+ ["set_color_filter_slot"] = '<function 475>',
+ ["set_filter"] = '<function 476>',
+ ["setmetatable"] = '<function 477>',
+ ["string"] = {
+ ["byte"] = '<function 478>',
+ ["char"] = '<function 479>',
+ ["dump"] = '<function 480>',
+ ["find"] = '<function 481>',
+ ["format"] = '<function 482>',
+ ["gmatch"] = '<function 483>',
+ ["gsub"] = '<function 484>',
+ ["len"] = '<function 485>',
+ ["lower"] = '<function 486>',
+ ["match"] = '<function 487>',
+ ["rep"] = '<function 488>',
+ ["reverse"] = '<function 489>',
+ ["sub"] = '<function 490>',
+ ["upper"] = '<function 491>'
+ },
+ ["table"] = {
+ ["concat"] = '<function 492>',
+ ["insert"] = '<function 493>',
+ ["maxn"] = '<function 494>',
+ ["pack"] = '<function 495>',
+ ["remove"] = '<function 496>',
+ ["sort"] = '<function 497>',
+ ["unpack"] = '<function 498>'
+ },
+ ["tonumber"] = '<function 499>',
+ ["tostring"] = '<function 500>',
+ ["type"] = '<function 501>',
+ ["typeof"] = '<function 502>',
+ ["unpack"] = '<function 498>',
+ ["wtap"] = {
+ ["APPLE_IP_OVER_IEEE1394"] = 62,
+ ["ARCNET"] = 8,
+ ["ARCNET_LINUX"] = 9,
+ ["ASCEND"] = 16,
+ ["ATM_PDUS"] = 13,
+ ["ATM_PDUS_UNTRUNCATED"] = 14,
+ ["ATM_RFC1483"] = 10,
+ ["AX25"] = 148,
+ ["AX25_KISS"] = 147,
+ ["BACNET_MS_TP"] = 63,
+ ["BACNET_MS_TP_WITH_PHDR"] = 143,
+ ["BER"] = 90,
+ ["BLUETOOTH_BREDR_BB"] = 160,
+ ["BLUETOOTH_H4"] = 41,
+ ["BLUETOOTH_H4_WITH_PHDR"] = 99,
+ ["BLUETOOTH_HCI"] = 102,
+ ["BLUETOOTH_LE_LL"] = 154,
+ ["BLUETOOTH_LE_LL_WITH_PHDR"] = 161,
+ ["BLUETOOTH_LINUX_MONITOR"] = 159,
+ ["CAN20B"] = 109,
+ ["CATAPULT_DCT2000"] = 89,
+ ["CHDLC"] = 28,
+ ["CHDLC_WITH_PHDR"] = 40,
+ ["CISCO_IOS"] = 29,
+ ["COSINE"] = 34,
+ ["DBUS"] = 146,
+ ["DOCSIS"] = 33,
+ ["DPNSS"] = 117,
+ ["DVBCI"] = 132,
+ ["ENC"] = 38,
+ ["EPON"] = 172,
+ ["ERF"] = 98,
+ ["ETHERNET"] = 1,
+ ["FDDI"] = 5,
+ ["FDDI_BITSWAPPED"] = 6,
+ ["FIBRE_CHANNEL_FC2"] = 121,
+ ["FIBRE_CHANNEL_FC2_WITH_FRAME_DELIMS"] = 122,
+ ["FLEXRAY"] = 106,
+ ["FRELAY"] = 26,
+ ["FRELAY_WITH_PHDR"] = 27,
+ ["GCOM_SERIAL"] = 78,
+ ["GCOM_TIE1"] = 77,
+ ["GPRS_LLC"] = 66,
+ ["GSM_UM"] = 116,
+ ["HHDLC"] = 32,
+ ["I2C_LINUX"] = 112,
+ ["IEEE802_15_4"] = 104,
+ ["IEEE802_15_4_NOFCS"] = 127,
+ ["IEEE802_15_4_NONASK_PHY"] = 113,
+ ["IEEE802_16_MAC_CPS"] = 93,
+ ["IEEE_802_11"] = 20,
+ ["IEEE_802_11_AVS"] = 24,
+ ["IEEE_802_11_NETMON"] = 126,
+ ["IEEE_802_11_PRISM"] = 21,
+ ["IEEE_802_11_RADIOTAP"] = 23,
+ ["IEEE_802_11_WITH_RADIO"] = 22,
+ ["INFINIBAND"] = 150,
+ ["IPMB_KONTRON"] = 103,
+ ["IPMI_TRACE"] = 173,
+ ["IPNET"] = 124,
+ ["IP_OVER_FC"] = 18,
+ ["IP_OVER_IB_PCAP"] = 180,
+ ["IP_OVER_IB_SNOOP"] = 137,
+ ["IRDA"] = 44,
+ ["ISDN"] = 17,
+ ["IXVERIWAVE"] = 144,
+ ["JPEG_JFIF"] = 123,
+ ["JUNIPER_ATM1"] = 67,
+ ["JUNIPER_ATM2"] = 68,
+ ["JUNIPER_CHDLC"] = 86,
+ ["JUNIPER_ETHER"] = 83,
+ ["JUNIPER_FRELAY"] = 85,
+ ["JUNIPER_GGSN"] = 87,
+ ["JUNIPER_MLFR"] = 82,
+ ["JUNIPER_MLPPP"] = 81,
+ ["JUNIPER_PPP"] = 84,
+ ["JUNIPER_PPPOE"] = 76,
+ ["JUNIPER_SVCS"] = 151,
+ ["JUNIPER_VP"] = 91,
+ ["K12"] = 80,
+ ["LAPB"] = 12,
+ ["LAPD"] = 131,
+ ["LAYER1_EVENT"] = 110,
+ ["LIN"] = 107,
+ ["LINUX_ATM_CLIP"] = 11,
+ ["LINUX_LAPD"] = 88,
+ ["LOCALTALK"] = 30,
+ ["LOGCAT"] = 163,
+ ["LOGCAT_BRIEF"] = 164,
+ ["LOGCAT_LONG"] = 170,
+ ["LOGCAT_PROCESS"] = 165,
+ ["LOGCAT_TAG"] = 166,
+ ["LOGCAT_THREAD"] = 167,
+ ["LOGCAT_THREADTIME"] = 169,
+ ["LOGCAT_TIME"] = 168,
+ ["MIME"] = 134,
+ ["MOST"] = 108,
+ ["MPEG"] = 96,
+ ["MPEG_2_TS"] = 138,
+ ["MTP2"] = 42,
+ ["MTP2_WITH_PHDR"] = 75,
+ ["MTP3"] = 43,
+ ["MUX27010"] = 133,
+ ["NETANALYZER"] = 135,
+ ["NETANALYZER_TRANSPARENT"] = 136,
+ ["NETLINK"] = 158,
+ ["NETTL_ETHERNET"] = 71,
+ ["NETTL_FDDI"] = 73,
+ ["NETTL_RAW_ICMP"] = 64,
+ ["NETTL_RAW_ICMPV6"] = 65,
+ ["NETTL_RAW_IP"] = 70,
+ ["NETTL_RAW_TELNET"] = 94,
+ ["NETTL_TOKEN_RING"] = 72,
+ ["NETTL_UNKNOWN"] = 74,
+ ["NETTL_X25"] = 79,
+ ["NFC_LLCP"] = 140,
+ ["NFLOG"] = 141,
+ ["NSTRACE_1_0"] = 119,
+ ["NSTRACE_2_0"] = 120,
+ ["NSTRACE_3_0"] = 162,
+ ["NULL"] = 15,
+ ["OLD_PFLOG"] = 31,
+ ["PACKETLOGGER"] = 118,
+ ["PER_PACKET"] = -1,
+ ["PFLOG"] = 39,
+ ["PKTAP"] = 171,
+ ["PPI"] = 97,
+ ["PPP"] = 4,
+ ["PPP_ETHER"] = 139,
+ ["PPP_WITH_PHDR"] = 19,
+ ["RAW_IP"] = 7,
+ ["RAW_IP4"] = 129,
+ ["RAW_IP6"] = 130,
+ ["RAW_IPFIX"] = 128,
+ ["REDBACK"] = 69,
+ ["RTAC_SERIAL"] = 153,
+ ["SCCP"] = 101,
+ ["SCTP"] = 149,
+ ["SDH"] = 145,
+ ["SDLC"] = 36,
+ ["SITA"] = 100,
+ ["SLIP"] = 3,
+ ["SLL"] = 25,
+ ["SOCKETCAN"] = 125,
+ ["STANAG_4607"] = 156,
+ ["STANAG_5066_D_PDU"] = 157,
+ ["SYMANTEC"] = 61,
+ ["TNEF"] = 114,
+ ["TOKEN_RING"] = 2,
+ ["TZSP"] = 37,
+ ["UNKNOWN"] = 0,
+ ["USB_FREEBSD"] = 92,
+ ["USBPCAP"] = 152,
+ ["USB_LINUX"] = 95,
+ ["USB_LINUX_MMAPPED"] = 115,
+ ["USER0"] = 45,
+ ["USER1"] = 46,
+ ["USER10"] = 55,
+ ["USER11"] = 56,
+ ["USER12"] = 57,
+ ["USER13"] = 58,
+ ["USER14"] = 59,
+ ["USER15"] = 60,
+ ["USER2"] = 47,
+ ["USER3"] = 48,
+ ["USER4"] = 49,
+ ["USER5"] = 50,
+ ["USER6"] = 51,
+ ["USER7"] = 52,
+ ["USER8"] = 53,
+ ["USER9"] = 54,
+ ["V5_EF"] = 142,
+ ["WFLEET_HDLC"] = 35,
+ ["WIRESHARK_UPPER_PDU"] = 156,
+ ["X2E_SERIAL"] = 111,
+ ["X2E_XORAYA"] = 105
+ },
+ ["wtap_comments"] = {
+ ["PER_INTERFACE"] = 2,
+ ["PER_PACKET"] = 4,
+ ["PER_SECTION"] = 1
+ },
+ ["wtap_encaps"] = '<table 36>',
+ ["wtap_filetypes"] = {
+ ["5VIEWS"] = 9,
+ ["AETHRA"] = 60,
+ ["ASCEND"] = 26,
+ ["BER"] = 12,
+ ["BTSNOOP"] = 50,
+ ["CAMINS"] = 64,
+ ["CATAPULT_DCT2000"] = 14,
+ ["COMMVIEW"] = 49,
+ ["COSINE"] = 17,
+ ["CSIDS"] = 18,
+ ["DAINTREE_SNA"] = 54,
+ ["DBS_ETHERWATCH"] = 19,
+ ["DCT3TRACE"] = 52,
+ ["ERF"] = 20,
+ ["EYESDN"] = 21,
+ ["HCIDUMP"] = 13,
+ ["I4BTRACE"] = 25,
+ ["IPFIX"] = 58,
+ ["IPTRACE_1_0"] = 10,
+ ["IPTRACE_2_0"] = 11,
+ ["ISERIES"] = 23,
+ ["ISERIES_UNICODE"] = 24,
+ ["JPEG_JFIF"] = 57,
+ ["K12"] = 40,
+ ["K12TEXT"] = 47,
+ ["LANALYZER"] = 34,
+ ["LOGCAT"] = 67,
+ ["LOGCAT_BRIEF"] = 68,
+ ["LOGCAT_LONG"] = 74,
+ ["LOGCAT_PROCESS"] = 69,
+ ["LOGCAT_TAG"] = 70,
+ ["LOGCAT_THREAD"] = 71,
+ ["LOGCAT_THREADTIME"] = 73,
+ ["LOGCAT_TIME"] = 72,
+ ["MIME"] = 59,
+ ["MPEG"] = 46,
+ ["MPEG_2_TS"] = 61,
+ ["NETSCALER_1_0"] = 55,
+ ["NETSCALER_2_0"] = 56,
+ ["NETSCALER_3_0"] = 66,
+ ["NETSCREEN"] = 48,
+ ["NETTL"] = 22,
+ ["NETWORK_INSTRUMENTS"] = 33,
+ ["NETXRAY_1_0"] = 16,
+ ["NETXRAY_1_1"] = 31,
+ ["NETXRAY_OLD"] = 15,
+ ["NGSNIFFER_COMPRESSED"] = 30,
+ ["NGSNIFFER_UNCOMPRESSED"] = 29,
+ ["PACKETLOGGER"] = 53,
+ ["PCAP"] = 1,
+ ["PCAPNG"] = 2,
+ ["PCAP_AIX"] = 4,
+ ["PCAP_NOKIA"] = 6,
+ ["PCAP_NSEC"] = 3,
+ ["PCAP_SS990417"] = 7,
+ ["PCAP_SS990915"] = 8,
+ ["PCAP_SS991029"] = 5,
+ ["PEEKCLASSIC_V56"] = 43,
+ ["PEEKCLASSIC_V7"] = 44,
+ ["PEEKTAGGED"] = 45,
+ ["PPPDUMP"] = 35,
+ ["RADCOM"] = 36,
+ ["SHOMITI"] = 38,
+ ["SNOOP"] = 37,
+ ["STANAG_4607"] = 65,
+ ["TNEF"] = 51,
+ ["TOSHIBA"] = 41,
+ ["TSPREC_CSEC"] = 2,
+ ["TSPREC_DSEC"] = 1,
+ ["TSPREC_MSEC"] = 3,
+ ["TSPREC_NSEC"] = 9,
+ ["TSPREC_SEC"] = 0,
+ ["TSPREC_USEC"] = 6,
+ ["UNKNOWN"] = 0,
+ ["VISUAL_NETWORKS"] = 42,
+ ["VMS"] = 39,
+ ["VWR_80211"] = 62,
+ ["VWR_ETH"] = 63
+ },
+ ["wtap_presence_flags"] = {
+ ["CAP_LEN"] = 2,
+ ["INTERFACE_ID"] = 4,
+ ["TS"] = 1
+ },
+ ["wtap_rec_types"] = {
+ ["FT_SPECIFIC_EVENT"] = 1,
+ ["FT_SPECIFIC_REPORT"] = 2,
+ ["PACKET"] = 0
+ },
+ ["xpcall"] = '<function 504>'
+}
diff --git a/test/lua/inspect.lua b/test/lua/inspect.lua
new file mode 100644
index 0000000..6b4aff9
--- /dev/null
+++ b/test/lua/inspect.lua
@@ -0,0 +1,715 @@
+-------------------------------------------------------------------
+-- This was changed for Wireshark's use by Hadriel Kaplan.
+--
+-- Changes made:
+-- * provided 'serialize' option to output serialized info (i.e., it can be marshaled),
+--   though note that serializing functions/metatables/userdata/threads will not
+--   magically make them be their original type when marshaled.
+-- * provided 'notostring' option, which if true disables calling the __tostring
+--   metamethod of tables.
+-- * made it always print the index number of numbered-array entries, on separate
+--   lines like the normal keyed entries (much easier to read this way, I think)
+-- New public functions:
+-- inspect.compare(first,second[,options])
+-- inspect.marshal(inString[,options])
+-- inspect.makeFilter(arrayTable)
+--
+-- For the *changes*:
+-- Copyright (c) 2014, Hadriel Kaplan
+-- My change to the code is in the Public Domain, or the BSD (3 clause) license if
+-- Public Domain does not apply in your country, or you would prefer a BSD license.
+-- But the original code is still under Enrique García Cota's MIT license (below).
+-------------------------------------------------------------------
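+-- A minimal usage sketch, assuming this file is loaded as a module, e.g. with
+-- `local inspect = require("inspect")`:
+--
+--   local t = { msg = "hello", list = { 1, 2 } }
+--   print(inspect(t))                         -- human-readable dump
+--   print(inspect(t, { serialize = true }))   -- output that loadstring() can re-read
+--   print(inspect(t, { notostring = true }))  -- ignore __tostring metamethods
+--   print(inspect(t, { depth = 1 }))          -- limit how deep nested tables are shown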
+
+local inspect ={
+ _VERSION = 'inspect.lua 2.0.0 - with changes',
+ _URL = 'http://github.com/kikito/inspect.lua',
+ _DESCRIPTION = 'human-readable representations of tables',
+ _LICENSE = [[
+ MIT LICENSE
+
+ Copyright (c) 2013 Enrique García Cota
+
+ Permission is hereby granted, free of charge, to any person obtaining a
+ copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be included
+ in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+ OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+ IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+ CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
+ TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
+ SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+ ]],
+ _TINDEX_KEY = '<index>', -- the key name to use for index number entries for tables
+ _DEPTH_MARKER = " ['<depth>'] = true " -- instead of printing '...' we print this
+}
+
+-- Apostrophizes the string if it has quotes, but not apostrophes
+-- Otherwise, it returns a regular quoted string
+local function smartQuote(str)
+ if str:match('"') and not str:match("'") then
+ return "'" .. str .. "'"
+ end
+ return '"' .. str:gsub('"', '\\"') .. '"'
+end
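+-- For example:
+--   smartQuote('say "hi"')  --> 'say "hi"'   (apostrophe-quoted: it contains a quote)
+--   smartQuote("don't")     --> "don't"      (regular double-quoted string)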
+
+local controlCharsTranslation = {
+ ["\a"] = "\\a", ["\b"] = "\\b", ["\f"] = "\\f", ["\n"] = "\\n",
+ ["\r"] = "\\r", ["\t"] = "\\t", ["\v"] = "\\v"
+}
+
+local function escapeChar(c) return controlCharsTranslation[c] end
+
+local function escape(str)
+ local result = str:gsub("\\", "\\\\"):gsub("(%c)", escapeChar)
+ return result
+end
+
+local function isIdentifier(str)
+ return type(str) == 'string' and str:match( "^[_%a][_%a%d]*$" )
+end
+
+local function isArrayKey(k, length)
+ return type(k) == 'number' and 1 <= k and k <= length
+end
+
+local function isDictionaryKey(k, length)
+ return not isArrayKey(k, length)
+end
+
+local defaultTypeOrders = {
+ ['number'] = 1, ['boolean'] = 2, ['string'] = 3, ['table'] = 4,
+ ['function'] = 5, ['userdata'] = 6, ['thread'] = 7
+}
+
+local function sortKeys(a, b)
+ local ta, tb = type(a), type(b)
+
+ -- strings and numbers are sorted numerically/alphabetically
+ if ta == tb and (ta == 'string' or ta == 'number') then return a < b end
+
+ local dta, dtb = defaultTypeOrders[ta], defaultTypeOrders[tb]
+ -- Two default types are compared according to the defaultTypeOrders table
+ if dta and dtb then return defaultTypeOrders[ta] < defaultTypeOrders[tb]
+ elseif dta then return true -- default types before custom ones
+ elseif dtb then return false -- custom types after default ones
+ end
+
+ -- custom types are sorted alphabetically by type name
+ return ta < tb
+end
+
+local function getDictionaryKeys(t)
+ local keys, length = {}, #t
+ for k,_ in pairs(t) do
+ if isDictionaryKey(k, length) then table.insert(keys, k) end
+ end
+ table.sort(keys, sortKeys)
+ return keys
+end
+
+local function getToStringResultSafely(t, mt)
+ local __tostring = type(mt) == 'table' and rawget(mt, '__tostring')
+ local str, ok
+ if type(__tostring) == 'function' then
+ ok, str = pcall(__tostring, t)
+ str = ok and str or 'error: ' .. tostring(str)
+ end
+ if type(str) == 'string' and #str > 0 then return str end
+end
+
+local maxIdsMetaTable = {
+ __index = function(self, typeName)
+ rawset(self, typeName, 0)
+ return 0
+ end
+}
+
+local idsMetaTable = {
+ __index = function (self, typeName)
+ local col = setmetatable({}, {__mode = "kv"})
+ rawset(self, typeName, col)
+ return col
+ end
+}
+
+local function countTableAppearances(t, tableAppearances)
+ tableAppearances = tableAppearances or setmetatable({}, {__mode = "k"})
+
+ if type(t) == 'table' then
+ if not tableAppearances[t] then
+ tableAppearances[t] = 1
+ for k,v in pairs(t) do
+ countTableAppearances(k, tableAppearances)
+ countTableAppearances(v, tableAppearances)
+ end
+ countTableAppearances(getmetatable(t), tableAppearances)
+ else
+ tableAppearances[t] = tableAppearances[t] + 1
+ end
+ end
+
+ return tableAppearances
+end
+
+local function parse_filter(filter)
+ if type(filter) == 'function' then return filter end
+ -- not a function, so it must be a table or table-like
+ filter = type(filter) == 'table' and filter or {filter}
+ local dictionary = {}
+ for _,v in pairs(filter) do dictionary[v] = true end
+ return function(x) return dictionary[x] end
+end
+
+local function makePath(path, key)
+ local newPath, len = {}, #path
+ for i=1, len do newPath[i] = path[i] end
+ newPath[len+1] = key
+ return newPath
+end
+
+-------------------------------------------------------------------
+function inspect.inspect(rootObject, options)
+ options = options or {}
+ local depth = options.depth or math.huge
+ local filter = parse_filter(options.filter or {})
+ local serialize = options.serialize
+
+ local depth_marker = inspect._DEPTH_MARKER
+
+ local tableAppearances = countTableAppearances(rootObject)
+
+ local buffer = {}
+ local maxIds = setmetatable({}, maxIdsMetaTable)
+ local ids = setmetatable({}, idsMetaTable)
+ local level = 0
+ local blen = 0 -- buffer length
+
+ local function puts(...)
+ local args = {...}
+ for i=1, #args do
+ blen = blen + 1
+ buffer[blen] = tostring(args[i])
+ end
+ end
+
+ -- like puts above, but wraps the output in quotes, so the text
+ -- becomes a proper quoted value when we're serializing
+ local function putv(...)
+ blen = blen + 1
+ buffer[blen] = "'"
+ puts(...)
+ blen = blen + 1
+ buffer[blen] = "'"
+ end
+
+ -- if serializing, using raw strings is unsafe, so we use the full "['key']" style
+ local function putk(...)
+ blen = blen + 1
+ buffer[blen] = "['"
+ puts(...)
+ blen = blen + 1
+ buffer[blen] = "']"
+ end
+
+ -- if not serializing, it's all puts
+ if not serialize then
+ putv = puts
+ putk = puts
+ depth_marker = '...'
+ end
+
+ -- disable using __tostring metamethod
+ local getToStringResultSafely = getToStringResultSafely
+ if options.notostring or serialize then
+ getToStringResultSafely = function() return end
+ end
+
+ local function down(f)
+ level = level + 1
+ f()
+ level = level - 1
+ end
+
+ local function tabify()
+ puts("\n", string.rep(" ", level))
+ end
+
+ local function commaControl(needsComma)
+ if needsComma then puts(',') end
+ return true
+ end
+
+ local function alreadyVisited(v)
+ return ids[type(v)][v] ~= nil
+ end
+
+ local function getId(v)
+ local tv = type(v)
+ local id = ids[tv][v]
+ if not id then
+ id = maxIds[tv] + 1
+ maxIds[tv] = id
+ ids[tv][v] = id
+ end
+ return id
+ end
+
+ local putValue -- forward declaration that needs to go before putTable & putKey
+
+ local function putKey(k)
+ if not serialize and isIdentifier(k) then return puts(k) end
+ puts("[")
+ putValue(k, {})
+ puts("]")
+ end
+
+ local function putTable(t, path)
+ if alreadyVisited(t) then
+ putv('<table ', getId(t), '>')
+ elseif level >= depth then
+ puts('{', depth_marker, '}')
+ else
+ if not serialize and tableAppearances[t] > 1 then puts('<', getId(t), '>') end
+
+ local dictKeys = getDictionaryKeys(t)
+ local length = #t
+ local mt = getmetatable(t)
+ local to_string_result = getToStringResultSafely(t, mt)
+
+ puts('{')
+ down(function()
+ if to_string_result then
+ puts(' -- ', escape(to_string_result))
+ if length >= 1 then tabify() end -- tabify the array values
+ end
+
+ local needsComma = false
+
+ if serialize and tableAppearances[t] > 1 then
+ getId(t)
+ end
+
+ for i=1, length do
+ needsComma = commaControl(needsComma)
+ -- just doing puts(' ') made for ugly arrays
+ tabify()
+ putKey(i)
+ puts(' = ')
+ putValue(t[i], makePath(path, i))
+ end
+
+ for _,k in ipairs(dictKeys) do
+ needsComma = commaControl(needsComma)
+ tabify()
+ putKey(k)
+ puts(' = ')
+ putValue(t[k], makePath(path, k))
+ end
+
+ if mt then
+ needsComma = commaControl(needsComma)
+ tabify()
+ putk('<metatable>')
+ puts(' = ')
+ putValue(mt, makePath(path, '<metatable>'))
+ end
+ end)
+
+ if #dictKeys > 0 or mt then -- dictionary table. Justify closing }
+ tabify()
+ elseif length > 0 then -- array tables have one extra space before closing }
+ puts(' ')
+ end
+
+ puts('}')
+ end
+
+ end
+
+ -- putValue is forward-declared above, before putTable & putKey
+ putValue = function(v, path)
+ if filter(v, path) then
+ putv('<filtered>')
+ else
+ local tv = type(v)
+
+ if tv == 'string' then
+ puts(smartQuote(escape(v)))
+ elseif tv == 'number' and v == math.huge then
+ putv('<number inf>')
+ elseif tv == 'number' or tv == 'boolean' or tv == 'nil' then
+ puts(tostring(v))
+ elseif tv == 'table' then
+ putTable(v, path)
+ else
+ putv('<',tv,' ',getId(v),'>')
+ end
+ end
+ end
+
+ putValue(rootObject, {})
+
+ return table.concat(buffer)
+end
+
+setmetatable(inspect, { __call = function(_, ...) return inspect.inspect(...) end })
+
+-------------------------------------------------------------------
+
+-- The above is very close to Enrique's original inspect library.
+-- Below are my main changes.
+
+-------------------------------------------------------------------
+-- Given a string generated by inspect() with the serialize option,
+-- this function marshals it back into a Lua table/whatever.
+-- If the string's table(s) had metatable(s), i.e. "<metatable>" tables,
+-- then this keeps them as "<metatable>" subtables unless the option
+-- 'nometa' is set to true.
+--
+-- This function also removes all "<index>" entries.
+--
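+-- A small usage sketch, assuming the serialized string came from inspect() above:
+--
+--   local s = inspect({ a = 1, b = { "x" } }, { serialize = true })
+--   local t = inspect.marshal(s)
+--   -- t.a == 1 and t.b[1] == "x" again; pass { nometa = true } to also drop
+--   -- any "<metatable>" subtables
+--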
+function inspect.marshal(inString, options)
+ options = options or {}
+ local index = inspect._TINDEX_KEY
+
+ local function removeIndex(t)
+ if type(t) == 'table' then
+ t[index] = nil
+ for _, v in pairs(t) do
+ removeIndex(v)
+ end
+ end
+ end
+
+ local function removeMeta(t)
+ if type(t) == 'table' then
+ t['<metatable>'] = nil
+ for _, v in pairs(t) do
+ removeMeta(v)
+ end
+ end
+ end
+
+ -- first skip past comments/empty lines
+ -- warning: this is super-hackish and fragile
+ local pos, ok, dk = 1, true, true
+ local fin
+ local stop = string.len(inString)
+ while ok or dk do
+ ok, fin = inString:find("^[%s\r\n]+",pos)
+ if ok then pos = fin + 1 end
+ dk, fin = inString:find("^%-%-.-\n",pos)
+ if dk then pos = fin + 1 end
+ end
+
+ if not inString:find("^%s*return[%s%{]",pos) then
+ inString = "return " .. inString
+ end
+
+ local t = assert(loadstring(inString))()
+
+ removeIndex(t)
+
+ if options.nometa then removeMeta(t) end
+
+ return t
+end
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+-- more private functions
+
+-- things like '<function>' are equal to '<function 32>'
+local mungetypes = {
+ {"^<function ?%d*>", '<function>'},
+ {"^<table ?%d*>", '<table>'},
+ {"^<userdata ?%d*>", '<userdata>'},
+ {"^<thread ?%d*>", '<thread>'}
+}
+local function normalizeString(s)
+ for _,t in ipairs(mungetypes) do
+ if s:find(t[1]) then
+ return t[2]
+ end
+ end
+ return s
+end
+
+local typetable = {
+ ['<function>'] = 'function',
+ ['<table>'] = 'table',
+ ['<userdata>'] = 'userdata',
+ ['<thread>'] = 'thread'
+}
+local function getType(v)
+ local tv = type(v)
+ if tv == 'string' then
+ tv = typetable[normalizeString(v)] or 'string'
+ end
+ return tv
+end
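+-- For example:
+--   getType('<function 32>') --> 'function'
+--   getType('<table>')       --> 'table'
+--   getType('plain text')    --> 'string'
+--   getType(42)              --> 'number'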
+
+local function tablelength(t)
+ local count = 0
+ for _ in pairs(t) do count = count + 1 end
+ return count
+end
+
+-- for pretty-printing paths, for debug output
+-- this is non-optimal, but only gets used in verbose mode anyway
+local function serializePath(path)
+ local t = {}
+ for i,k in ipairs(path) do
+ local tk = type(k)
+ if isIdentifier(k) then
+ t[i] = ((i == 1) and k) or ('.'..k)
+ elseif tk == 'string' then
+ t[i] = '[' .. smartQuote(escape(k)) .. ']'
+ elseif tk == 'number' or tk == 'boolean' then
+ t[i] = '[' .. tostring(k) .. ']'
+ else
+ t[i] = "['<" .. tk .. ">']"
+ end
+ end
+ if #t == 0 then t[1] = '{}' end
+ return table.concat(t)
+end
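+-- For example: serializePath({ "foo", 2, "bar baz" }) --> foo[2]["bar baz"]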
+
+-------------------------------------------------------------------
+
+-------------------------------------------------------------------
+-- Given one table and another, this function detects if the first is
+-- completely contained in the second object. The second can have more
+-- entries, but cannot be missing an entry in the first one. Entry values
+-- must match as well - i.e., string values are the same, numbers the
+-- same, booleans the same.
+--
+-- The function returns true if the first is in the second, false otherwise.
+-- It also returns a table of the diff, which will be empty if they matched.
+-- This returned table is structured like the first one passed in,
+-- so calling print(inspect(returnedTable)) will pretty-print it.
+--
+-- The returned table's members have their values replaced with mismatch
+-- information, explaining what the mismatch was. Setting the option "keep"
+-- makes it not replace the values, but keep them as they were in the first
+-- table.
+--
+-- By default, the key's values must match in both tables. If the option
+-- 'nonumber' is set, then number values are not compared. This is useful
+-- if they're things that can change (like exported C-code numbers).
+--
+-- By default, the metatables/"<metatable>" subtables are also compared. If the option
+-- 'nometa' is set, then metatables are not compared, nor does it matter if
+-- they exist in either table.
+--
+-- Like inspect(), there's a 'filter' option, which works the same way:
+-- it ignores its value completely in terms of matching, so their string values
+-- can be different, but the keys still have to exist. Sub-tables of
+-- such keys (i.e., if the key's value is a table) are not checked/compared.
+-- In other words, it's identical to the filter option for inspect().
+--
+-- The option 'ignore' is similar to 'filter', except matching ones
+-- are not checked for existence in the tables at all.
+--
+-- Setting the 'depth' option applies as in inspect(), to both tables.
+--
+-- Setting the option 'verbose' makes it print out as it compares, for
+-- debugging or test purposes.
+--
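+-- A small usage sketch (illustrative values only):
+--
+--   local ok, diff = inspect.compare({ a = 1 }, { a = 1, b = 2 })
+--   -- ok is true: everything in the first table exists, and matches, in the second
+--
+--   local ok2, diff2 = inspect.compare({ a = 1, c = 3 }, { a = 1 })
+--   -- ok2 is false, and diff2.c holds a message describing the missing entry;
+--   -- print(inspect(diff2)) pretty-prints the differences
+--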
+function inspect.compare(firstTable, secondTable, options)
+ options = options or {}
+ local depth = options.depth or math.huge
+ local filter = parse_filter(options.filter or {})
+ local ignore = parse_filter(options.ignore or {})
+
+ -- local scratch buffer for puts(); without these locals the helper below would
+ -- write to undefined globals if it were ever called (it is currently unused here)
+ local buffer, blen = {}, 0
+ local function puts(...)
+ local args = {...}
+ for i=1, #args do
+ blen = blen + 1
+ buffer[blen] = tostring(args[i])
+ end
+ end
+
+ -- for debug printing
+ local function dprint(...)
+ local args = {...}
+ print(table.concat(args))
+ end
+
+ local serializePath = serializePath
+
+ if not options.verbose then
+ dprint = function() return end
+ serializePath = function() return end
+ end
+
+ -- for error message replacing key value
+ local function emsg(...)
+ local args = {...}
+ return(table.concat(args))
+ end
+
+ if options.keep then
+ emsg = function() return end
+ end
+
+ -- declare checkValue here
+ local checkValue
+
+ local function checkTable(f, s, path)
+ dprint("checking ",serializePath(path)," table contents")
+
+ for k, v in pairs(f) do
+ local child = makePath(path, k)
+
+ if not ignore(v,child) then
+ local ret, msg = checkValue(v, s[k], child)
+ if ret then
+ f[k] = nil
+ elseif msg then
+ f[k] = msg
+ dprint(serializePath(child)," ",msg)
+ end
+ else
+ dprint("ignoring ",serializePath(child))
+ f[k] = nil
+ end
+ end
+ return tablelength(f) == 0
+ end
+
+ -- a wrapper for failure cases in checkValue() that can be handled the same way
+ local function compCheck(f,s,func)
+ if not func() then
+ return false, emsg("mismatched ",getType(f)," values: ",tostring(f)," --> ",tostring(s))
+ end
+ return true
+ end
+
+ -- kinda ugly, but I wanted pretty information output
+ checkValue = function(f, s, path)
+ local tf = getType(f)
+
+ dprint("checking ",serializePath(path)," (",tf,")")
+
+ if s == nil then
+ return false, emsg("missing ",tf,"!")
+ elseif tf ~= getType(s) then
+ return false, emsg("type mismatch (",tf,") --> (",getType(s),")")
+ elseif type(f) == 'table' then
+ return checkTable(f, s, path)
+ end
+
+ return compCheck(f,s,function()
+ if tf == 'string' or tf == 'boolean' then
+ return f == s
+ elseif tf == 'number' then
+ return f == s or options.nonumber
+ else
+ -- assume they're the same functions/userdata/looped-table
+ -- type matching before would already cover it otherwise
+ return true
+ end
+ end)
+ end
+
+ -- inspect+serialize both tables, to normalize them, separate their
+ -- metatables, limit depth, etc. Also, since we pass the filter option on,
+ -- the filtered items become "<filtered>" and will by definition match
+ local function normalizeTable(t)
+ return assert( inspect.marshal( inspect.inspect(t,{serialize=true,depth=depth,filter=filter}), {nometa=options.nometa} ))
+ end
+
+ local first = normalizeTable(firstTable)
+ local second = normalizeTable(secondTable)
+
+ return checkTable(first, second, {}), first
+
+end
+
+-------------------------------------------------------------------
+
+
+
+-------------------------------------------------------------------
+-- Given a table of key strings, return a function that can be used for
+-- the 'filter' option of inspect() and inspect.compare() functions.
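+--
+-- A small usage sketch (the table 'some_table' and the path strings are made up
+-- for illustration):
+--
+--   local filter = inspect.makeFilter({ "foo.bar", ".password" })
+--   print(inspect(some_table, { filter = filter }))
+--   -- some_table.foo.bar is shown as filtered, as is any key named 'password'
+--   -- at any depth (a leading '.' marks a wildcard key name)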
+function inspect.makeFilter(arrayTable)
+ local filter = {} -- our filter lookup tree (tables of tables)
+ local matchNode = {} -- a table instance we use as a key for nodes which match
+ local wildcard = {} -- a key table of wildcard match names
+
+ local function buildFilter(pathname)
+ local t = filter
+ local key
+ -- if the filtered name starts with a '.', it's a wildcard
+ if pathname:find("^%.") then
+ wildcard[pathname:sub(2)] = true
+ return
+ end
+ for sep, name in pathname:gmatch("([%.%[\"\']*)([^%.%[\"\'%]]+)[\"\'%]]?") do
+ if sep == '[' then
+ if name == 'true' then
+ key = true
+ elseif name == 'false' then
+ key = false
+ else
+ key = tonumber(name)
+ end
+ else
+ -- to be safe, we'll check the key name doesn't mean a table/function/userdata
+ local tn = getType(name)
+ if tn == 'string' then
+ key = name
+ else
+ error("filter key '"..pathname.."' has key '"..name.."' which is an unsupported type ("..tn..")")
+ end
+ end
+
+ if not t[key] then
+ t[key] = {}
+ end
+ t = t[key]
+ end
+
+ t[matchNode] = true
+ end
+
+ -- we could call serializePath() and do a simple lookup, but it's expensive and
+ -- we'd be calling it a LOT. So instead we break up the filter
+ -- table into true "path" elements, into a filter tree, and compare
+ -- against it... thereby avoiding string concat/manip during compare.
+
+ for _, pathname in ipairs(arrayTable) do
+ buildFilter(pathname)
+ end
+
+ return function(value,path)
+ local t = filter
+ if wildcard[ path[#path] ] then
+ return true
+ end
+ for _,v in ipairs(path) do
+ if not t[v] then
+ return false
+ end
+ t = t[v]
+ end
+ return t[matchNode] == true
+ end
+
+end
+
+return inspect
+
diff --git a/test/lua/int64.lua b/test/lua/int64.lua
new file mode 100644
index 0000000..6a703d3
--- /dev/null
+++ b/test/lua/int64.lua
@@ -0,0 +1,360 @@
+
+-- This is a test script for tshark/wireshark.
+-- This script runs inside tshark/wireshark, so to run it do:
+-- wireshark -X lua_script:<path_to_testdir>/lua/int64.lua
+-- tshark -r bogus.cap -X lua_script:<path_to_testdir>/lua/int64.lua
+
+-- Tests Int64/UInt64 functions
+
+local testlib = require("testlib")
+local OTHER = "other"
+testlib.init( { [OTHER] = 23 } )
+
+-- you can't use '==' to compare userdata objects with plain numbers, so this function does the comparison instead.
+function checkeq(arg1,arg2)
+ if arg1 == arg2 then
+ return true
+ elseif type(arg1) == 'userdata' and arg1.tonumber then
+ if type(arg2) == 'userdata' and arg2.tonumber then
+ return arg1:tonumber() == arg2:tonumber()
+ else
+ return arg1:tonumber() == arg2
+ end
+ elseif type(arg2) == 'userdata' and arg2.tonumber then
+ return arg1 == arg2:tonumber()
+ else
+ return false
+ end
+end
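+-- For example, checkeq(Int64.new(5), 5) is true, whereas Int64.new(5) == 5 would
+-- simply be false, since '==' never coerces between userdata and plain numbers.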
+
+-----------------------------
+
+testlib.testing("Int64/UInt64 library")
+
+local testtbl = {
+ { ["type"]=Int64, ["name"]="Int64" } ,
+ { ["type"]=UInt64, ["name"]="UInt64" },
+}
+
+for i,t in ipairs(testtbl) do
+ testlib.init( { [t.name] = 125+(t.name == "Int64" and 3 or 0) } )
+
+ testlib.testing(t.name, "class")
+ local obj = t.type
+
+ for name, val in pairs(obj) do
+ print("\t"..name.." = "..type(val))
+ end
+
+ testlib.test(t.name,"class1",type(obj) == 'table')
+ testlib.test(t.name,"class2",type(obj.new) == 'function')
+ testlib.test(t.name,"class3",type(obj.max) == 'function')
+ testlib.test(t.name,"class4",type(obj.min) == 'function')
+ testlib.test(t.name,"class5",type(obj.tonumber) == 'function')
+ testlib.test(t.name,"class6",type(obj.fromhex) == 'function')
+ testlib.test(t.name,"class7",type(obj.tohex) == 'function')
+ testlib.test(t.name,"class8",type(obj.higher) == 'function')
+ testlib.test(t.name,"class9",type(obj.lower) == 'function')
+
+
+ testlib.testing(t.name, "new, tonumber, tostring")
+ local val = 12345
+ local my64a = obj.new(val)
+ local my64b = obj.new(tostring(val))
+ local zero = obj.new(0)
+ -- remember in Lua it's a double, so only precise up to 9,007,199,254,740,992
+ local my64c = obj.new(val,100)
+ local valc = (100 * 4294967296) + val
+ print(tostring(my64c))
+ local my64z = obj.new(0,0)
+ local my64d = obj.new(0,100)
+ local vald = (100 * 4294967296)
+
+ testlib.test(t.name,"new1",checkeq(my64a,val))
+ testlib.test(t.name,"new2",checkeq(my64b,val))
+ testlib.test(t.name,"new3",checkeq(my64a,obj.new(my64b)))
+ testlib.test(t.name,"new3b",checkeq(my64a,obj(my64b)))
+ testlib.test(t.name,"new4",checkeq(valc,my64c))
+ testlib.test(t.name,"new5",checkeq(0,my64z))
+ testlib.test(t.name,"new6",obj.new(0,1):tonumber() == (2^32))
+ if t.name == "Int64" then
+ testlib.test(t.name,"new7",obj(-1):tonumber() == -1)
+ testlib.test(t.name,"new8",obj.new(0,-1):tonumber() == -4294967296)
+ testlib.test(t.name,"new9",obj(obj.new(-1)):tonumber() == -1)
+ end
+
+ testlib.test(t.name,"tonumber1",val == my64a:tonumber())
+ testlib.test(t.name,"tonumber2",valc == my64c:tonumber())
+ testlib.test(t.name,"tonumber3",vald == my64d:tonumber())
+ testlib.test(t.name,"tonumber4",0 == my64z:tonumber())
+
+ testlib.test(t.name,"tostring1", tostring(my64a)==tostring(val))
+ testlib.test(t.name,"tostring2",tostring(my64b)==tostring(val))
+ testlib.test(t.name,"tostring3",tostring(my64c)==tostring(valc))
+ testlib.test(t.name,"tostring4",tostring(my64d)==tostring(vald))
+
+
+ testlib.testing(t.name, "compare ops")
+
+ testlib.test(t.name,"eq", my64a == my64b)
+
+ testlib.test(t.name,"le1", my64a <= my64b)
+ testlib.test(t.name,"le2", my64a <= my64c)
+ testlib.test(t.name,"le3", my64z <= my64c)
+
+ testlib.test(t.name,"ge1", my64a >= my64b)
+ testlib.test(t.name,"ge2", my64c >= my64b)
+ testlib.test(t.name,"ge3", my64c >= my64z)
+
+ testlib.test(t.name,"neq1",not(my64a ~= my64b))
+ testlib.test(t.name,"neq2",my64a ~= obj(0))
+ testlib.test(t.name,"neq3",my64a ~= my64c)
+
+ testlib.test(t.name,"gt1",my64a > my64z)
+ testlib.test(t.name,"gt2",my64c > my64a)
+
+ testlib.test(t.name,"lt1",not(my64a < my64b))
+ testlib.test(t.name,"lt2",my64a < my64c)
+
+
+ testlib.testing(t.name, "math ops")
+
+ testlib.test(t.name,"add1",checkeq(my64a + my64b, val + val))
+ testlib.test(t.name,"add2",my64a + my64z == my64b)
+ testlib.test(t.name,"add3",my64a + my64b == my64b + my64a)
+ testlib.test(t.name,"add4",my64d + my64a == my64c)
+ testlib.test(t.name,"add5",checkeq(my64a + vald, valc))
+ testlib.test(t.name,"add6",checkeq(vald + my64a, valc))
+
+ testlib.test(t.name,"sub1",checkeq(my64a - my64b, 0))
+ testlib.test(t.name,"sub2",my64a - my64b == my64z)
+ testlib.test(t.name,"sub3",my64a - my64b == my64b - my64a)
+ testlib.test(t.name,"sub4",my64c - my64a == my64d)
+ testlib.test(t.name,"sub5",checkeq(my64a - val, 0))
+
+ testlib.test(t.name,"mod1",checkeq(my64a % my64b, 0))
+ testlib.test(t.name,"mod2",checkeq(my64c % my64b, valc % val))
+ testlib.test(t.name,"mod3",checkeq(my64c % val, valc % val))
+ testlib.test(t.name,"mod4",checkeq(val % my64c, val % valc))
+
+ testlib.test(t.name,"div1",checkeq(my64a / my64b, 1))
+ testlib.test(t.name,"div2",checkeq(my64a / val, 1))
+ testlib.test(t.name,"div3",checkeq(val / my64a, 1))
+ testlib.test(t.name,"div4",my64c / my64d == obj.new(1))
+
+ testlib.test(t.name,"pow1",checkeq(my64a ^ 1, val))
+ testlib.test(t.name,"pow2",checkeq(my64a ^ obj.new(2), val ^ 2))
+ testlib.test(t.name,"pow3",checkeq(my64a ^ obj.new(3), val ^ 3))
+ testlib.test(t.name,"pow4",checkeq(my64c ^ 1, valc ^ 1))
+
+ testlib.test(t.name,"mul1",checkeq(my64a * obj(1), my64b))
+ testlib.test(t.name,"mul2",checkeq(my64a * my64b, my64b * my64a))
+ testlib.test(t.name,"mul3",checkeq(my64a * 1, my64b))
+ testlib.test(t.name,"mul4",checkeq(2 * my64c, 2 * valc))
+
+ if t.name == "Int64" then
+ -- unary minus on UInt64 is illogical, but oh well
+ testlib.test(t.name,"unm1",checkeq(-my64a,-val))
+ testlib.test(t.name,"unm2",checkeq(string.sub(tostring(-my64a),1,1), "-"))
+ testlib.test(t.name,"unm3",checkeq(-my64c,-valc))
+ else
+ testlib.test(t.name,"unm1",checkeq(-my64a,val))
+ testlib.test(t.name,"unm2",checkeq(string.sub(tostring(-my64a),1,1), "1"))
+ testlib.test(t.name,"unm3",checkeq(-my64c,valc))
+ end
+ testlib.test(t.name,"unm4",checkeq(-my64z,0))
+
+ testlib.testing(t.name, "methods")
+
+ testlib.test(t.name,"higher1",my64a:higher() == 0)
+ testlib.test(t.name,"higher2",my64c:higher() == 100)
+
+ testlib.test(t.name,"lower1",my64a:lower() == val)
+ testlib.test(t.name,"lower2",my64c:lower() == val)
+ testlib.test(t.name,"lower3",my64d:lower() == 0)
+
+ local vale1 = 3735928559 -- yields hex of deadbeef
+ local vale2 = 5045997 -- yields 4cfeed
+ local my64e = obj.new(vale1, vale2)
+ testlib.test(t.name,"fromhex1",obj.fromhex("0000000000003039") == my64a);
+ testlib.test(t.name,"fromhex2",obj.fromhex("3039") == my64a);
+ testlib.test(t.name,"fromhex3",obj.fromhex("0000006400003039") == my64c);
+ testlib.test(t.name,"fromhex4",obj.fromhex("0000000000000000") == my64z);
+ testlib.test(t.name,"fromhex5",obj.fromhex("004cfeeddeadbeef") == my64e);
+ testlib.test(t.name,"fromhex6",obj.fromhex("4cFEEDDEADBEEF") == my64e);
+
+ testlib.test(t.name,"tohex1",my64a:tohex() == "0000000000003039")
+ testlib.test(t.name,"tohex2",my64c:tohex(16) == "0000006400003039")
+ testlib.test(t.name,"tohex3",my64z:tohex() == "0000000000000000")
+ testlib.test(t.name,"tohex4",my64e:tohex() == "004cfeeddeadbeef")
+ testlib.test(t.name,"tohex5",my64e:tohex(8) == "deadbeef")
+ testlib.test(t.name,"tohex6",my64e:tohex(-8) == "DEADBEEF")
+
+ testlib.test(t.name,"encode1",my64a:encode(true) == "\57\48\00\00\00\00\00\00")
+ testlib.test(t.name,"encode2",my64a:encode(false) == "\00\00\00\00\00\00\48\57")
+ testlib.test(t.name,"encode3",my64c:encode(false) == "\00\00\00\100\00\00\48\57")
+
+ testlib.test(t.name,"decode1",obj.decode("\57\48\00\00\00\00\00\00", true) == my64a)
+ testlib.test(t.name,"decode2",obj.decode("\00\00\00\00\00\00\48\57", false) == my64a)
+ testlib.test(t.name,"decode3",obj.decode("\00\00\00\100\00\00\48\57", false) == my64c)
+
+
+ local function testpower(b)
+ testlib.testing(t.name, "powers of "..b)
+ b=obj.new(b)
+ local z=obj.new(1)
+ for i=0,100 do
+ print(i,z,b^i)
+ assert(z==b^i)
+ z=b*z
+ end
+ end
+
+ testpower(2)
+ testpower(3)
+
+ testlib.testing(t.name, "factorials")
+
+ F={
+ [1]="1",
+ [2]="2",
+ [3]="6",
+ [4]="24",
+ [5]="120",
+ [6]="720",
+ [7]="5040",
+ [8]="40320",
+ [9]="362880",
+ [10]="3628800",
+ [11]="39916800",
+ [12]="479001600",
+ [13]="6227020800",
+ [14]="87178291200",
+ [15]="1307674368000",
+ [16]="20922789888000",
+ [17]="355687428096000",
+ [18]="6402373705728000",
+ [19]="121645100408832000",
+ [20]="2432902008176640000",
+ }
+ z=obj.new(1)
+ f=1
+ for i=1,20 do
+ z=z*i
+ f=f*i
+ s=obj.tonumber(z)
+ print(i,z,f,f==obj.tonumber(z),tostring(z)==F[i])
+ --print(i,int64.new(F[i]))
+ end
+
+ testlib.testing(t.name, "bit operations")
+
+ testlib.test(t.name,"band1",checkeq(obj(1):band(1), 1))
+ testlib.test(t.name,"band2",checkeq(obj(1):band(0), 0))
+ testlib.test(t.name,"band3",checkeq(obj(4294967295,100):band(4294967295), 4294967295))
+ testlib.test(t.name,"band4",obj.new(4294967295,100):band(obj(0,100),obj(0,100),obj(0,100)) == obj(0,100))
+ testlib.test(t.name,"band5",checkeq(obj.new(4294967295,100):band(obj.new(0,100),obj(0)), 0))
+
+ testlib.test(t.name,"bor1",checkeq(obj(1):bor(1), 1))
+ testlib.test(t.name,"bor2",checkeq(obj(1):bor(0), 1))
+ testlib.test(t.name,"bor3",checkeq(obj(0):bor(0), 0))
+ testlib.test(t.name,"bor4",obj.new(0,100):bor(4294967295) == obj.new(4294967295,100))
+ testlib.test(t.name,"bor5",obj.new(1):bor(obj(2),obj.new(4),obj(8),16,32,64,128) == obj(255))
+
+ testlib.test(t.name,"bxor1",checkeq(obj.new(1):bxor(1), 0))
+ testlib.test(t.name,"bxor2",checkeq(obj.new(1):bxor(0), 1))
+ testlib.test(t.name,"bxor3",checkeq(obj.new(0):bxor(0), 0))
+ testlib.test(t.name,"bxor4",obj.new(4294967295,100):bxor(obj(0,100)) == obj.new(4294967295))
+ testlib.test(t.name,"bxor5",obj.new(1):bxor(obj(2),obj(4),obj(8),16,32,64,128) == obj(255))
+
+ testlib.test(t.name,"bnot1",checkeq(obj.new(4294967295,4294967295):bnot(), 0))
+ testlib.test(t.name,"bnot2",obj.new(0):bnot() == obj.new(4294967295,4294967295))
+ testlib.test(t.name,"bnot3",obj.new(0xaaaaaaaa,0xaaaaaaaa):bnot() == obj.new( 0x55555555, 0x55555555))
+
+ testlib.test(t.name,"bswap1",obj.new( 0x01020304, 0x05060708 ):bswap() == obj.new( 0x08070605, 0x04030201 ))
+ testlib.test(t.name,"bswap2",obj.new( 0xFF020304, 0xFF060708 ):bswap() == obj.new( 0x080706FF, 0x040302FF ))
+
+ testlib.test(t.name,"lshift1",obj.new( 0x01020304, 0x0506070F ):lshift(4) == obj.new( 0x10203040, 0x506070f0 ))
+ testlib.test(t.name,"lshift2",obj.new( 0x0102030F, 0x05060708 ):lshift(63) == obj.new( 0, 0x80000000 ))
+ if t.name == "Int64" then
+ testlib.test(t.name,"lshift3",checkeq(obj.new( 0x0102030F, 0x05060708 ):lshift(63), -9223372036854775808))
+ else
+ testlib.test(t.name,"lshift3",obj.new( 0x0102030F, 0x05060708 ):lshift(63) == obj.new( 0, 0x80000000 ))
+ end
+
+ testlib.test(t.name,"rshift1",obj.new( 0x01020304, 0xF5060708 ):rshift(4) == obj.new( 0x80102030, 0x0F506070 ))
+ testlib.test(t.name,"rshift2",checkeq(obj.new( 0x01020304, 0xF5060708 ):rshift(63), 1))
+
+ if t.name == "Int64" then
+ testlib.test(t.name,"arshift1",obj.new( 0x01020304, 0xF5060708 ):arshift(4) == obj.new( 0x80102030, 0xFF506070 ))
+ testlib.test(t.name,"arshift2",obj.new( 0x01020304, 0xF5060708 ):arshift(63) == obj.new( 0xFFFFFFFF, 0xFFFFFFFF ))
+ else
+ testlib.test(t.name,"arshift1",obj.new( 0x01020304, 0xF5060708 ):arshift(4) == obj.new( 0x80102030, 0x0F506070 ))
+ testlib.test(t.name,"arshift2",checkeq(obj.new( 0x01020304, 0xF5060708 ):arshift(63),1))
+ end
+ testlib.test(t.name,"arshift3",obj.new( 0x01020304, 0x05060708 ):arshift(4) == obj.new( 0x80102030, 0x00506070 ))
+ testlib.test(t.name,"arshift4",checkeq(obj.new( 0x01020304, 0x05060708 ):arshift(63), 0))
+
+ testlib.test(t.name,"rol1",obj.new( 0x01020304, 0xF5060708 ):rol(4) == obj.new( 0x1020304F, 0x50607080 ))
+ testlib.test(t.name,"rol2",obj.new( 0x01020304, 0xF5060708 ):rol(32):rol(32) == obj.new( 0x01020304, 0xF5060708 ))
+
+ testlib.test(t.name,"ror1",obj.new( 0x01020304, 0xF5060708 ):ror(4) == obj.new( 0x80102030, 0x4F506070 ))
+ testlib.test(t.name,"ror2",obj.new( 0x01020304, 0xF5060708 ):ror(32):ror(32) == obj.new( 0x01020304, 0xF5060708 ))
+
+end
+
+testlib.testing("min and max values")
+z=Int64.new(2)
+z=z^63-1
+testlib.test(OTHER,"max1",tostring(Int64.max()) == "9223372036854775807")
+testlib.test(OTHER,"max2",Int64.max() == Int64.new(4294967295, 2147483647))
+testlib.test(OTHER,"max3",z==Int64.max())
+testlib.test(OTHER,"min1",tostring(Int64.min()) == "-9223372036854775808")
+testlib.test(OTHER,"min2",Int64.min() == Int64.new(0,2147483648))
+z=-z
+z=z-1
+testlib.test(OTHER,"min3",z==Int64.min())
+
+testlib.test(OTHER,"minmax",Int64.min()== - Int64.max() - 1)
+
+--Because of g_ascii_strtoll() usage without errno check, "invalid" strings are converted to 0
+testlib.testing("invalid string values")
+testlib.test(OTHER,"invalid",Int64.new("invalid")== Int64.new(0,0))
+testlib.test(OTHER,"invalid2",UInt64.new("invalid")== UInt64.new(0,0))
+
+testlib.testing("error conditions")
+
+local function divtest(f,s)
+ local r = (f / s)
+ if r == 5 then
+ io.stdout:write("ok...")
+ else
+ error("test failed!")
+ end
+end
+
+local function modtest(f,s)
+ local r = (f % s)
+ if r == 5 then
+ io.stdout:write("ok...")
+ else
+ error("test failed!")
+ end
+end
+
+testlib.test(OTHER,"error1", pcall(divtest, 10, 2)) -- not an error, but checking the div function works above
+testlib.test(OTHER,"error2", not pcall(divtest, Int64(10), 0))
+testlib.test(OTHER,"error3", not pcall(divtest, Int64(10), Int64(0)))
+testlib.test(OTHER,"error4", not pcall(divtest, Int64(10), UInt64(0)))
+testlib.test(OTHER,"error5", not pcall(divtest, UInt64(10), 0))
+testlib.test(OTHER,"error6", not pcall(divtest, UInt64(10), Int64(0)))
+testlib.test(OTHER,"error7", not pcall(divtest, UInt64(10), UInt64(0)))
+testlib.test(OTHER,"error8", pcall(modtest, 17, 6)) -- not an error, but checking the mod function works above
+testlib.test(OTHER,"error9", not pcall(modtest, Int64(10), 0))
+testlib.test(OTHER,"error10", not pcall(modtest, Int64(10), Int64(0)))
+testlib.test(OTHER,"error11", not pcall(modtest, Int64(10), UInt64(0)))
+testlib.test(OTHER,"error12", not pcall(modtest, UInt64(10), 0))
+testlib.test(OTHER,"error13", not pcall(modtest, UInt64(10), Int64(0)))
+testlib.test(OTHER,"error14", not pcall(modtest, UInt64(10), UInt64(0)))
+
+testlib.getResults()
diff --git a/test/lua/listener.lua b/test/lua/listener.lua
new file mode 100644
index 0000000..8716f2a
--- /dev/null
+++ b/test/lua/listener.lua
@@ -0,0 +1,246 @@
+-- test script for various Lua functions
+-- use with dhcp.pcap in test/captures directory
+
+local testlib = require("testlib")
+
+------------- general test helper funcs ------------
+local FRAME = "frame"
+local ETH = "eth"
+local IP = "ip"
+local DHCP = "dhcp"
+local OTHER = "other"
+local PDISS = "postdissector"
+
+-- expected number of runs per type
+-- note ip (5 tests) only runs 3 times because it gets removed
+-- and dhcp (5 tests) only runs twice because the filter makes it run
+-- once and then it gets replaced with a different one for the second time
+local n_frames = 4
+local taptests = {
+ [FRAME]=5*n_frames,
+ [ETH]=5*n_frames,
+ [IP]=5*3,
+ [DHCP]=5*2,
+ [OTHER]=16,
+ [PDISS]=n_frames,
+}
+testlib.init(taptests)
+
+local pkt_fields = { [FRAME] = {}, [PDISS] = {} }
+local function getAllFieldInfos(group)
+ local fields = { all_field_infos() }
+ local fieldnames = {}
+ for i,v in ipairs(fields) do
+ fieldnames[i] = v.name
+ end
+ local pktnum = testlib.getPktCount(group)
+ pkt_fields[group][pktnum] = { ["num"] = #fields, ["fields"] = fieldnames }
+end
+
+local function dumpAllFieldInfos()
+ for i,v in ipairs(pkt_fields[FRAME]) do
+ print("In frame tap for packet ".. i ..":")
+ print(" number of fields = ".. v.num)
+ for _,name in ipairs(v.fields) do
+ print(" field = ".. name)
+ end
+ local w = pkt_fields[PDISS][i]
+ print("In postdissector for packet ".. i ..":")
+ print(" number of fields = ".. w.num)
+ for _,name in ipairs(w.fields) do
+ print(" field = ".. name)
+ end
+ end
+end
+
+local function checkAllFieldInfos()
+ for i,v in ipairs(pkt_fields[FRAME]) do
+ local numfields = v.num
+ if numfields ~= pkt_fields[PDISS][i].num then
+ print("Tap and postdissector do not have same number of fields!")
+ return false
+ end
+ if numfields < 100 then
+ print("Too few fields!")
+ return false
+ end
+ end
+ return true
+end
+
+
+---------
+-- the following are so we can use pcall (which needs a function to call)
+local function makeListener(...)
+ local foo = Listener.new(...)
+end
+
+local function setListener(tap,name,value)
+ tap[name] = value
+end
+
+local function getListener(tap,name)
+ local foo = tap[name]
+end
+
+------------- test script ------------
+testlib.testing(OTHER,"negative tests")
+testlib.test(OTHER,"Listener.new-1",not pcall(makeListener,"FooBARhowdy"))
+testlib.test(OTHER,"Listener.new-2",not pcall(makeListener,"ip","FooBARhowdy"))
+local tmptap = Listener.new()
+local func = function(...)
+ passed[OTHER] = 0
+ error("This shouldn't be called!")
+end
+testlib.test(OTHER,"Listener.set-3",pcall(setListener,tmptap,"packet",func))
+testlib.test(OTHER,"Listener.set-4",pcall(setListener,tmptap,"reset",func))
+testlib.test(OTHER,"Listener.set-5",pcall(setListener,tmptap,"draw",func))
+testlib.test(OTHER,"Listener.set-6",not pcall(setListener,Listener,"packet",func))
+testlib.test(OTHER,"Listener.set-7",not pcall(setListener,Listener,"reset",func))
+testlib.test(OTHER,"Listener.set-8",not pcall(setListener,Listener,"draw",func))
+testlib.test(OTHER,"Listener.set-9",not pcall(setListener,Listener,"foobar",func))
+
+testlib.test(OTHER,"Listener.get-10",not pcall(getListener,tmptap,"packet",func))
+testlib.test(OTHER,"Listener.get-11",not pcall(getListener,tmptap,"reset",func))
+testlib.test(OTHER,"Listener.get-12",not pcall(getListener,tmptap,"draw",func))
+
+print("removing tmptap twice")
+testlib.test(OTHER,"Listener.remove-13",pcall(tmptap.remove,tmptap))
+testlib.test(OTHER,"Listener.remove-14",pcall(tmptap.remove,tmptap))
+
+testlib.test(OTHER,"typeof-15", typeof(tmptap) == "Listener")
+
+
+-- declare some field extractors
+local f_eth_src = Field.new("eth.src")
+local f_eth_dst = Field.new("eth.dst")
+local f_eth_mac = Field.new("eth.addr")
+local f_ip_src = Field.new("ip.src")
+local f_ip_dst = Field.new("ip.dst")
+local f_dhcp_hw = Field.new("dhcp.hw.mac_addr")
+local f_dhcp_opt = Field.new("dhcp.option.type")
+
+local tap_frame = Listener.new(nil,nil,true)
+local tap_eth = Listener.new("eth")
+local tap_ip = Listener.new("ip","dhcp")
+local tap_dhcp = Listener.new("dhcp","dhcp.option.dhcp == 1")
+
+local second_time = false
+
+function tap_frame.packet(pinfo,tvb,frame)
+ testlib.countPacket(FRAME)
+ testlib.testing(FRAME,"Frame")
+
+ testlib.test(FRAME,"arg-1", typeof(pinfo) == "Pinfo")
+ testlib.test(FRAME,"arg-2", typeof(tvb) == "Tvb")
+ testlib.test(FRAME,"arg-3", frame == nil)
+
+ testlib.test(FRAME,"pinfo.number-1",pinfo.number == testlib.getPktCount(FRAME))
+
+ -- check ether addr
+ local eth_src1 = tostring(f_eth_src().range)
+ local eth_src2 = tostring(tvb:range(6,6))
+ testlib.test(FRAME,"FieldInfo.range-1", eth_src1 == eth_src2)
+
+ getAllFieldInfos(FRAME)
+end
+
+function tap_eth.packet(pinfo,tvb,eth)
+ testlib.countPacket(ETH)
+
+ -- on the 4th run of eth, remove the ip one and add a new dhcp one
+ if testlib.getPktCount(ETH) == 4 then
+ testlib.testing(ETH,"removing ip tap, replacing dhcp tap")
+ tap_ip:remove()
+ tap_dhcp:remove()
+ tap_dhcp = Listener.new("dhcp")
+ tap_dhcp.packet = dhcp_packet
+ second_time = true
+ end
+
+ testlib.testing(ETH,"Eth")
+
+ testlib.test(ETH,"arg-1", typeof(pinfo) == "Pinfo")
+ testlib.test(ETH,"arg-2", typeof(tvb) == "Tvb")
+ testlib.test(ETH,"arg-3", type(eth) == "table")
+
+ testlib.test(ETH,"pinfo.number-1",pinfo.number == testlib.getPktCount(ETH))
+
+ -- check ether addr
+ local eth_src1 = tostring(f_eth_src().range)
+ local eth_src2 = tostring(tvb:range(6,6))
+ testlib.test(ETH,"FieldInfo.range-1", eth_src1 == eth_src2)
+end
+
+function tap_ip.packet(pinfo,tvb,ip)
+ testlib.countPacket(IP)
+ testlib.testing(IP,"IP")
+
+ testlib.test(IP,"arg-1", typeof(pinfo) == "Pinfo")
+ testlib.test(IP,"arg-2", typeof(tvb) == "Tvb")
+ testlib.test(IP,"arg-3", type(ip) == "table")
+
+ testlib.test(IP,"pinfo.number-1",pinfo.number == testlib.getPktCount(IP))
+
+ -- check ether addr
+ local eth_src1 = tostring(f_eth_src().range)
+ local eth_src2 = tostring(tvb:range(6,6))
+ testlib.test(IP,"FieldInfo.range-1", eth_src1 == eth_src2)
+end
+
+dhcp_packet = function (pinfo,tvb,dhcp)
+ testlib.countPacket(DHCP)
+ testlib.testing(DHCP,"DHCP")
+
+ testlib.test(DHCP,"arg-1", typeof(pinfo) == "Pinfo")
+ testlib.test(DHCP,"arg-2", typeof(tvb) == "Tvb")
+ testlib.test(DHCP,"arg-3", dhcp == nil)
+
+ if not second_time then
+ testlib.test(DHCP,"pinfo.number-1",pinfo.number == testlib.getPktCount(DHCP))
+ else
+ testlib.test(DHCP,"pinfo.number-1",pinfo.number == 4)
+ end
+
+ -- check ether addr
+ local eth_src1 = tostring(f_eth_src().range)
+ local eth_src2 = tostring(tvb:range(6,6))
+ testlib.test(DHCP,"FieldInfo.range-1", eth_src1 == eth_src2)
+end
+tap_dhcp.packet = dhcp_packet
+
+function tap_frame.reset()
+ -- reset never gets called in tshark (sadly)
+ --[[ XXX: this is no longer the case?!
+ if not GUI_ENABLED then
+ error("reset called!!")
+ end
+ --]]
+end
+
+function tap_frame.draw()
+ testlib.test(OTHER,"all_field_infos", checkAllFieldInfos())
+ testlib.getResults()
+end
+
+-- max_gap.lua
+-- create a gap.max field containing the maximum gap between two packets between two ip nodes
+
+-- we create a "protocol" for our tree
+local max_gap_p = Proto("gap","Gap in IP conversations")
+
+-- we create our fields
+local max_gap_field = ProtoField.float("gap.max")
+
+-- we add our fields to the protocol
+max_gap_p.fields = { max_gap_field }
+
+-- then we register max_gap_p as a postdissector
+register_postdissector(max_gap_p,true)
+function max_gap_p.dissector(tvb,pinfo,tree)
+ testlib.countPacket(PDISS)
+ getAllFieldInfos(PDISS)
+ testlib.pass(PDISS)
+end
+
+
diff --git a/test/lua/nstime.lua b/test/lua/nstime.lua
new file mode 100644
index 0000000..f7e2f66
--- /dev/null
+++ b/test/lua/nstime.lua
@@ -0,0 +1,140 @@
+-- test script for various Lua functions
+-- use with dhcp.pcap in test/captures directory
+
+local testlib = require("testlib")
+
+local FRAME = "frame"
+local PER_FRAME = "per-frame"
+local OTHER = "other"
+
+-- expected number of runs per type
+local n_frames = 4
+local taptests = {
+ [FRAME]=n_frames,
+ [PER_FRAME]=n_frames*5,
+ [OTHER]=44
+}
+testlib.init(taptests)
+
+---------
+-- the following are so we can use pcall (which needs a function to call)
+local function setNSTime(nst,name,value)
+ nst[name] = value
+end
+
+local function getNSTime(nst,name)
+ local foo = nst[name]
+end
+
+------------- test script ------------
+testlib.testing(OTHER,"negative tests")
+testlib.test(OTHER,"NSTime.new-1",not pcall(NSTime,"FooBARhowdy"))
+testlib.test(OTHER,"NSTime.new-2",not pcall(NSTime,"ip","FooBARhowdy"))
+local tmptime = NSTime()
+testlib.test(OTHER,"NSTime.set-3",pcall(setNSTime,tmptime,"secs",10))
+testlib.test(OTHER,"NSTime.set-4",not pcall(setNSTime,tmptime,"foobar",1000))
+testlib.test(OTHER,"NSTime.set-5",pcall(setNSTime,tmptime,"nsecs",123))
+testlib.test(OTHER,"NSTime.set-6",not pcall(setNSTime,NSTime,"secs",0))
+testlib.test(OTHER,"NSTime.set-7",not pcall(setNSTime,tmptime,"secs","foobar"))
+testlib.test(OTHER,"NSTime.set-8",not pcall(setNSTime,NSTime,"nsecs",0))
+testlib.test(OTHER,"NSTime.set-9",not pcall(setNSTime,tmptime,"nsecs","foobar"))
+
+testlib.test(OTHER,"NSTime.get-10",pcall(getNSTime,tmptime,"secs"))
+testlib.test(OTHER,"NSTime.get-11",pcall(getNSTime,tmptime,"nsecs"))
+testlib.test(OTHER,"NSTime.get-12",not pcall(getNSTime,NSTime,"secs"))
+testlib.test(OTHER,"NSTime.get-13",not pcall(getNSTime,NSTime,"nsecs"))
+
+
+testlib.testing(OTHER,"basic tests")
+local first = NSTime()
+local second = NSTime(100,100)
+local third = NSTime(0,100)
+testlib.test(OTHER,"NSTime.secs-14", first.secs == 0)
+testlib.test(OTHER,"NSTime.secs-15", second.secs == 100)
+testlib.test(OTHER,"NSTime.secs-16", third.secs == 0)
+
+testlib.test(OTHER,"NSTime.nsecs-17", first.nsecs == 0)
+testlib.test(OTHER,"NSTime.nsecs-18", second.nsecs == 100)
+testlib.test(OTHER,"NSTime.nsecs-19", third.nsecs == 100)
+
+testlib.test(OTHER,"NSTime.eq-20", first == NSTime())
+testlib.test(OTHER,"NSTime.neq-21", second ~= third)
+
+testlib.test(OTHER,"NSTime.add-22", first + second == second)
+testlib.test(OTHER,"NSTime.add-23", third + NSTime(100,0) == second)
+testlib.test(OTHER,"NSTime.add-24", NSTime(100) + NSTime(nil,100) == second)
+
+testlib.test(OTHER,"NSTime.lt-25", third < second)
+testlib.test(OTHER,"NSTime.gt-26", third > first)
+testlib.test(OTHER,"NSTime.le-27", second <= NSTime(100,100))
+
+testlib.test(OTHER,"NSTime.unm-28", -first == first)
+testlib.test(OTHER,"NSTime.unm-29", -(-second) == second)
+testlib.test(OTHER,"NSTime.unm-30", -second == NSTime(-100,-100))
+testlib.test(OTHER,"NSTime.unm-31", -third == NSTime(0,-100))
+
+testlib.test(OTHER,"NSTime.tostring-32", tostring(first) == "0.000000000")
+testlib.test(OTHER,"NSTime.tostring-33", tostring(second) == "100.000000100")
+testlib.test(OTHER,"NSTime.tostring-34", tostring(third) == "0.000000100")
+
+testlib.test(OTHER,"NSTime.tonumber-35", first:tonumber() == 0.0)
+testlib.test(OTHER,"NSTime.tonumber-36", second:tonumber() == 100.0000001)
+testlib.test(OTHER,"NSTime.tonumber-37", third:tonumber() == 0.0000001)
+
+testlib.testing(OTHER,"setters/getters")
+first.secs = 123
+first.nsecs = 100
+testlib.test(OTHER,"NSTime.set-38", first == NSTime(123,100))
+testlib.test(OTHER,"NSTime.get-39", first.secs == 123)
+testlib.test(OTHER,"NSTime.get-40", first.nsecs == 100)
+
+local minus0_4 = NSTime() - NSTime(0,400000000)
+testlib.test(OTHER,"NSTime.negative_tonumber-41", minus0_4:tonumber() == -0.4)
+testlib.test(OTHER,"NSTime.negative_tostring-42", tostring(minus0_4) == "-0.400000000")
+local minus0_4 = NSTime() - NSTime(1,400000000)
+testlib.test(OTHER,"NSTime.negative_tonumber-43", minus0_4:tonumber() == -1.4)
+testlib.test(OTHER,"NSTime.negative_tostring-44", tostring(minus0_4) == "-1.400000000")
+
+
+----------------------------------
+
+-- declare some field extractors
+local f_frame_time = Field.new("frame.time")
+local f_frame_time_rel = Field.new("frame.time_relative")
+local f_frame_time_delta = Field.new("frame.time_delta")
+
+local tap = Listener.new()
+
+local begin = NSTime()
+local now, previous
+
+function tap.packet(pinfo,tvb,frame)
+ testlib.countPacket(FRAME)
+ testlib.testing(FRAME,"NSTime in Frame")
+
+ local fi_now = f_frame_time()
+ local fi_rel = f_frame_time_rel()
+ local fi_delta = f_frame_time_delta()
+
+ testlib.test(PER_FRAME,"typeof-1", typeof(begin) == "NSTime")
+ testlib.test(PER_FRAME,"typeof-2", typeof(fi_now()) == "NSTime")
+
+ now = fi_now()
+ if testlib.getPktCount(FRAME) == 1 then
+ testlib.test(PER_FRAME,"__eq-1", begin == fi_delta())
+ testlib.test(PER_FRAME,"NSTime.secs-1", fi_delta().secs == 0)
+ testlib.test(PER_FRAME,"NSTime.nsecs-1", fi_delta().nsecs == 0)
+ begin = fi_now()
+ else
+ testlib.test(PER_FRAME,"__sub__eq-1", now - previous == fi_delta())
+ testlib.test(PER_FRAME,"__sub__eq-2", now - begin == fi_rel())
+ testlib.test(PER_FRAME,"__add-1", (previous - begin) + (now - previous) == fi_rel())
+ end
+ previous = now
+
+ testlib.pass(FRAME)
+end
+
+function tap.draw()
+ testlib.getResults()
+end
diff --git a/test/lua/pcap_file.lua b/test/lua/pcap_file.lua
new file mode 100644
index 0000000..b949a38
--- /dev/null
+++ b/test/lua/pcap_file.lua
@@ -0,0 +1,752 @@
+-- pcap_file_reader.lua
+--------------------------------------------------------------------------------
+--[[
+ This is a Wireshark Lua-based pcap capture file reader.
+ Author: Hadriel Kaplan
+
+ This "capture file" reader reads pcap files - the old style ones. Don't expect this to
+ be as good as the real thing; this is a simplistic implementation to show how to
+ create such file readers, and for testing purposes.
+
+ This script requires Wireshark v1.12 or newer.
+--]]
+--------------------------------------------------------------------------------
+
+-- do not modify this table
+local debug = {
+ DISABLED = 0,
+ LEVEL_1 = 1,
+ LEVEL_2 = 2
+}
+
+-- set this DEBUG to debug.LEVEL_1 to enable printing debug info
+-- set it to debug.LEVEL_2 to enable really verbose printing
+local DEBUG = debug.LEVEL_1
+
+
+local wireshark_name = "Wireshark"
+if not GUI_ENABLED then
+ wireshark_name = "Tshark"
+end
+
+-- verify Wireshark is new enough
+local major, minor, micro = get_version():match("(%d+)%.(%d+)%.(%d+)")
+if major and tonumber(major) <= 1 and ((tonumber(minor) <= 10) or (tonumber(minor) == 11 and tonumber(micro) < 3)) then
+ error( "Sorry, but your " .. wireshark_name .. " version (" .. get_version() .. ") is too old for this script!\n" ..
+ "This script needs " .. wireshark_name .. " version 1.12 or higher.\n" )
+end
+
+-- verify we have the Struct library in wireshark
+-- technically we should be able to do this with 'require', but Struct is a built-in
+assert(Struct.unpack, wireshark_name .. " does not have the Struct library!")
+
+--------------------------------------------------------------------------------
+-- early definitions
+-- throughout most of this file I try to pre-declare things to help ease
+-- reading it and following the logic flow, but some things just have to be done
+-- before others, so this section holds the things that cannot be avoided
+--------------------------------------------------------------------------------
+
+-- first some variable declarations for functions we'll define later
+local parse_file_header, parse_rec_header, read_common
+
+-- these will be set inside of parse_file_header(), but we're declaring them up here
+local default_settings =
+{
+ debug = DEBUG,
+ corrected_magic = 0xa1b2c3d4,
+ version_major = 2,
+ version_minor = 4,
+ timezone = 0,
+ sigfigs = 0,
+ read_snaplen = 0, -- the snaplen we read from file
+ snaplen = 0, -- the snaplen we use (limited by WTAP_MAX_PACKET_SIZE)
+ linktype = -1, -- the raw linktype number in the file header
+ wtap_type = wtap_encaps.UNKNOWN, -- the mapped internal wtap number based on linktype
+ endianess = ENC_BIG_ENDIAN,
+ time_precision = wtap_tsprecs.USEC,
+ rec_hdr_len = 16, -- default size of record header
+ rec_hdr_patt = "I4 I4 I4 I4", -- pattern for Struct to use
+ num_rec_fields = 4, -- number of vars in pattern
+}
+
+local dprint = function() end
+local dprint2 = function() end
+local function reset_debug()
+ if default_settings.debug > debug.DISABLED then
+ dprint = function(...)
+ print(table.concat({"Lua:", ...}," "))
+ end
+
+ if default_settings.debug > debug.LEVEL_1 then
+ dprint2 = dprint
+ end
+ end
+end
+-- call it now
+reset_debug()
+
+--------------------------------------------------------------------------------
+-- file reader handling functions for Wireshark to use
+--------------------------------------------------------------------------------
+
+----------------------------------------
+-- The read_open() is called by Wireshark once per file, to see if the file is this reader's type.
+-- Wireshark passes in (1) a File object and (2) CaptureInfo object to this function
+-- It expects in return either nil or false to mean it's not our file type, or true if it is
+-- In our case what this means is we figure out if the file has the magic header, and get the
+-- endianess of the file, and the encapsulation type of its frames/records
+local function read_open(file, capture)
+ dprint2("read_open() called")
+
+ local file_settings = parse_file_header(file)
+
+ if file_settings then
+
+ dprint2("read_open: success, file is for us")
+
+ -- save our state
+ capture.private_table = file_settings
+
+ -- if the file is for us, we MUST set the file position cursor to where we
+ -- want the first call to the read() function to start reading next time;
+ -- for example if we had checked a few records to be sure it's our type,
+ -- we would seek back here, but in this simple example we only verify the
+ -- file header (24 bytes) and we want the file position to remain right
+ -- after that header for our read() call, so we don't change it back
+ --file:seek("set",position)
+
+ -- these we can also set per record later during read operations
+ capture.time_precision = file_settings.time_precision
+ capture.encap = file_settings.wtap_type
+ capture.snapshot_length = file_settings.snaplen
+
+ return true
+ end
+
+ dprint2("read_open: file not for us")
+
+ -- if it's not for us, wireshark will reset the file position itself
+
+ return false
+end
+
+----------------------------------------
+-- Wireshark/tshark calls read() for each frame/record in the file
+-- It passes in (1) a File, (2) CaptureInfo, and (3) FrameInfo object to this function
+-- It expects in return the file offset position the record starts at,
+-- or nil/false if there's an error or end-of-file is reached.
+-- The offset position is used later: wireshark remembers it and gives
+-- it to seek_read() at various random times
+local function read(file, capture, frame)
+ dprint2("read() called")
+
+ -- call our common reader function
+ local position = file:seek()
+
+ if not read_common("read", file, capture, frame) then
+ -- this isn't actually an error, because it might just mean we reached end-of-file
+ -- so let's test for that (read(0) is a special case in Lua, see Lua docs)
+ if file:read(0) ~= nil then
+ dprint("read: failed to call read_common")
+ else
+ dprint2("read: reached end of file")
+ end
+ return false
+ end
+
+ dprint2("read: success")
+
+ -- return the position we got to (or nil if we hit EOF/error)
+ return position
+end
+
+----------------------------------------
+-- Wireshark/tshark calls seek_read() for each frame/record in the file, at random times
+-- It passes in (1) a File, (2) CaptureInfo, (3) FrameInfo object, and the offset position number
+-- It expects in return true for successful parsing, or nil/false if there's an error.
+local function seek_read(file, capture, frame, offset)
+ dprint2("seek_read() called")
+
+ -- first move to the right position in the file
+ file:seek("set",offset)
+
+ if not read_common("seek_read", file, capture, frame) then
+ dprint("seek_read: failed to call read_common")
+ return false
+ end
+
+ return true
+end
+
+----------------------------------------
+-- Wireshark/tshark calls read_close() when it's closing the file completely
+-- It passes in (1) a File and (2) CaptureInfo object to this function
+-- this is a good opportunity to clean up any state you may have created during
+-- file reading. (in our case there's no real state)
+local function read_close(file, capture)
+ dprint2("read_close() called")
+ -- we don't really have to reset anything, because we used the
+ -- capture.private_table and wireshark clears it for us after this function
+ return true
+end
+
+----------------------------------------
+-- This function is often left unimplemented; Wireshark calls it when the sequential walk-through is over
+-- (i.e., no more calls to read(), only to seek_read()).
+-- It passes in (1) a File and (2) CaptureInfo object to this function
+-- This gives you a chance to clean up any state you used during read() calls, but remember
+-- that there will be calls to seek_read() after this (in Wireshark, though not Tshark)
+local function seq_read_close(file, capture)
+ dprint2("First pass of read() calls are over, but there may be seek_read() calls after this")
+ return true
+end
+
+----------------------------------------
+-- ok, so let's create a FileHandler object
+local fh = FileHandler.new("Lua-based PCAP reader", "lua_pcap", "A Lua-based file reader for PCAP-type files","rms")
+
+-- set above functions to the FileHandler
+fh.read_open = read_open
+fh.read = read
+fh.seek_read = seek_read
+fh.read_close = read_close
+fh.seq_read_close = seq_read_close
+fh.extensions = "pcap;cap" -- this is just a hint
+
+-- and finally, register the FileHandler!
+register_filehandler(fh)
+
+dprint2("FileHandler registered")
+
+--------------------------------------------------------------------------------
+-- ok now for the boring stuff that actually does the work
+--------------------------------------------------------------------------------
+
+----------------------------------------
+-- in Lua, we have access to encapsulation types in the 'wtap_encaps' table, but
+-- those numbers don't actually necessarily match the numbers in pcap files
+-- for the encapsulation type, because the namespace got screwed up at some
+-- point in the past (blame LBL NRG, not wireshark for that)
+-- but I'm not going to create the full mapping of these two namespaces
+-- instead we'll just use this smaller table to map them
+-- these are taken from wiretap/pcap-common.c
+local pcap2wtap = {
+ [0] = wtap_encaps.NULL,
+ [1] = wtap_encaps.ETHERNET,
+ [6] = wtap_encaps.TOKEN_RING,
+ [8] = wtap_encaps.SLIP,
+ [9] = wtap_encaps.PPP,
+ [101] = wtap_encaps.RAW_IP,
+ [105] = wtap_encaps.IEEE_802_11,
+ [127] = wtap_encaps.IEEE_802_11_RADIOTAP,
+ [140] = wtap_encaps.MTP2,
+ [141] = wtap_encaps.MTP3,
+ [143] = wtap_encaps.DOCSIS,
+ [147] = wtap_encaps.USER0,
+ [148] = wtap_encaps.USER1,
+ [149] = wtap_encaps.USER2,
+ [150] = wtap_encaps.USER3,
+ [151] = wtap_encaps.USER4,
+ [152] = wtap_encaps.USER5,
+ [153] = wtap_encaps.USER6,
+ [154] = wtap_encaps.USER7,
+ [155] = wtap_encaps.USER8,
+ [156] = wtap_encaps.USER9,
+ [157] = wtap_encaps.USER10,
+ [158] = wtap_encaps.USER11,
+ [159] = wtap_encaps.USER12,
+ [160] = wtap_encaps.USER13,
+ [161] = wtap_encaps.USER14,
+ [162] = wtap_encaps.USER15,
+ [186] = wtap_encaps.USB,
+ [187] = wtap_encaps.BLUETOOTH_H4,
+ [189] = wtap_encaps.USB_LINUX,
+ [195] = wtap_encaps.IEEE802_15_4,
+}
+
+-- we can use the above to directly map very quickly
+-- but to map it backwards we'll use this, because I'm lazy:
+local function wtap2pcap(encap)
+ for k,v in pairs(pcap2wtap) do
+ if v == encap then
+ return k
+ end
+ end
+ return 0
+end
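+-- for example, wtap2pcap(wtap_encaps.ETHERNET) returns 1 (the pcap linktype for
+-- Ethernet per the table above), and unknown encap types fall through to 0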
+
+----------------------------------------
+-- here are the "structs" we're going to parse for the various records in a pcap file
+-- these pattern strings get used in calls to Struct.unpack()
+--
+-- we will prepend a '<' or '>' later, once we figure out the endianness of the file
+--
+-- this is a constant for minimum we need to read before we figure out the filetype
+local FILE_HDR_LEN = 24
+-- a pcap file header struct
+-- this is: magic, version_major, version_minor, timezone, sigfigs, snaplen, encap type
+local FILE_HEADER_PATT = "I4 I2 I2 i4 I4 I4 I4"
+-- it's too bad Struct doesn't have a way to get the number of vars the pattern holds
+-- another thing to add to my to-do list?
+local NUM_HDR_FIELDS = 7
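+-- as a rough sketch of how this pattern gets used later, once an endian prefix
+-- has been prepended (hdr_bytes here stands in for the 24 header bytes read from the file):
+--   local fields = { Struct.unpack("<" .. FILE_HEADER_PATT, hdr_bytes) }
+-- Struct.unpack() returns the 7 header fields plus the position where it stopped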
+
+-- these will hold the '<'/'>' prepended version of above
+--local file_header, rec_header
+
+-- snaplen/caplen can't be bigger than this
+local WTAP_MAX_PACKET_SIZE = 65535
+
+----------------------------------------
+-- different pcap file types have different magic values
+-- we need to know various things about them for various functions
+-- in this script, so this table holds all the info
+--
+-- See default_settings table above for the defaults used if this table
+-- doesn't override them.
+--
+-- Arguably, these magic types represent different "Protocols" to dissect later,
+-- but this script treats them all as "pcapfile" protocol.
+--
+-- From this table, we'll auto-create a value-string table for file header magic field
+local magic_spells =
+{
+ normal =
+ {
+ magic = 0xa1b2c3d4,
+ name = "Normal (Big-endian)",
+ },
+ swapped =
+ {
+ magic = 0xd4c3b2a1,
+ name = "Swapped Normal (Little-endian)",
+ endianess = ENC_LITTLE_ENDIAN,
+ },
+ modified =
+ {
+ -- this is for a ss991029 patched format only
+ magic = 0xa1b2cd34,
+ name = "Modified",
+ rec_hdr_len = 24,
+ rec_hdr_patt = "I4I4I4I4 I4 I2 I1 I1",
+ num_rec_fields = 8,
+ },
+ swapped_modified =
+ {
+ -- this is for a ss991029 patched format only
+ magic = 0x34cdb2a1,
+ name = "Swapped Modified",
+ rec_hdr_len = 24,
+ rec_hdr_patt = "I4I4I4I4 I4 I2 I1 I1",
+ num_rec_fields = 8,
+ endianess = ENC_LITTLE_ENDIAN,
+ },
+ nsecs =
+ {
+ magic = 0xa1b23c4d,
+ name = "Nanosecond",
+ time_precision = wtap_filetypes.TSPREC_NSEC,
+ },
+ swapped_nsecs =
+ {
+ magic = 0x4d3cb2a1,
+ name = "Swapped Nanosecond",
+ endianess = ENC_LITTLE_ENDIAN,
+ time_precision = wtap_filetypes.TSPREC_NSEC,
+ },
+}
+
+-- create a magic-to-spell entry table from above magic_spells table
+-- so we can find them faster during file read operations
+-- we could just add them right back into spells table, but this is cleaner
+local magic_values = {}
+for k,t in pairs(magic_spells) do
+ magic_values[t.magic] = t
+end
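+-- so, for example, magic_values[0xa1b2c3d4] now refers to the "normal" spell entry above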
+
+-- the function which makes a copy of the default settings per file
+local function new_settings()
+ dprint2("creating new file_settings")
+ local file_settings = {}
+ for k,v in pairs(default_settings) do
+ file_settings[k] = v
+ end
+ return file_settings
+end
+
+-- set the file_settings that the magic value defines in magic_values
+local function set_magic_file_settings(magic)
+ local t = magic_values[magic]
+ if not t then
+ dprint("set_magic_file_settings: did not find magic settings for:",magic)
+ return false
+ end
+
+ local file_settings = new_settings()
+
+ -- the magic_values/spells table uses the same key names, so this is easy
+ for k,v in pairs(t) do
+ file_settings[k] = v
+ end
+
+ -- based on endianess, set the file_header and rec_header
+ -- and determine corrected_magic
+ if file_settings.endianess == ENC_BIG_ENDIAN then
+ file_settings.file_hdr_patt = '>' .. FILE_HEADER_PATT
+ file_settings.rec_hdr_patt = '>' .. file_settings.rec_hdr_patt
+ file_settings.corrected_magic = magic
+ else
+ file_settings.file_hdr_patt = '<' .. FILE_HEADER_PATT
+ file_settings.rec_hdr_patt = '<' .. file_settings.rec_hdr_patt
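+ -- byte-swap the magic value by packing it big-endian and unpacking it little-endian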
+ local m = Struct.pack(">I4", magic)
+ file_settings.corrected_magic = Struct.unpack("<I4", m)
+ end
+
+ file_settings.rec_hdr_len = Struct.size(file_settings.rec_hdr_patt)
+
+ return file_settings
+end
+
+----------------------------------------
+-- internal functions declared previously
+----------------------------------------
+
+----------------------------------------
+-- used by read_open(), this parses the file header
+parse_file_header = function(file)
+ dprint2("parse_file_header() called")
+
+ -- by default, file:read() gets the next "string", meaning ending with a newline \n
+ -- but we want raw byte reads, so tell it how many bytes to read
+ local line = file:read(FILE_HDR_LEN)
+
+ -- it's ok for us to not be able to read it, but we need to tell wireshark the
+ -- file's not for us, so return false
+ if not line then return false end
+
+ dprint2("parse_file_header: got this line:\n'", Struct.tohex(line,false,":"), "'")
+
+ -- let's peek at the magic int32, assuming it's big-endian
+ local magic = Struct.unpack(">I4", line)
+
+ local file_settings = set_magic_file_settings(magic)
+
+ if not file_settings then
+ dprint("magic was: '", magic, "', so not a known pcap file?")
+ return false
+ end
+
+ -- this is: magic, version_major, version_minor, timezone, sigfigs, snaplen, encap type
+ local fields = { Struct.unpack(file_settings.file_hdr_patt, line) }
+
+ -- sanity check; also note that Struct.unpack() returns the fields plus
+ -- a number of where in the line it stopped reading (i.e., the end in this case)
+ -- so we got back number of fields + 1
+ if #fields ~= NUM_HDR_FIELDS + 1 then
+ -- this should never happen, since we already told file:read() to grab enough bytes
+ dprint("parse_file_header: failed to read the file header")
+ return nil
+ end
+
+ -- fields[1] is the magic, which we already parsed and saved before, but just to be sure
+ -- our endianess is set right, we validate what we got is what we expect now that
+ -- endianess has been corrected
+ if fields[1] ~= file_settings.corrected_magic then
+ dprint ("parse_file_header: endianess screwed up? Got:'", fields[1],
+ "', but wanted:", file_settings.corrected_magic)
+ return nil
+ end
+
+ file_settings.version_major = fields[2]
+ file_settings.version_minor = fields[3]
+ file_settings.timezone = fields[4]
+ file_settings.sigfigs = fields[5]
+ file_settings.read_snaplen = fields[6]
+ file_settings.linktype = fields[7]
+
+ -- wireshark only supports version 2.0 and later
+ if fields[2] < 2 then
+ dprint("got version =", file_settings.version_major, "but only version 2 or greater supported")
+ return false
+ end
+
+ -- convert pcap file interface type to wtap number type
+ file_settings.wtap_type = pcap2wtap[file_settings.linktype]
+ if not file_settings.wtap_type then
+ dprint("file nettype", file_settings.linktype,
+ "couldn't be mapped to wireshark wtap type")
+ return false
+ end
+
+ file_settings.snaplen = file_settings.read_snaplen
+ if file_settings.snaplen > WTAP_MAX_PACKET_SIZE then
+ file_settings.snaplen = WTAP_MAX_PACKET_SIZE
+ end
+
+ dprint2("read_file_header: got magic='", magic,
+ "', major version='", file_settings.version_major,
+ "', minor='", file_settings.version_minor,
+ "', timezone='", file_settings.timezone,
+ "', sigfigs='", file_settings.sigfigs,
+ "', read_snaplen='", file_settings.read_snaplen,
+ "', snaplen='", file_settings.snaplen,
+ "', nettype ='", file_settings.linktype,
+ "', wtap ='", file_settings.wtap_type)
+
+ --ok, it's a pcap file
+ dprint2("parse_file_header: success")
+ return file_settings
+end
+
+----------------------------------------
+-- this is used by both read() and seek_read()
+-- the calling function to this should have already set the file position correctly
+read_common = function(funcname, file, capture, frame)
+ dprint2(funcname,": read_common() called")
+
+ -- get the state info
+ local file_settings = capture.private_table
+
+ -- first parse the record header, which will set the FrameInfo fields
+ if not parse_rec_header(funcname, file, file_settings, frame) then
+ dprint2(funcname, ": read_common: hit end of file or error")
+ return false
+ end
+
+ frame.encap = file_settings.wtap_type
+
+ -- now we need to get the packet bytes from the file record into the frame...
+ -- we *could* read them into a string using file:read(numbytes), and then
+ -- set them to frame.data so that wireshark gets it...
+ -- but that would mean the packet's string would be copied into Lua
+ -- and then sent right back into wireshark, which is gonna slow things
+ -- down; instead FrameInfo has a read_data() method, which makes
+ -- wireshark read directly from the file into the frame buffer, so we use that
+ if not frame:read_data(file, frame.captured_length) then
+ dprint(funcname, ": read_common: failed to read data from file into buffer")
+ return false
+ end
+
+ return true
+end
+
+----------------------------------------
+-- the function to parse individual records
+parse_rec_header = function(funcname, file, file_settings, frame)
+ dprint2(funcname,": parse_rec_header() called")
+
+ local line = file:read(file_settings.rec_hdr_len)
+
+ -- it's ok for us to not be able to read it, if it's end of file
+ if not line then return false end
+
+ -- this is: time_sec, time_usec, capture_len, original_len
+ local fields = { Struct.unpack(file_settings.rec_hdr_patt, line) }
+
+ -- sanity check; also note that Struct.unpack() returns the fields plus
+ -- a number of where in the line it stopped reading (i.e., the end in this case)
+ -- so we got back number of fields + 1
+ if #fields ~= file_settings.num_rec_fields + 1 then
+ dprint(funcname, ": parse_rec_header: failed to read the record header, got:",
+ #fields, ", expected:", file_settings.num_rec_fields)
+ return nil
+ end
+
+ local nsecs = fields[2]
+
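+ -- the record's sub-second field is stored in the file's native precision
+ -- (microseconds for classic pcap), but NSTime below wants nanoseconds, so scale up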
+ if file_settings.time_precision == wtap_filetypes.TSPREC_USEC then
+ nsecs = nsecs * 1000
+ elseif file_settings.time_precision == wtap_filetypes.TSPREC_MSEC then
+ nsecs = nsecs * 1000000
+ end
+
+ frame.time = NSTime(fields[1], nsecs)
+
+ local caplen, origlen = fields[3], fields[4]
+
+ -- sanity check, verify captured length isn't more than original length
+ if caplen > origlen then
+ dprint("captured length of", caplen, "is bigger than original length of", origlen)
+ -- swap them, a cool Lua ability
+ caplen, origlen = origlen, caplen
+ end
+
+ if caplen > WTAP_MAX_PACKET_SIZE then
+ dprint("Got a captured_length of", caplen, "which is too big")
+ caplen = WTAP_MAX_PACKET_SIZE
+ end
+
+ frame.rec_type = wtap_rec_types.PACKET
+
+ frame.captured_length = caplen
+ frame.original_length = origlen
+
+ frame.flags = wtap_presence_flags.TS + wtap_presence_flags.CAP_LEN -- for timestamp|cap_len
+
+ dprint2(funcname,": parse_rec_header() returning")
+ return true
+end
+
+
+
+--------------------------------------------------------------------------------
+-- file writer handling functions for Wireshark to use
+--------------------------------------------------------------------------------
+
+-- file encaps we can handle writing
+local canwrite = {
+ [ wtap_encaps.NULL ] = true,
+ [ wtap_encaps.ETHERNET ] = true,
+ [ wtap_encaps.PPP ] = true,
+ [ wtap_encaps.RAW_IP ] = true,
+ [ wtap_encaps.IEEE_802_11 ] = true,
+ [ wtap_encaps.MTP2 ] = true,
+ [ wtap_encaps.MTP3 ] = true,
+ -- etc., etc.
+}
+
+-- we can't reuse the variables we used in the reader, because this script might be used to
+-- both read a file and write one out at the same time, so we create another file_settings
+-- instance.
+-- set the file_settings for the little-endian version in magic_spells
+local function create_writer_file_settings()
+ dprint2("create_writer_file_settings called")
+ local t = magic_spells.swapped
+
+ local file_settings = new_settings()
+
+ -- the magic_values/spells table uses the same key names, so this is easy
+ for k,v in pairs(t) do
+ file_settings[k] = v
+ end
+
+ -- based on endianess, set the file_header and rec_header
+ -- and determine corrected_magic
+ if file_settings.endianess == ENC_BIG_ENDIAN then
+ file_settings.file_hdr_patt = '>' .. FILE_HEADER_PATT
+ file_settings.rec_hdr_patt = '>' .. file_settings.rec_hdr_patt
+ file_settings.corrected_magic = file_settings.magic
+ else
+ file_settings.file_hdr_patt = '<' .. FILE_HEADER_PATT
+ file_settings.rec_hdr_patt = '<' .. file_settings.rec_hdr_patt
+ local m = Struct.pack(">I4", file_settings.magic)
+ file_settings.corrected_magic = Struct.unpack("<I4", m)
+ end
+
+ file_settings.rec_hdr_len = Struct.size(file_settings.rec_hdr_patt)
+
+ return file_settings
+end
+
+----------------------------------------
+-- The can_write_encap() function is called by Wireshark when it wants to write out a file,
+-- and needs to see if this file writer can handle the packet types in the window.
+-- We need to return true if we can handle it, else false
+local function can_write_encap(encap)
+ dprint2("can_write_encap() called with encap=",encap)
+ return canwrite[encap] or false
+end
+
+local function write_open(file, capture)
+ dprint2("write_open() called")
+
+ local file_settings = create_writer_file_settings()
+
+ -- write out file header
+ local hdr = Struct.pack(file_settings.file_hdr_patt,
+ file_settings.corrected_magic,
+ file_settings.version_major,
+ file_settings.version_minor,
+ file_settings.timezone,
+ file_settings.sigfigs,
+ capture.snapshot_length,
+ wtap2pcap(capture.encap))
+
+ if not hdr then
+ dprint("write_open: error generating file header")
+ return false
+ end
+
+ dprint2("write_open generating:", Struct.tohex(hdr))
+
+ if not file:write(hdr) then
+ dprint("write_open: error writing file header to file")
+ return false
+ end
+
+ -- save settings
+ capture.private_table = file_settings
+
+ return true
+end
+
+local function write(file, capture, frame)
+ dprint2("write() called")
+
+ -- get file settings
+ local file_settings = capture.private_table
+ if not file_settings then
+ dprint("write() failed to get private table file settings")
+ return false
+ end
+
+ -- write out record header: time_sec, time_usec, capture_len, original_len
+
+ -- first get times
+ local nstime = frame.time
+
+ -- pcap format is in usecs, but wireshark's internal is nsecs
+ local nsecs = nstime.nsecs
+
+ if file_settings.time_precision == wtap_filetypes.TSPREC_USEC then
+ nsecs = nsecs / 1000
+ elseif file_settings.time_precision == wtap_filetypes.TSPREC_MSEC then
+ nsecs = nsecs / 1000000
+ end
+
+ local hdr = Struct.pack(file_settings.rec_hdr_patt,
+ nstime.secs,
+ nsecs,
+ frame.captured_length,
+ frame.original_length)
+
+ if not hdr then
+ dprint("write: error generating record header")
+ return false
+ end
+
+ if not file:write(hdr) then
+ dprint("write: error writing record header to file")
+ return false
+ end
+
+ -- we could write the packet data the same way, by getting frame.data and writing it out
+ -- but we can avoid copying those bytes into Lua by using the write_data() function
+ if not frame:write_data(file) then
+ dprint("write: error writing record data to file")
+ return false
+ end
+
+ return true
+end
+
+local function write_close(file, capture)
+ dprint2("write_close() called")
+ dprint2("Good night, and good luck")
+ return true
+end
+
+-- ok, so let's create another FileHandler object
+local fh2 = FileHandler.new("Lua-based PCAP writer", "lua_pcap2", "A Lua-based file writer for PCAP-type files","wms")
+
+-- set above functions to the FileHandler
+fh2.can_write_encap = can_write_encap
+fh2.write_open = write_open
+fh2.write = write
+fh2.write_close = write_close
+fh2.extensions = "pcap;cap" -- this is just a hint
+
+-- and finally, register the FileHandler!
+register_filehandler(fh2)
+
+dprint2("Second FileHandler registered")
diff --git a/test/lua/pinfo.lua b/test/lua/pinfo.lua
new file mode 100644
index 0000000..52bd3d0
--- /dev/null
+++ b/test/lua/pinfo.lua
@@ -0,0 +1,220 @@
+-- test script for Pinfo and Address functions
+-- use with dhcp.pcap in test/captures directory
+
+------------- general test helper funcs ------------
+local testlib = require("testlib")
+local FRAME = "frame"
+local DENIED = "denied"
+local GETTER = "getter"
+local SETTER = "setter"
+local ADDR = "address"
+local OTHER = "other"
+
+-- expected number of runs per type
+local n_frames = 4
+local taptests = {
+ [FRAME]=n_frames,
+ [DENIED]=n_frames*32,
+ [GETTER]=n_frames*39,
+ [SETTER]=n_frames*15,
+ [ADDR]=n_frames*6,
+ [OTHER]=n_frames*2,
+}
+testlib.init(taptests)
+
+---------
+-- the following are so we can use pcall (which needs a function to call)
+local function setPinfo(pinfo,name,value)
+ pinfo[name] = value
+end
+
+local function getPinfo(pinfo,name)
+ local foo = pinfo[name]
+end
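+-- e.g. pcall(setPinfo, pinfo, "number", 0) returns false in the tests below,
+-- since pinfo.number is read-only and the assignment raises an error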
+
+------------- test script ------------
+
+
+local tap = Listener.new()
+
+
+function tap.packet(pinfo,tvb)
+ testlib.countPacket(FRAME)
+ testlib.testing(FRAME,"Pinfo in Frame")
+
+ testlib.test(OTHER,"typeof-1", typeof(pinfo) == "Pinfo")
+
+ testlib.test(OTHER,"tostring-1", tostring(pinfo) == "a Pinfo")
+
+ testlib.testing(FRAME,"negative tests")
+
+ -- try to set read-only attributes
+ testlib.test(DENIED,"Pinfo.number-set-1",not pcall(setPinfo,pinfo,"number",0))
+ testlib.test(DENIED,"Pinfo.len-set-1",not pcall(setPinfo,pinfo,"len",0))
+ testlib.test(DENIED,"Pinfo.caplen-set-1",not pcall(setPinfo,pinfo,"caplen",0))
+ testlib.test(DENIED,"Pinfo.rel_ts-set-1",not pcall(setPinfo,pinfo,"rel_ts",0))
+ testlib.test(DENIED,"Pinfo.delta_ts-set-1",not pcall(setPinfo,pinfo,"delta_ts",0))
+ testlib.test(DENIED,"Pinfo.delta_dis_ts-set-1",not pcall(setPinfo,pinfo,"delta_dis_ts",0))
+ testlib.test(DENIED,"Pinfo.visited-set-1",not pcall(setPinfo,pinfo,"visited",0))
+ testlib.test(DENIED,"Pinfo.lo-set-1",not pcall(setPinfo,pinfo,"lo",0))
+ testlib.test(DENIED,"Pinfo.hi-set-1",not pcall(setPinfo,pinfo,"hi",0))
+ testlib.test(DENIED,"Pinfo.port_type-set-1",not pcall(setPinfo,pinfo,"port_type",0))
+ testlib.test(DENIED,"Pinfo.match-set-1",not pcall(setPinfo,pinfo,"match",0))
+ testlib.test(DENIED,"Pinfo.curr_proto-set-1",not pcall(setPinfo,pinfo,"curr_proto",0))
+ testlib.test(DENIED,"Pinfo.columns-set-1",not pcall(setPinfo,pinfo,"columns",0))
+ testlib.test(DENIED,"Pinfo.cols-set-1",not pcall(setPinfo,pinfo,"cols",0))
+ testlib.test(DENIED,"Pinfo.private-set-1",not pcall(setPinfo,pinfo,"private",0))
+ testlib.test(DENIED,"Pinfo.fragmented-set-1",not pcall(setPinfo,pinfo,"fragmented",0))
+ testlib.test(DENIED,"Pinfo.in_error_pkt-set-1",not pcall(setPinfo,pinfo,"in_error_pkt",0))
+ testlib.test(DENIED,"Pinfo.match_uint-set-1",not pcall(setPinfo,pinfo,"match_uint",0))
+ testlib.test(DENIED,"Pinfo.match_string-set-1",not pcall(setPinfo,pinfo,"match_string",0))
+
+ -- wrong type being set
+ testlib.test(DENIED,"Pinfo.src-set-1",not pcall(setPinfo,pinfo,"src","foobar"))
+ testlib.test(DENIED,"Pinfo.dst-set-1",not pcall(setPinfo,pinfo,"dst","foobar"))
+ testlib.test(DENIED,"Pinfo.dl_src-set-1",not pcall(setPinfo,pinfo,"dl_src","foobar"))
+ testlib.test(DENIED,"Pinfo.dl_dst-set-1",not pcall(setPinfo,pinfo,"dl_dst","foobar"))
+ testlib.test(DENIED,"Pinfo.net_src-set-1",not pcall(setPinfo,pinfo,"net_src","foobar"))
+ testlib.test(DENIED,"Pinfo.net_dst-set-1",not pcall(setPinfo,pinfo,"net_dst","foobar"))
+ testlib.test(DENIED,"Pinfo.src_port-set-1",not pcall(setPinfo,pinfo,"src_port","foobar"))
+ testlib.test(DENIED,"Pinfo.dst_port-set-1",not pcall(setPinfo,pinfo,"dst_port","foobar"))
+ testlib.test(DENIED,"Pinfo.can_desegment-set-1",not pcall(setPinfo,pinfo,"can_desegment","foobar"))
+ testlib.test(DENIED,"Pinfo.desegment_len-set-1",not pcall(setPinfo,pinfo,"desegment_len","foobar"))
+ testlib.test(DENIED,"Pinfo.desegment_offset-set-1",not pcall(setPinfo,pinfo,"desegment_offset","foobar"))
+
+ -- invalid attribute names
+ testlib.test(DENIED,"Pinfo.set-1",not pcall(setPinfo,pinfo,"foobar","foobar"))
+ testlib.test(DENIED,"Pinfo.get-12",not pcall(getPinfo,pinfo,"foobar"))
+
+ testlib.testing(FRAME,"basic getter tests")
+
+ local pktlen, srcip, dstip, srcport, dstport
+
+ if pinfo.number == 1 or pinfo.number == 3 then
+ pktlen = 314
+ srcip = "0.0.0.0"
+ dstip = "255.255.255.255"
+ srcport = 68
+ dstport = 67
+ else
+ pktlen = 342
+ srcip = "192.168.0.1"
+ dstip = "192.168.0.10"
+ srcport = 67
+ dstport = 68
+ end
+
+ testlib.test(GETTER,"Pinfo.number-get-1",pinfo.number == testlib.getPktCount(FRAME))
+ testlib.test(GETTER,"Pinfo.len-get-1",pinfo.len == pktlen)
+ testlib.test(GETTER,"Pinfo.caplen-get-1",pinfo.caplen == pktlen)
+ testlib.test(GETTER,"Pinfo.visited-get-1",pinfo.visited == true)
+ testlib.test(GETTER,"Pinfo.lo-get-1",tostring(pinfo.lo) == srcip)
+ testlib.test(GETTER,"Pinfo.lo-get-2",typeof(pinfo.lo) == "Address")
+ testlib.test(GETTER,"Pinfo.hi-get-1",tostring(pinfo.hi) == dstip)
+ testlib.test(GETTER,"Pinfo.hi-get-2",typeof(pinfo.hi) == "Address")
+ testlib.test(GETTER,"Pinfo.port_type-get-1",pinfo.port_type == 3)
+ testlib.test(GETTER,"Pinfo.match-get-1",pinfo.match == 0)
+ testlib.test(GETTER,"Pinfo.curr_proto-get-1",tostring(pinfo.curr_proto) == "<Missing Protocol Name>")
+ testlib.test(GETTER,"Pinfo.columns-get-1",tostring(pinfo.columns) == "Columns")
+ testlib.test(GETTER,"Pinfo.columns-get-2",typeof(pinfo.columns) == "Columns")
+ testlib.test(GETTER,"Pinfo.cols-get-1",tostring(pinfo.cols) == "Columns")
+ testlib.test(GETTER,"Pinfo.cols-get-2",typeof(pinfo.cols) == "Columns")
+ testlib.test(GETTER,"Pinfo.private-get-1",type(pinfo.private) == "userdata")
+ testlib.test(GETTER,"Pinfo.fragmented-get-1",pinfo.fragmented == false)
+
+ testlib.test(GETTER,"Pinfo.in_error_pkt-get-1",pinfo.in_error_pkt == false)
+ testlib.test(GETTER,"Pinfo.match_uint-get-1",pinfo.match_uint == 0)
+ testlib.test(GETTER,"Pinfo.match_string-get-1",pinfo.match_string == nil)
+
+ testlib.test(GETTER,"Pinfo.src-get-1",tostring(pinfo.src) == srcip)
+ testlib.test(GETTER,"Pinfo.src-get-2",typeof(pinfo.src) == "Address")
+ testlib.test(GETTER,"Pinfo.dst-get-1",tostring(pinfo.dst) == dstip)
+ testlib.test(GETTER,"Pinfo.dst-get-2",typeof(pinfo.dst) == "Address")
+
+ testlib.test(GETTER,"Pinfo.dl_src-get-1",typeof(pinfo.dl_src) == "Address")
+ testlib.test(GETTER,"Pinfo.dl_dst-get-1",typeof(pinfo.dl_dst) == "Address")
+ testlib.test(GETTER,"Pinfo.net_src-get-1",tostring(pinfo.net_src) == srcip)
+ testlib.test(GETTER,"Pinfo.net_src-get-2",typeof(pinfo.net_src) == "Address")
+ testlib.test(GETTER,"Pinfo.net_dst-get-1",tostring(pinfo.net_dst) == dstip)
+ testlib.test(GETTER,"Pinfo.net_dst-get-2",typeof(pinfo.net_dst) == "Address")
+ testlib.test(GETTER,"Pinfo.src_port-get-1",pinfo.src_port == srcport)
+ testlib.test(GETTER,"Pinfo.dst_port-get-1",pinfo.dst_port == dstport)
+ testlib.test(GETTER,"Pinfo.can_desegment-get-1",pinfo.can_desegment == 0)
+ testlib.test(GETTER,"Pinfo.desegment_len-get-1",pinfo.desegment_len == 0)
+ testlib.test(GETTER,"Pinfo.desegment_offset-get-1",pinfo.desegment_offset == 0)
+
+ testlib.test(GETTER,"pinfo.p2p_dir", pinfo.p2p_dir == P2P_DIR_UNKNOWN)
+
+ if pinfo.number == 1 then
+ testlib.test(GETTER,"Pinfo.rel_ts-get-1",pinfo.rel_ts == 0)
+ testlib.test(GETTER,"Pinfo.delta_ts-get-1",pinfo.delta_ts == 0)
+ testlib.test(GETTER,"Pinfo.delta_dis_ts-get-1",pinfo.delta_dis_ts == 0)
+ elseif pinfo.number == 2 then
+ testlib.test(GETTER,"Pinfo.rel_ts-get-1",pinfo.rel_ts == 0.000295)
+ testlib.test(GETTER,"Pinfo.delta_ts-get-1",pinfo.delta_ts == 0.000295)
+ testlib.test(GETTER,"Pinfo.delta_dis_ts-get-1",pinfo.delta_dis_ts == 0.000295)
+ elseif pinfo.number == 3 then
+ testlib.test(GETTER,"Pinfo.rel_ts-get-1",pinfo.rel_ts == 0.070031)
+ testlib.test(GETTER,"Pinfo.delta_ts-get-1",pinfo.delta_ts == 0.069736)
+ testlib.test(GETTER,"Pinfo.delta_dis_ts-get-1",pinfo.delta_dis_ts == 0.069736)
+ elseif pinfo.number == 4 then
+ testlib.test(GETTER,"Pinfo.rel_ts-get-1",pinfo.rel_ts == 0.070345)
+ testlib.test(GETTER,"Pinfo.delta_ts-get-1",pinfo.delta_ts == 0.000314)
+ testlib.test(GETTER,"Pinfo.delta_dis_ts-get-1",pinfo.delta_dis_ts == 0.000314)
+ end
+
+
+ testlib.testing(FRAME,"basic setter tests")
+
+ local tmp = pinfo.src
+ pinfo.src = pinfo.dst
+ pinfo.dst = tmp
+ testlib.test(SETTER,"Pinfo.src-set-1",tostring(pinfo.src) == dstip)
+ testlib.test(SETTER,"Pinfo.src-set-1",typeof(pinfo.src) == "Address")
+ testlib.test(SETTER,"Pinfo.dst-set-1",tostring(pinfo.dst) == srcip)
+ testlib.test(SETTER,"Pinfo.dst-set-1",typeof(pinfo.dst) == "Address")
+
+ local dl_dst_val = tostring(pinfo.dl_dst)
+ local dl_src_val = tostring(pinfo.dl_src)
+ tmp = pinfo.dl_src
+ pinfo.dl_src = pinfo.dl_dst
+ pinfo.dl_dst = tmp
+ testlib.test(SETTER,"Pinfo.dl_src-set-1",tostring(pinfo.dl_src) == dl_dst_val)
+ testlib.test(SETTER,"Pinfo.dl_dst-set-1",tostring(pinfo.dl_dst) == dl_src_val)
+
+ tmp = pinfo.net_src
+ pinfo.net_src = pinfo.net_dst
+ pinfo.net_dst = tmp
+ testlib.test(SETTER,"Pinfo.net_src-set-1",tostring(pinfo.net_src) == dstip)
+ testlib.test(SETTER,"Pinfo.net_src-set-1",typeof(pinfo.net_src) == "Address")
+ testlib.test(SETTER,"Pinfo.net_dst-set-1",tostring(pinfo.net_dst) == srcip)
+ testlib.test(SETTER,"Pinfo.net_dst-set-1",typeof(pinfo.net_dst) == "Address")
+
+ tmp = pinfo.src_port
+ pinfo.src_port = pinfo.dst_port
+ pinfo.dst_port = tmp
+ testlib.test(SETTER,"Pinfo.src_port-set-1",pinfo.src_port == dstport)
+ testlib.test(SETTER,"Pinfo.dst_port-set-1",pinfo.dst_port == srcport)
+
+ pinfo.can_desegment = 12
+ testlib.test(SETTER,"Pinfo.can_desegment-set-1",pinfo.can_desegment == 12)
+ pinfo.desegment_len = 34
+ testlib.test(SETTER,"Pinfo.desegment_len-set-1",pinfo.desegment_len == 34)
+ pinfo.desegment_offset = 45
+ testlib.test(SETTER,"Pinfo.desegment_offset-set-1",pinfo.desegment_offset == 45)
+
+ testlib.testing(FRAME,"Address functions")
+ testlib.test(ADDR,"Address-eq-1", pinfo.lo == pinfo.dst)
+ testlib.test(ADDR,"Address-eq-2", pinfo.lo ~= pinfo.hi)
+ testlib.test(ADDR,"Address-lt-1", pinfo.lo < pinfo.hi)
+ testlib.test(ADDR,"Address-lt-2", pinfo.hi > pinfo.lo)
+ testlib.test(ADDR,"Address-le-1", pinfo.lo <= pinfo.hi)
+ testlib.test(ADDR,"Address-le-2", pinfo.lo <= pinfo.dst)
+
+ testlib.pass(FRAME)
+
+end
+
+function tap.draw()
+ testlib.getResults()
+end
diff --git a/test/lua/proto.lua b/test/lua/proto.lua
new file mode 100644
index 0000000..cc03898
--- /dev/null
+++ b/test/lua/proto.lua
@@ -0,0 +1,211 @@
+----------------------------------------
+-- script-name: proto.lua
+-- Test the Proto/ProtoField API
+----------------------------------------
+
+------------- general test helper funcs ------------
+local testlib = require("testlib")
+
+local OTHER = "other"
+
+-- expected number of runs per type
+local taptests = {
+ [OTHER]=48
+}
+testlib.init(taptests)
+
+---------
+-- the following are so we can use pcall (which needs a function to call)
+local function callFunc(func,...)
+ func(...)
+end
+
+local function callObjFuncGetter(vart,varn,tobj,name,...)
+ vart[varn] = tobj[name](...)
+end
+
+local function setValue(tobj,name,value)
+ tobj[name] = value
+end
+
+local function getValue(tobj,name)
+ local foo = tobj[name]
+end
+
+------------- test script ------------
+
+----------------------------------------
+-- creates a Proto object, but doesn't register it yet
+testlib.testing(OTHER,"Proto creation")
+
+testlib.test(OTHER,"Proto.__call", pcall(callFunc,Proto,"foo","Foo Protocol"))
+testlib.test(OTHER,"Proto.__call", pcall(callFunc,Proto,"foo1","Foo1 Protocol"))
+testlib.test(OTHER,"Proto.__call", not pcall(callFunc,Proto,"","Bar Protocol"))
+testlib.test(OTHER,"Proto.__call", not pcall(callFunc,Proto,nil,"Bar Protocol"))
+testlib.test(OTHER,"Proto.__call", not pcall(callFunc,Proto,"bar",""))
+testlib.test(OTHER,"Proto.__call", not pcall(callFunc,Proto,"bar",nil))
+
+
+local dns = Proto("mydns","MyDNS Protocol")
+
+testlib.test(OTHER,"Proto.__tostring", tostring(dns) == "Proto: MYDNS")
+
+----------------------------------------
+-- multiple ways to do the same thing: create a protocol field (but not register it yet)
+-- the abbreviation should always have "<myproto>." before the specific abbreviation, to avoid collisions
+testlib.testing(OTHER,"ProtoField creation")
+
+local pfields = {} -- a table to hold fields, so we can pass them back/forth through pcall()
+--- variable -- what dissector.lua did, so we almost match it
+local pf_transaction_id = 1 -- ProtoField.new("Transaction ID", "mydns.trans_id", ftypes.UINT16)
+local pf_flags = 2 -- ProtoField.new("Flags", "mydns.flags", ftypes.UINT16, nil, base.HEX)
+local pf_num_questions = 3 -- ProtoField.uint16("mydns.num_questions", "Number of Questions")
+local pf_num_answers = 4 -- ProtoField.uint16("mydns.num_answers", "Number of Answer RRs")
+local pf_num_authority_rr = 5 -- ProtoField.uint16("mydns.num_authority_rr", "Number of Authority RRs")
+local pf_num_additional_rr = 6 -- ProtoField.uint16("mydns.num_additional_rr", "Number of Additional RRs")
+
+testlib.test(OTHER,"ProtoField.new",pcall(callObjFuncGetter, pfields,pf_trasaction_id, ProtoField,"new", "Transaction ID", "mydns.trans_id", ftypes.INT16,nil,"base.DEC"))
+testlib.test(OTHER,"ProtoField.new",pcall(callObjFuncGetter, pfields,pf_flags, ProtoField,"new", "Flags", "mydns.flags", ftypes.UINT16, nil, "base.HEX"))
+
+-- tries to register a field that already exists (from the real dns proto dissector) but with incompatible type
+testlib.test(OTHER,"ProtoField.new_duplicate_bad",not pcall(callObjFuncGetter, pfields,10, ProtoField,"new", "Flags", "dns.flags", ftypes.INT16, nil, "base.HEX"))
+testlib.test(OTHER,"ProtoField.int16_duplicate_bad",not pcall(callObjFuncGetter, pfields,10, ProtoField,"int16", "dns.id","Transaction ID"))
+-- now compatible (but different type)
+testlib.test(OTHER,"ProtoField.new_duplicate_ok",pcall(callObjFuncGetter, pfields,10, ProtoField,"new", "Flags", "dns.flags", ftypes.UINT32, nil, "base.HEX"))
+testlib.test(OTHER,"ProtoField.uint16_duplicate_ok",pcall(callObjFuncGetter, pfields,10, ProtoField,"uint16", "dns.id","Transaction ID"))
+
+-- invalid valuestring arg
+testlib.test(OTHER,"ProtoField.new_invalid_valuestring",not pcall(callObjFuncGetter, pfields,10, ProtoField,"new", "Transaction ID", "mydns.trans_id", ftypes.INT16,"howdy","base.DEC"))
+-- invalid ftype
+testlib.test(OTHER,"ProtoField.new_invalid_ftype",not pcall(callObjFuncGetter, pfields,10, ProtoField,"new", "Transaction ID", "mydns.trans_id", 9999))
+-- invalid description
+--testlib.test(OTHER,"ProtoField.new_invalid_description",not pcall(callObjFuncGetter, pfields,10, ProtoField,"new", "", "mydns.trans_id", ftypes.INT16))
+testlib.test(OTHER,"ProtoField.new_invalid_description",not pcall(callObjFuncGetter, pfields,10, ProtoField,"new", nil, "mydns.trans_id", ftypes.INT16))
+
+testlib.test(OTHER,"ProtoField.new_invalid_abbr",not pcall(callObjFuncGetter, pfields,10, ProtoField,"new", "trans id", "", ftypes.INT16))
+testlib.test(OTHER,"ProtoField.new_invalid_abbr",not pcall(callObjFuncGetter, pfields,10, ProtoField,"new", "trans id", nil, ftypes.INT16))
+
+testlib.test(OTHER,"ProtoField.int16",pcall(callObjFuncGetter, pfields,pf_num_questions, ProtoField,"int16", "mydns.num_questions", "Number of Questions"))
+testlib.test(OTHER,"ProtoField.int16",pcall(callObjFuncGetter, pfields,pf_num_answers, ProtoField,"int16", "mydns.num_answers", "Number of Answer RRs",base.DEC))
+testlib.test(OTHER,"ProtoField.int16",pcall(callObjFuncGetter, pfields,pf_num_authority_rr, ProtoField,"int16", "mydns.num_authority_rr", "Number of Authority RRs",base.DEC))
+testlib.test(OTHER,"ProtoField.int16",pcall(callObjFuncGetter, pfields,pf_num_additional_rr, ProtoField,"int16", "mydns.num_additional_rr", "Number of Additional RRs"))
+
+-- now undo the table thingy
+pf_transaction_id = pfields[pf_transaction_id]
+pf_flags = pfields[pf_flags]
+pf_num_questions = pfields[pf_num_questions]
+pf_num_answers = pfields[pf_num_answers]
+pf_num_authority_rr = pfields[pf_num_authority_rr]
+pf_num_additional_rr = pfields[pf_num_additional_rr]
+
+-- within the flags field, we want to parse/show the bits separately
+-- note the "base" argument becomes the size of the bitmask'ed field when ftypes.BOOLEAN is used
+-- the "mask" argument is which bits we want to use for this field (e.g., base=16 and mask=0x8000 means we want the top bit of a 16-bit field)
+-- again the following shows different ways of doing the same thing basically
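+-- (Illustrative aside, not part of the test: with base=16 and mask=0x8000, a hypothetical
+-- on-the-wire flags value of 0x8180 has its top bit set, so the "Response" field below would
+-- show as true, i.e. "this is a response"; the remaining bits are left to the other flag fields.)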
+local pf_flag_response = ProtoField.new("Response", "mydns.flags.response", ftypes.BOOLEAN, {"this is a response","this is a query"}, 16, 0x8000, "is the message a response?")
+local pf_flag_opcode = ProtoField.new("Opcode", "mydns.flags.opcode", ftypes.UINT16, nil, base.DEC, 0x7800, "operation code")
+local pf_flag_authoritative = ProtoField.new("Authoritative", "mydns.flags.authoritative", ftypes.BOOLEAN, nil, 16, 0x0400, "is the response authoritative?")
+local pf_flag_truncated = ProtoField.bool("mydns.flags.truncated", "Truncated", 16, nil, 0x0200, "is the message truncated?")
+local pf_flag_recursion_desired = ProtoField.bool("mydns.flags.recursion_desired", "Recursion desired", 16, {"yes","no"}, 0x0100, "do the query recursively?")
+local pf_flag_recursion_available = ProtoField.bool("mydns.flags.recursion_available", "Recursion available", 16, nil, 0x0080, "does the server support recursion?")
+local pf_flag_z = ProtoField.uint16("mydns.flags.z", "World War Z - Reserved for future use", base.HEX, nil, 0x0040, "when is it the future?")
+local pf_flag_authenticated = ProtoField.bool("mydns.flags.authenticated", "Authenticated", 16, {"yes","no"}, 0x0020, "did the server DNSSEC authenticate?")
+local pf_flag_checking_disabled = ProtoField.bool("mydns.flags.checking_disabled", "Checking disabled", 16, nil, 0x0010)
+
+-- no, these aren't all the DNS response codes - this is just an example
+local rcodes = {
+ [0] = "No Error",
+ [1] = "Format Error",
+ [2] = "Server Failure",
+ [3] = "Non-Existent Domain",
+ [9] = "Server Not Authoritative for zone"
+}
+-- the above rcodes table is used in this next ProtoField
+local pf_flag_rcode = ProtoField.uint16("mydns.flags.rcode", "Response code", base.DEC, rcodes, 0x000F)
+local pf_query = ProtoField.new("Query", "mydns.query", ftypes.BYTES)
+local pf_query_name = ProtoField.new("Name", "mydns.query.name", ftypes.STRING)
+local pf_query_name_len = ProtoField.new("Name Length", "mydns.query.name.len", ftypes.UINT8)
+local pf_query_label_count = ProtoField.new("Label Count", "mydns.query.label.count", ftypes.UINT8)
+local rrtypes = { [1] = "A (IPv4 host address)", [2] = "NS (authoritative name server)", [28] = "AAAA (for geeks only)" }
+local pf_query_type = ProtoField.uint16("mydns.query.type", "Type", base.DEC, rrtypes)
+-- again, not all class types are listed here
+local classes = {
+ [0] = "Reserved",
+ [1] = "IN (Internet)",
+ [2] = "The 1%",
+ [5] = "First class",
+ [6] = "Business class",
+ [65535] = "Cattle class"
+}
+local pf_query_class = ProtoField.uint16("mydns.query.class", "Class", base.DEC, classes, nil, "keep it classy folks")
+
+
+testlib.testing(OTHER,"Proto functions")
+
+----------------------------------------
+-- this actually registers the ProtoFields above, into our new Protocol
+-- in a real script I wouldn't do it this way; I'd build a table of fields programmatically
+-- and then set dns.fields to it, so as to avoid forgetting a field
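+-- (Illustrative sketch of that programmatic style, not executed by this test; 'all_my_fields'
+-- is a hypothetical table holding the ProtoFields:
+--     local fields = {}
+--     for _, pf in pairs(all_my_fields) do fields[#fields+1] = pf end
+--     dns.fields = fields
+-- )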
+local myfields = { pf_transaction_id, pf_flags,
+ pf_num_questions, pf_num_answers, pf_num_authority_rr, pf_num_additional_rr,
+ pf_flag_response, pf_flag_opcode, pf_flag_authoritative,
+ pf_flag_truncated, pf_flag_recursion_desired, pf_flag_recursion_available,
+ pf_flag_z, pf_flag_authenticated, pf_flag_checking_disabled, pf_flag_rcode,
+ pf_query, pf_query_name, pf_query_name_len, pf_query_label_count, pf_query_type, pf_query_class }
+
+--dns.fields = myfields
+testlib.test(OTHER,"Proto.fields-set", pcall(setValue,dns,"fields",myfields))
+testlib.test(OTHER,"Proto.fields-get", pcall(getValue,dns,"fields"))
+testlib.test(OTHER,"Proto.fields-get", #dns.fields == #myfields)
+
+local pf_foo = ProtoField.uint16("myfoo.com", "Fooishly", base.DEC, rcodes, 0x000F)
+
+local foo = Proto("myfoo","MyFOO Protocol")
+local bar = Proto("mybar","MyBAR Protocol")
+
+testlib.test(OTHER,"Proto.fields-set", pcall(setValue,foo,"fields",pf_foo))
+testlib.test(OTHER,"Proto.fields-get", #foo.fields == 1)
+testlib.test(OTHER,"Proto.fields-get", foo.fields[1] == pf_foo)
+
+testlib.test(OTHER,"Proto.fields-set", not pcall(setValue,bar,"fields","howdy"))
+testlib.test(OTHER,"Proto.fields-set", not pcall(setValue,bar,"fields",nil))
+testlib.test(OTHER,"Proto.fields-get", #bar.fields == 0)
+
+testlib.test(OTHER,"Proto.name-get", foo.name == "MYFOO")
+testlib.test(OTHER,"Proto.name-set", not pcall(setValue,foo,"name","howdy"))
+
+testlib.test(OTHER,"Proto.description-get", foo.description == "MyFOO Protocol")
+testlib.test(OTHER,"Proto.description-set", not pcall(setValue,foo,"description","howdy"))
+
+testlib.test(OTHER,"Proto.prefs-get", typeof(foo.prefs) == "Prefs")
+testlib.test(OTHER,"Proto.prefs-set", not pcall(setValue,foo,"prefs","howdy"))
+
+local function dummy()
+    testlib.fail(OTHER)
+    error("dummy function called!")
+    return
+end
+
+-- can't get this because we haven't set it yet
+testlib.test(OTHER,"Proto.dissector-get", not pcall(getValue,foo,"dissector"))
+-- now set it
+testlib.test(OTHER,"Proto.dissector-set", pcall(setValue,foo,"dissector",dummy))
+testlib.test(OTHER,"Proto.dissector-set", not pcall(setValue,foo,"dissector","howdy"))
+testlib.test(OTHER,"Proto.dissector-get", pcall(getValue,foo,"dissector"))
+
+testlib.test(OTHER,"Proto.prefs_changed-set", pcall(setValue,foo,"prefs_changed",dummy))
+testlib.test(OTHER,"Proto.prefs_changed-get", not pcall(getValue,foo,"prefs_changed"))
+testlib.test(OTHER,"Proto.prefs_changed-set", not pcall(setValue,foo,"prefs_changed","howdy"))
+
+local function dummy_init()
+ testlib.test(OTHER,"Proto.init-called",true)
+end
+
+testlib.test(OTHER,"Proto.init-set", pcall(setValue,foo,"init",dummy_init))
+testlib.test(OTHER,"Proto.init-set", pcall(setValue,bar,"init",dummy_init))
+
+testlib.test(OTHER,"Proto.init-get", not pcall(getValue,foo,"init"))
+testlib.test(OTHER,"Proto.init-set", not pcall(setValue,foo,"init","howdy"))
+
+testlib.getResults()
+
diff --git a/test/lua/protobuf_test_called_by_custom_dissector.lua b/test/lua/protobuf_test_called_by_custom_dissector.lua
new file mode 100644
index 0000000..7a16365
--- /dev/null
+++ b/test/lua/protobuf_test_called_by_custom_dissector.lua
@@ -0,0 +1,68 @@
+do
+ local protobuf_dissector = Dissector.get("protobuf")
+
+    -- Create a protobuf dissector based on UDP or TCP.
+    -- The UDP dissector will take the whole tvb as one message.
+    -- The TCP dissector will parse the tvb in the format:
+    --         [4-byte length][a message][4-byte length][a message]...
+    -- @param name The name of the new dissector.
+    -- @param desc The description of the new dissector.
+    -- @param for_udp Register the new dissector in the UDP dissector table. (Enables 'Decode As')
+    -- @param for_tcp Register the new dissector in the TCP dissector table. (Enables 'Decode As')
+    -- @param msgtype Message type. This must be the root message defined in your .proto file.
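+    -- For illustration only (hypothetical bytes): a 5-byte protobuf message carried over TCP
+    -- would appear on the wire as 00 00 00 05 followed by the 5 message bytes; the 4-byte
+    -- length prefix is read as a big-endian uint32 and does not count itself, and messages
+    -- are simply concatenated back to back.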
+ local function create_protobuf_dissector(name, desc, for_udp, for_tcp, msgtype)
+ local proto = Proto(name, desc)
+ local f_length = ProtoField.uint32(name .. ".length", "Length", base.DEC)
+ proto.fields = { f_length }
+
+ proto.dissector = function(tvb, pinfo, tree)
+ local subtree = tree:add(proto, tvb())
+ if for_udp and pinfo.port_type == 3 then -- UDP
+ if msgtype ~= nil then
+ pinfo.private["pb_msg_type"] = "message," .. msgtype
+ end
+ pcall(Dissector.call, protobuf_dissector, tvb, pinfo, subtree)
+ elseif for_tcp and pinfo.port_type == 2 then -- TCP
+ local offset = 0
+ local remaining_len = tvb:len()
+ while remaining_len > 0 do
+ if remaining_len < 4 then -- head not enough
+ pinfo.desegment_offset = offset
+ pinfo.desegment_len = DESEGMENT_ONE_MORE_SEGMENT
+ return -1
+ end
+
+ local data_len = tvb(offset, 4):uint()
+
+ if remaining_len - 4 < data_len then -- data not enough
+ pinfo.desegment_offset = offset
+ pinfo.desegment_len = data_len - (remaining_len - 4)
+ return -1
+ end
+ subtree:add(f_length, tvb(offset, 4))
+
+ if msgtype ~= nil then
+ pinfo.private["pb_msg_type"] = "message," .. msgtype
+ end
+ pcall(Dissector.call, protobuf_dissector,
+ tvb(offset + 4, data_len):tvb(), pinfo, subtree)
+
+ offset = offset + 4 + data_len
+ remaining_len = remaining_len - 4 - data_len
+ end
+ end
+ pinfo.columns.protocol:set(name)
+ end
+
+ if for_udp then DissectorTable.get("udp.port"):add(0, proto) end
+ if for_tcp then DissectorTable.get("tcp.port"):add(0, proto) end
+ return proto
+ end
+
+ -- default pure protobuf udp and tcp dissector without message type
+ create_protobuf_dissector("protobuf_udp", "Protobuf UDP")
+ create_protobuf_dissector("protobuf_tcp", "Protobuf TCP")
+ -- add more protobuf dissectors with message types
+ create_protobuf_dissector("AddrBook", "Tutorial AddressBook",
+ true, true, "tutorial.AddressBook")
+end
diff --git a/test/lua/protobuf_test_field_subdissector_table.lua b/test/lua/protobuf_test_field_subdissector_table.lua
new file mode 100644
index 0000000..379b2f5
--- /dev/null
+++ b/test/lua/protobuf_test_field_subdissector_table.lua
@@ -0,0 +1,6 @@
+-- Test protobuf_field dissector table
+do
+ local protobuf_field_table = DissectorTable.get("protobuf_field")
+ local png_dissector = Dissector.get("png")
+ protobuf_field_table:add("tutorial.Person.portrait_image", png_dissector)
+end
diff --git a/test/lua/protofield.lua b/test/lua/protofield.lua
new file mode 100644
index 0000000..9d2223f
--- /dev/null
+++ b/test/lua/protofield.lua
@@ -0,0 +1,236 @@
+----------------------------------------
+-- script-name: protofield.lua
+-- test the ProtoField API
+----------------------------------------
+
+local testlib = require("testlib")
+
+local FRAME = "frame"
+local PER_FRAME = "per-frame"
+local OTHER = "other"
+
+-- expected number of runs
+local n_frames = 4
+local taptests = {
+ [FRAME]=n_frames,
+ [PER_FRAME]=n_frames*8,
+ [OTHER]=50,
+}
+testlib.init(taptests)
+
+------------- test script ------------
+
+----------------------------------------
+local test_proto = Proto.new("test", "Test Proto")
+test_proto.fields.time_field = ProtoField.uint16("test.time", "Time", base.UNIT_STRING, {" sec", " secs"})
+test_proto.fields.dist_field = ProtoField.uint16("test.dist", "Distance", base.UNIT_STRING, {" km"})
+test_proto.fields.filtered_field = ProtoField.uint16("test.filtered", "Filtered Field", base.DEC)
+
+-- Field type: CHAR
+success = pcall(ProtoField.new, "char", "test.char0", ftypes.CHAR)
+testlib.test(OTHER,"ProtoField-char", success)
+
+success = pcall(ProtoField.new, "char base NONE without valuestring", "test.char1", ftypes.CHAR, nil, base.NONE)
+testlib.test(OTHER,"ProtoField-char-without-valuestring", not success)
+
+success = pcall(ProtoField.new, "char base NONE with valuestring", "test.char2", ftypes.CHAR, {1, "Value"}, base.NONE)
+testlib.test(OTHER,"ProtoField-char-with-valuestring", success)
+
+success = pcall(ProtoField.new, "char base DEC", "test.char3", ftypes.CHAR, nil, base.DEC)
+testlib.test(OTHER,"ProtoField-char-base-dec", not success)
+
+success = pcall(ProtoField.new, "char base UNIT_STRING", "test.char4", ftypes.CHAR, {" m"}, base.UNIT_STRING)
+testlib.test(OTHER,"ProtoField-char-unit-string", not success)
+
+success = pcall(ProtoField.new, "char base RANGE_STRING", "test.char5", ftypes.CHAR, {{1, 2, "Value"}}, base.RANGE_STRING)
+testlib.test(OTHER,"ProtoField-char-range-string", success)
+
+-- Field type: BOOLEAN/UINT64 with (64 bit) mask
+success = pcall(ProtoField.new, "boolean", "test.boolean0", ftypes.BOOLEAN, nil, base.HEX, 0x1)
+testlib.test(OTHER,"ProtoField-new-bool-mask-trivial", success)
+
+success = pcall(ProtoField.new, "boolean", "test.boolean1", ftypes.BOOLEAN, nil, base.HEX, "1")
+testlib.test(OTHER,"ProtoField-new-bool-mask-string", success)
+
+success = pcall(ProtoField.new, "boolean", "test.boolean2", ftypes.BOOLEAN, nil, base.HEX, UInt64(0x00000001, 0x0))
+testlib.test(OTHER,"ProtoField-new-bool-mask-uint64", success)
+
+success = pcall(ProtoField.new, "boolean", "test.boolean3", ftypes.BOOLEAN, nil, base.NONE, "invalid") -- 0
+testlib.test(OTHER,"ProtoField-new-bool-mask-string-invalid", success)
+
+success = pcall(ProtoField.new, "boolean", "test.boolean4", ftypes.BOOLEAN, nil, base.HEX, "-1") -- 0xFFFFFFFFFFFFFFFF
+testlib.test(OTHER,"ProtoField-new-bool-mask-negative", success)
+
+success = pcall(ProtoField.new, "boolean", "test.boolean5", ftypes.BOOLEAN, nil, base.NONE)
+testlib.test(OTHER,"ProtoField-new-bool-mask-none", success)
+
+success = pcall(ProtoField.new, "boolean", "test.boolean6", ftypes.BOOLEAN, nil, base.NONE, nil)
+testlib.test(OTHER,"ProtoField-new-bool-mask-nil", success)
+
+success = pcall(ProtoField.bool, "test.boolean10", nil, 64, nil, 0x1)
+testlib.test(OTHER,"ProtoField-bool-mask-trivial", success)
+
+success = pcall(ProtoField.bool, "test.boolean11", nil, 64, nil, "1")
+testlib.test(OTHER,"ProtoField-bool-mask-string", success)
+
+success = pcall(ProtoField.bool, "test.boolean12", nil, 64, nil, UInt64(0x00000001, 0x0))
+testlib.test(OTHER,"ProtoField-bool-mask-uint64", success)
+
+success = pcall(ProtoField.bool, "test.boolean13", nil, base.NONE, nil, "invalid") -- 0
+testlib.test(OTHER,"ProtoField-bool-mask-string-invalid", success)
+
+success = pcall(ProtoField.bool, "test.boolean14", nil, 64, nil, "-1") -- 0xFFFFFFFFFFFFFFFF
+testlib.test(OTHER,"ProtoField-bool-mask-negative", success)
+
+success = pcall(ProtoField.bool, "test.boolean15", nil, base.NONE, nil)
+testlib.test(OTHER,"ProtoField-bool-mask-none", success)
+
+success = pcall(ProtoField.bool, "test.boolean16", nil, base.NONE, nil, nil)
+testlib.test(OTHER,"ProtoField-bool-mask-nil", success)
+
+success = pcall(ProtoField.new, "uint64", "test.uint64_0", ftypes.UINT64, nil, base.HEX, 0x1)
+testlib.test(OTHER,"ProtoField-new-uint64-mask-trivial", success)
+
+success = pcall(ProtoField.new, "uint64", "test.uint64_1", ftypes.UINT64, nil, base.HEX, "1")
+testlib.test(OTHER,"ProtoField-new-uint64-mask-string", success)
+
+success = pcall(ProtoField.new, "uint64", "test.uint64_2", ftypes.UINT64, nil, base.HEX, UInt64(0x00000001, 0x0))
+testlib.test(OTHER,"ProtoField-new-uint64-mask-uint64", success)
+
+success = pcall(ProtoField.new, "uint64", "test.uint64_3", ftypes.UINT64, nil, base.NONE, "invalid") -- 0
+testlib.test(OTHER,"ProtoField-new-uint64-mask-string-invalid", success)
+
+success = pcall(ProtoField.new, "uint64", "test.uint64_4", ftypes.UINT64, nil, base.HEX, "-1") -- 0xFFFFFFFFFFFFFFFF
+testlib.test(OTHER,"ProtoField-new-uint64-mask-negative", success)
+
+success = pcall(ProtoField.new, "uint64", "test.uint64_5", ftypes.UINT64, nil, base.NONE)
+testlib.test(OTHER,"ProtoField-new-uint64-mask-none", success)
+
+success = pcall(ProtoField.new, "uint64", "test.uint64_6", ftypes.UINT64, nil, base.NONE, nil)
+testlib.test(OTHER,"ProtoField-new-uint64-mask-nil", success)
+
+success = pcall(ProtoField.uint64, "test.uint64_10", nil, base.HEX, nil, 0x1)
+testlib.test(OTHER,"ProtoField-uint64-mask-trivial", success)
+
+success = pcall(ProtoField.uint64, "test.uint64_11", nil, base.HEX, nil, "1")
+testlib.test(OTHER,"ProtoField-uint64-mask-string", success)
+
+success = pcall(ProtoField.uint64, "test.uint64_12", nil, base.HEX, nil, UInt64(0x00000001, 0x0))
+testlib.test(OTHER,"ProtoField-uint64-mask-uint64", success)
+
+success = pcall(ProtoField.uint64, "test.uint64_13", nil, base.DEC, nil, "invalid") -- 0
+testlib.test(OTHER,"ProtoField-uint64-mask-string-invalid", success)
+
+success = pcall(ProtoField.uint64, "test.uint64_14", nil, base.DEC, nil, "-1") -- 0xFFFFFFFFFFFFFFFF
+testlib.test(OTHER,"ProtoField-uint64-mask-negative", success)
+
+success = pcall(ProtoField.uint64, "test.uint64_15", nil, base.DEC, nil)
+testlib.test(OTHER,"ProtoField-uint64-mask-none", success)
+
+success = pcall(ProtoField.uint64, "test.uint64_16", nil, base.DEC, nil, nil)
+testlib.test(OTHER,"ProtoField-uint64-mask-nil", success)
+
+
+-- Field name: empty, illegal, incompatible
+success = pcall(ProtoField.int8, nil, "empty field name 1")
+testlib.test(OTHER,"ProtoField-empty-field-name-1", not success)
+
+success = pcall(ProtoField.int8, "", "empty field name 2")
+testlib.test(OTHER,"ProtoField-empty-field-name-2", not success)
+
+success = pcall(ProtoField.int8, "test.$", "illegal field name")
+testlib.test(OTHER,"ProtoField-illegal-field-name", not success)
+
+success = pcall(ProtoField.int8, "frame.time", "incompatible field name")
+testlib.test(OTHER,"ProtoField-incompatible-field-name", not success)
+
+-- Actual name: empty
+success = pcall(ProtoField.int8, "test.empty_name_1")
+testlib.test(OTHER,"ProtoField-empty-name-1", success) -- will use abbrev
+
+success = pcall(ProtoField.int8, "test.empty_name_2", "")
+testlib.test(OTHER,"ProtoField-empty-name-2", not success)
+
+-- Signed integer base values, only base.DEC should work
+success = pcall(ProtoField.int8, "test.int.base_none", "int base NONE", base.NONE)
+testlib.test(OTHER,"ProtoField-int-base-none", not success)
+
+success = pcall(ProtoField.int8, "test.int.base_dec", "int base DEC", base.DEC)
+testlib.test(OTHER,"ProtoField-int-base-dec", success)
+
+success = pcall(ProtoField.int8, "test.int.base_hex", "int base HEX", base.HEX)
+testlib.test(OTHER,"ProtoField-int-base-hex", not success)
+
+success = pcall(ProtoField.int8, "test.int.base_oct", "int base OCT", base.OCT)
+testlib.test(OTHER,"ProtoField-int-base-oct", not success)
+
+success = pcall(ProtoField.int8, "test.int.base_dec_hex", "int base DEC_HEX", base.DEC_HEX)
+testlib.test(OTHER,"ProtoField-int-base-dec-hex", not success)
+
+success = pcall(ProtoField.int8, "test.int.base_hex_dec", "int base HEX_DEC", base.HEX_DEC)
+testlib.test(OTHER,"ProtoField-int-base-hex-dec", not success)
+
+-- Passing no table should not work
+success = pcall(ProtoField.uint16, "test.bad0", "Bad0", base.UNIT_STRING)
+testlib.test(OTHER,"ProtoField-unitstring-no-table", not success)
+
+-- Passing an empty table should not work
+success = pcall(ProtoField.uint16, "test.bad1", "Bad1", base.UNIT_STRING, {})
+testlib.test(OTHER,"ProtoField-unitstring-empty-table", not success)
+
+-- Passing userdata should not work
+success = pcall(ProtoField.uint16, "test.bad2", "Bad2", base.UNIT_STRING, {test_proto})
+testlib.test(OTHER,"ProtoField-unitstring-userdata", not success)
+
+-- Too many items are not supported
+success = pcall(ProtoField.uint16, "test.bad3", "Bad3", base.UNIT_STRING, {"too", "many", "items"})
+testlib.test(OTHER,"ProtoField-unitstring-too-many-items", not success)
+
+local numinits = 0
+function test_proto.init()
+ numinits = numinits + 1
+ if numinits == 2 then
+ testlib.getResults()
+ end
+end
+
+-- Test expected text with singular and plural forms
+function test_proto.dissector(tvb, pinfo, tree)
+ local ti
+ testlib.countPacket(FRAME)
+
+ local tvb1 = ByteArray.new("00 00"):tvb("Tvb1")
+ ti = tree:add(test_proto.fields.time_field, tvb1())
+ testlib.test(PER_FRAME,"Time: 0 secs", ti.text == "Time: 0 secs")
+ ti = tree:add(test_proto.fields.dist_field, tvb1())
+ testlib.test(PER_FRAME,"Distance: 0 km", ti.text == "Distance: 0 km")
+
+ local tvb2 = ByteArray.new("00 01"):tvb("Tvb2")
+ ti = tree:add(test_proto.fields.time_field, tvb2())
+ testlib.test(PER_FRAME,"Time: 1 sec", ti.text == "Time: 1 sec")
+ ti = tree:add(test_proto.fields.dist_field, tvb2())
+ testlib.test(PER_FRAME,"Distance: 1 km", ti.text == "Distance: 1 km")
+
+ local tvb3 = ByteArray.new("ff ff"):tvb("Tvb3")
+ ti = tree:add(test_proto.fields.time_field, tvb3())
+ testlib.test(PER_FRAME,"Time: 65535 secs", ti.text == "Time: 65535 secs")
+ ti = tree:add(test_proto.fields.dist_field, tvb3())
+ testlib.test(PER_FRAME,"Distance: 65535 km", ti.text == "Distance: 65535 km")
+
+ ti = tree:add(test_proto.fields.filtered_field, tvb2())
+ -- Note that this file should be loaded in tshark twice. Once with a visible
+ -- tree (-V) and once without a visible tree.
+ if tree.visible then
+ -- Tree is visible so both fields should be referenced
+ testlib.test(PER_FRAME,"Visible tree: Time is referenced", tree:referenced(test_proto.fields.time_field) == true)
+ testlib.test(PER_FRAME,"Visible tree: Filtered field is referenced", tree:referenced(test_proto.fields.filtered_field) == true)
+ else
+ -- Tree is not visible so only the field that appears in a filter should be referenced
+ testlib.test(PER_FRAME,"Invisible tree: Time is NOT referenced", tree:referenced(test_proto.fields.time_field) == false)
+ testlib.test(PER_FRAME,"Invisible tree: Filtered field is referenced", tree:referenced(test_proto.fields.filtered_field) == true)
+ end
+ testlib.pass(FRAME)
+end
+
+DissectorTable.get("udp.port"):add(65333, test_proto)
+DissectorTable.get("udp.port"):add(65346, test_proto)
diff --git a/test/lua/script_args.lua b/test/lua/script_args.lua
new file mode 100644
index 0000000..56ca3fb
--- /dev/null
+++ b/test/lua/script_args.lua
@@ -0,0 +1,24 @@
+----------------------------------------
+-- This just verifies that the number of args it got is what it expected.
+-- The first arg should be a number giving how many total args to expect,
+-- including itself.
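+--
+-- For reference (not exercised by this script): args are passed to a Lua script with
+-- tshark's -X option, e.g.
+--   tshark -X lua_script:script_args.lua -X lua_script1:3 -X lua_script1:foo -X lua_script1:bar
+-- which hands the first loaded script the three args "3", "foo" and "bar".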
+
+local testlib = require("testlib")
+
+local ARGS = "args"
+testlib.init({ [ARGS]=3 })
+
+-----------------------------
+
+testlib.testing("Command-line args")
+
+local arg={...} -- get passed-in args
+
+testlib.test(ARGS, "arg1", arg ~= nil and #arg > 0)
+
+local numargs = tonumber(arg[1])
+testlib.test(ARGS, "arg2", numargs ~= nil)
+
+testlib.test(ARGS, "arg3", #arg == numargs)
+
+testlib.getResults()
diff --git a/test/lua/struct.lua b/test/lua/struct.lua
new file mode 100644
index 0000000..bc6a2fb
--- /dev/null
+++ b/test/lua/struct.lua
@@ -0,0 +1,367 @@
+
+-- This is a test script for tshark/wireshark.
+-- This script runs inside tshark/wireshark, so to run it do:
+-- wireshark -X lua_script:<path_to_testdir>/lua/struct.lua
+-- tshark -r bogus.cap -X lua_script:<path_to_testdir>/lua/struct.lua
+
+-- Tests Struct functions
+
+local testlib = require("testlib")
+local OTHER = "other"
+
+--
+-- auxiliary function to print a hexadecimal `dump' of a given string
+-- (not used by the test)
+--
+local function tohex(s, sep)
+ local patt = "%02x" .. (sep or "")
+ s = string.gsub(s, "(.)", function(c)
+ return string.format(patt, string.byte(c))
+ end)
+ if sep then s = s:sub(1,-(sep:len()+1)) end
+ return s
+end
+
+local function bp (s)
+ s = tohex(s)
+ print(s)
+end
+
+
+-----------------------------
+
+print("Lua version: ".._VERSION)
+
+testlib.init({ [OTHER] = 0 })
+
+testlib.testing(OTHER, "Struct library")
+
+local lib = Struct
+testlib.test(OTHER,"global",_G.Struct == lib)
+
+for name, val in pairs(lib) do
+ print("\t"..name.." = "..type(val))
+end
+
+testlib.test(OTHER,"class1",type(lib) == 'table')
+testlib.test(OTHER,"class2",type(lib.pack) == 'function')
+testlib.test(OTHER,"class3",type(lib.unpack) == 'function')
+testlib.test(OTHER,"class4",type(lib.size) == 'function')
+
+
+local val1 = "\42\00\00\00\00\00\00\01\00\00\00\02\00\00\00\03\00\00\00\04"
+local fmt1_le = "<!4biii4i4"
+local fmt1_be = ">!4biii4i4"
+local fmt1_64le = "<!4ieE"
+local fmt1_64be = ">!4ieE"
+local fmt2_be = ">!4bi(ii4)i"
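+-- (For readers unfamiliar with the Struct format notation, roughly: '<'/'>' pick little/big
+-- endian, '!n' sets alignment to n bytes, 'b' is a signed byte, 'i' a native-size int, 'i4' a
+-- 4-byte int, 'e'/'E' are Int64/UInt64, and a parenthesized group is parsed but its values
+-- are not returned.)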
+
+testlib.testing(OTHER, "basic size")
+
+testlib.test(OTHER,"basic_size1", lib.size(fmt1_le) == string.len(val1))
+testlib.test(OTHER,"basic_size2", lib.size(fmt1_le) == Struct.size(fmt1_be))
+testlib.test(OTHER,"basic_size3", lib.size(fmt1_le) == Struct.size(fmt1_64le))
+testlib.test(OTHER,"basic_size4", lib.size(fmt2_be) == Struct.size(fmt1_64le))
+
+testlib.testing(OTHER, "basic values")
+
+testlib.test(OTHER,"basic_values1", lib.values(fmt1_le) == 5)
+testlib.test(OTHER,"basic_values2", lib.values(fmt1_be) == lib.values(fmt1_le))
+testlib.test(OTHER,"basic_values3", lib.values(fmt1_64le) == 3)
+testlib.test(OTHER,"basic_values4", lib.values(fmt2_be) == lib.values(fmt1_64le))
+testlib.test(OTHER,"basic_values4", lib.values(" (I) s x i XxX c0") == 3)
+
+testlib.testing(OTHER, "tohex")
+local val1hex = "2A:00:00:00:00:00:00:01:00:00:00:02:00:00:00:03:00:00:00:04"
+testlib.test(OTHER,"tohex1", Struct.tohex(val1) == tohex(val1):upper())
+testlib.test(OTHER,"tohex2", Struct.tohex(val1,true) == tohex(val1))
+testlib.test(OTHER,"tohex3", Struct.tohex(val1,false,":") == val1hex)
+testlib.test(OTHER,"tohex4", Struct.tohex(val1,true,":") == val1hex:lower())
+
+testlib.testing(OTHER, "fromhex")
+testlib.test(OTHER,"fromhex1", Struct.fromhex(val1hex,":") == val1)
+local val1hex2 = val1hex:gsub(":","")
+testlib.test(OTHER,"fromhex2", Struct.fromhex(val1hex2) == val1)
+testlib.test(OTHER,"fromhex3", Struct.fromhex(val1hex2:lower()) == val1)
+
+testlib.testing(OTHER, "basic unpack")
+local ret1, ret2, ret3, ret4, ret5, pos = lib.unpack(fmt1_le, val1)
+testlib.test(OTHER,"basic_unpack1", ret1 == 42 and ret2 == 0x01000000 and ret3 == 0x02000000 and ret4 == 0x03000000 and ret5 == 0x04000000)
+testlib.test(OTHER,"basic_unpack_position1", pos == string.len(val1) + 1)
+
+ret1, ret2, ret3, ret4, ret5, pos = lib.unpack(fmt1_be, val1)
+testlib.test(OTHER,"basic_unpack2", ret1 == 42 and ret2 == 1 and ret3 == 2 and ret4 == 3 and ret5 == 4)
+testlib.test(OTHER,"basic_unpack_position2", pos == string.len(val1) + 1)
+
+ret1, ret2, ret3, pos = lib.unpack(fmt1_64le, val1)
+testlib.test(OTHER,"basic_unpack3", ret1 == 42 and ret2 == Int64.new( 0x01000000, 0x02000000) and ret3 == UInt64.new( 0x03000000, 0x04000000))
+print(typeof(ret2),typeof(ret3))
+testlib.test(OTHER,"basic_unpack3b", typeof(ret2) == "Int64" and typeof(ret3) == "UInt64")
+testlib.test(OTHER,"basic_unpack_position3", pos == string.len(val1) + 1)
+
+ret1, ret2, ret3, pos = lib.unpack(fmt1_64be, val1)
+testlib.test(OTHER,"basic_unpack4", ret1 == 0x2A000000 and ret2 == Int64.new( 2, 1) and ret3 == UInt64.new( 4, 3))
+testlib.test(OTHER,"basic_unpack4b", typeof(ret2) == "Int64" and typeof(ret3) == "UInt64")
+testlib.test(OTHER,"basic_unpack_position4", pos == string.len(val1) + 1)
+
+ret1, ret2, ret3, pos = lib.unpack(fmt2_be, val1)
+testlib.test(OTHER,"basic_unpack5", ret1 == 42 and ret2 == 1 and ret3 == 4)
+testlib.test(OTHER,"basic_unpack_position5", pos == string.len(val1) + 1)
+
+testlib.testing(OTHER, "basic pack")
+local pval1 = lib.pack(fmt1_le, lib.unpack(fmt1_le, val1))
+testlib.test(OTHER,"basic_pack1", pval1 == val1)
+testlib.test(OTHER,"basic_pack2", val1 == lib.pack(fmt1_be, lib.unpack(fmt1_be, val1)))
+testlib.test(OTHER,"basic_pack3", val1 == lib.pack(fmt1_64le, lib.unpack(fmt1_64le, val1)))
+testlib.test(OTHER,"basic_pack4", val1 == lib.pack(fmt1_64be, lib.unpack(fmt1_64be, val1)))
+testlib.test(OTHER,"basic_pack5", lib.pack(fmt2_be, lib.unpack(fmt1_be, val1)) == lib.pack(">!4biiii", 42, 1, 0, 0, 2))
+
+----------------------------------
+-- following comes from:
+-- https://github.com/LuaDist/struct/blob/master/teststruct.lua
+-- unfortunately many of his tests assumed a local machine word
+-- size of 4 bytes for long and such, so I had to muck with this
+-- to make it handle 64-bit compiles.
+-- $Id: teststruct.lua,v 1.2 2008/04/18 20:06:01 roberto Exp $
+
+
+-- some pack/unpack commands are host-size dependent, so we need to pad
+local l_pad, ln_pad = "",""
+if lib.size("l") == 8 then
+ -- the machine running this script uses a long of 8 bytes
+ l_pad = "\00\00\00\00"
+ ln_pad = "\255\255\255\255"
+end
+
+local a,b,c,d,e,f,x
+
+testlib.testing(OTHER, "pack")
+testlib.test(OTHER,"pack_I",#Struct.pack("I", 67324752) == 4)
+
+testlib.test(OTHER,"pack_b1",lib.pack('b', 10) == string.char(10))
+testlib.test(OTHER,"pack_b2",lib.pack('bbb', 10, 20, 30) == string.char(10, 20, 30))
+
+testlib.test(OTHER,"pack_h1",lib.pack('<h', 10) == string.char(10, 0))
+testlib.test(OTHER,"pack_h2",lib.pack('>h', 10) == string.char(0, 10))
+testlib.test(OTHER,"pack_h3",lib.pack('<h', -10) == string.char(256-10, 256-1))
+
+testlib.test(OTHER,"pack_l1",lib.pack('<l', 10) == string.char(10, 0, 0, 0)..l_pad)
+testlib.test(OTHER,"pack_l2",lib.pack('>l', 10) == l_pad..string.char(0, 0, 0, 10))
+testlib.test(OTHER,"pack_l3",lib.pack('<l', -10) == string.char(256-10, 256-1, 256-1, 256-1)..ln_pad)
+
+testlib.testing(OTHER, "unpack")
+testlib.test(OTHER,"unpack_h1",lib.unpack('<h', string.char(10, 0)) == 10)
+testlib.test(OTHER,"unpack_h2",lib.unpack('>h', string.char(0, 10)) == 10)
+testlib.test(OTHER,"unpack_h3",lib.unpack('<h', string.char(256-10, 256-1)) == -10)
+
+testlib.test(OTHER,"unpack_l1",lib.unpack('<l', string.char(10, 0, 0, 1)..l_pad) == 10 + 2^(3*8))
+testlib.test(OTHER,"unpack_l2",lib.unpack('>l', l_pad..string.char(0, 1, 0, 10)) == 10 + 2^(2*8))
+testlib.test(OTHER,"unpack_l3",lib.unpack('<l', string.char(256-10, 256-1, 256-1, 256-1)..ln_pad) == -10)
+
+-- limits
+lims = {{'B', 255}, {'b', 127}, {'b', -128},
+ {'I1', 255}, {'i1', 127}, {'i1', -128},
+ {'H', 2^16 - 1}, {'h', 2^15 - 1}, {'h', -2^15},
+ {'I2', 2^16 - 1}, {'i2', 2^15 - 1}, {'i2', -2^15},
+ {'L', 2^32 - 1}, {'l', 2^31 - 1}, {'l', -2^31},
+ {'I4', 2^32 - 1}, {'i4', 2^31 - 1}, {'i4', -2^31},
+ }
+
+for _, a in pairs{'', '>', '<'} do
+ local i = 1
+ for _, l in pairs(lims) do
+ local fmt = a .. l[1]
+ testlib.test(OTHER,"limit"..i.."("..l[1]..")", lib.unpack(fmt, lib.pack(fmt, l[2])) == l[2])
+ i = i + 1
+ end
+end
+
+
+testlib.testing(OTHER, "fixed-sized ints")
+-- tests for fixed-sized ints
+local num = 1
+for _, i in pairs{1,2,4} do
+ x = lib.pack('<i'..i, -3)
+ testlib.test(OTHER,"pack_fixedlen"..num, string.len(x) == i)
+ testlib.test(OTHER,"pack_fixed"..num, x == string.char(256-3) .. string.rep(string.char(256-1), i-1))
+ testlib.test(OTHER,"unpack_fixed"..num, lib.unpack('<i'..i, x) == -3)
+ num = num + 1
+end
+
+
+testlib.testing(OTHER, "alignment")
+-- alignment
+d = lib.pack("d", 5.1)
+ali = {[1] = string.char(1)..d,
+ [2] = string.char(1, 0)..d,
+ [4] = string.char(1, 0, 0, 0)..d,
+ [8] = string.char(1, 0, 0, 0, 0, 0, 0, 0)..d,
+ }
+
+num = 1
+for a,r in pairs(ali) do
+ testlib.test(OTHER,"pack_align"..num, lib.pack("!"..a.."bd", 1, 5.1) == r)
+ local x,y = lib.unpack("!"..a.."bd", r)
+ testlib.test(OTHER,"unpack_align"..num, x == 1 and y == 5.1)
+ num = num + 1
+end
+
+
+testlib.testing(OTHER, "string")
+-- strings
+testlib.test(OTHER,"string_pack1",lib.pack("c", "alo alo") == "a")
+testlib.test(OTHER,"string_pack2",lib.pack("c4", "alo alo") == "alo ")
+testlib.test(OTHER,"string_pack3",lib.pack("c5", "alo alo") == "alo a")
+testlib.test(OTHER,"string_pack4",lib.pack("!4b>c7", 1, "alo alo") == "\1alo alo")
+testlib.test(OTHER,"string_pack5",lib.pack("!2<s", "alo alo") == "alo alo\0")
+testlib.test(OTHER,"string_pack6",lib.pack(" c0 ", "alo alo") == "alo alo")
+num = 1
+for _, f in pairs{"B", "l", "i2", "f", "d"} do
+ for _, s in pairs{"", "a", "alo", string.rep("x", 200)} do
+ local x = lib.pack(f.."c0", #s, s)
+ testlib.test(OTHER,"string_unpack"..num, lib.unpack(f.."c0", x) == s)
+ num = num + 1
+ end
+end
+
+
+testlib.testing(OTHER, "indeces")
+-- indices
+x = lib.pack("!>iiiii", 1, 2, 3, 4, 5)
+local i = 1
+local k = 1
+num = 1
+while i < #x do
+ local v, j = lib.unpack("!>i", x, i)
+ testlib.test(OTHER,"index_unpack"..num, j == i + 4 and v == k)
+ i = j; k = k + 1
+ num = num + 1
+end
+
+testlib.testing(OTHER, "absolute")
+-- alignments are relative to 'absolute' positions
+x = lib.pack("!8 xd", 12)
+testlib.test(OTHER,"absolute_unpack1",lib.unpack("!8d", x, 3) == 12)
+
+
+testlib.test(OTHER,"absolute_pack1",lib.pack("<lhbxxH", -2, 10, -10, 250) ==
+ string.char(254, 255, 255, 255) ..ln_pad.. string.char(10, 0, 246, 0, 0, 250, 0))
+
+a,b,c,d = lib.unpack("<lhbxxH",
+ string.char(254, 255, 255, 255) ..ln_pad.. string.char(10, 0, 246, 0, 0, 250, 0))
+testlib.test(OTHER,"absolute_unpack2",a == -2 and b == 10 and c == -10 and d == 250)
+
+testlib.test(OTHER,"absolute_pack2",lib.pack(">lBxxH", -20, 10, 250) ==
+ ln_pad..string.char(255, 255, 255, 236, 10, 0, 0, 0, 250))
+
+
+testlib.testing(OTHER, "position")
+
+a, b, c, d = lib.unpack(">lBxxH",
+ ln_pad..string.char(255, 255, 255, 236, 10, 0, 0, 0, 250))
+-- the 'd' return val is position in string, so will depend on size of long 'l'
+local vald = 10 + string.len(l_pad)
+testlib.test(OTHER,"position_unpack1",a == -20 and b == 10 and c == 250 and d == vald)
+
+a,b,c,d,e = lib.unpack(">fdfH",
+ '000'..lib.pack(">fdfH", 3.5, -24e-5, 200.5, 30000),
+ 4)
+testlib.test(OTHER,"position_unpack2",a == 3.5 and b == -24e-5 and c == 200.5 and d == 30000 and e == 22)
+
+a,b,c,d,e = lib.unpack("<fdxxfH",
+ '000'..lib.pack("<fdxxfH", -13.5, 24e5, 200.5, 300),
+ 4)
+testlib.test(OTHER,"position_unpack3",a == -13.5 and b == 24e5 and c == 200.5 and d == 300 and e == 24)
+
+x = lib.pack(">I2fi4I2", 10, 20, -30, 40001)
+testlib.test(OTHER,"position_pack1",string.len(x) == 2+4+4+2)
+testlib.test(OTHER,"position_unpack4",lib.unpack(">f", x, 3) == 20)
+a,b,c,d = lib.unpack(">i2fi4I2", x)
+testlib.test(OTHER,"position_unpack5",a == 10 and b == 20 and c == -30 and d == 40001)
+
+testlib.testing(OTHER, "string length")
+local s = "hello hello"
+x = lib.pack(" b c0 ", string.len(s), s)
+testlib.test(OTHER,"stringlen_unpack1",lib.unpack("bc0", x) == s)
+x = lib.pack("Lc0", string.len(s), s)
+testlib.test(OTHER,"stringlen_unpack2",lib.unpack(" L c0 ", x) == s)
+x = lib.pack("cc3b", s, s, 0)
+testlib.test(OTHER,"stringlen_pack1",x == "hhel\0")
+testlib.test(OTHER,"stringlen_unpack3",lib.unpack("xxxxb", x) == 0)
+
+testlib.testing(OTHER, "padding")
+testlib.test(OTHER,"padding_pack1",lib.pack("<!l", 3) == string.char(3, 0, 0, 0)..l_pad)
+testlib.test(OTHER,"padding_pack2",lib.pack("<!xl", 3) == l_pad..string.char(0, 0, 0, 0, 3, 0, 0, 0)..l_pad)
+testlib.test(OTHER,"padding_pack3",lib.pack("<!xxl", 3) == l_pad..string.char(0, 0, 0, 0, 3, 0, 0, 0)..l_pad)
+testlib.test(OTHER,"padding_pack4",lib.pack("<!xxxl", 3) == l_pad..string.char(0, 0, 0, 0, 3, 0, 0, 0)..l_pad)
+
+testlib.test(OTHER,"padding_unpack1",lib.unpack("<!l", string.char(3, 0, 0, 0)..l_pad) == 3)
+testlib.test(OTHER,"padding_unpack2",lib.unpack("<!xl", l_pad..string.char(0, 0, 0, 0, 3, 0, 0, 0)..l_pad) == 3)
+testlib.test(OTHER,"padding_unpack3",lib.unpack("<!xxl", l_pad..string.char(0, 0, 0, 0, 3, 0, 0, 0)..l_pad) == 3)
+testlib.test(OTHER,"padding_unpack4",lib.unpack("<!xxxl", l_pad..string.char(0, 0, 0, 0, 3, 0, 0, 0)..l_pad) == 3)
+
+testlib.testing(OTHER, "format")
+testlib.test(OTHER,"format_pack1",lib.pack("<!2 b l h", 2, 3, 5) == string.char(2, 0, 3, 0)..l_pad..string.char(0, 0, 5, 0))
+a,b,c = lib.unpack("<!2blh", string.char(2, 0, 3, 0)..l_pad..string.char(0, 0, 5, 0))
+testlib.test(OTHER,"format_pack2",a == 2 and b == 3 and c == 5)
+
+testlib.test(OTHER,"format_pack3",lib.pack("<!8blh", 2, 3, 5) == string.char(2, 0, 0, 0)..l_pad..string.char(3, 0, 0, 0)..l_pad..string.char(5, 0))
+
+a,b,c = lib.unpack("<!8blh", string.char(2, 0, 0, 0)..l_pad..string.char(3, 0, 0, 0)..l_pad..string.char(5, 0))
+testlib.test(OTHER,"format_pack4",a == 2 and b == 3 and c == 5)
+
+testlib.test(OTHER,"format_pack5",lib.pack(">sh", "aloi", 3) == "aloi\0\0\3")
+testlib.test(OTHER,"format_pack6",lib.pack(">!sh", "aloi", 3) == "aloi\0\0\0\3")
+
+x = "aloi\0\0\0\0\3\2\0\0"
+a, b, c = lib.unpack("<!si4", x)
+testlib.test(OTHER,"format_unpack1",a == "aloi" and b == 2*256+3 and c == string.len(x)+1)
+
+x = lib.pack("!4sss", "hi", "hello", "bye")
+a,b,c = lib.unpack("sss", x)
+testlib.test(OTHER,"format_unpack2",a == "hi" and b == "hello" and c == "bye")
+a, i = lib.unpack("s", x, 1)
+testlib.test(OTHER,"format_unpack3",a == "hi")
+a, i = lib.unpack("s", x, i)
+testlib.test(OTHER,"format_unpack4",a == "hello")
+a, i = lib.unpack("s", x, i)
+testlib.test(OTHER,"format_unpack5",a == "bye")
+
+
+
+-- test for weird conditions
+testlib.testing(OTHER, "weird conditions")
+testlib.test(OTHER,"weird_pack1",lib.pack(">>>h <!!!<h", 10, 10) == string.char(0, 10, 10, 0))
+testlib.test(OTHER,"weird_pack2",not pcall(lib.pack, "!3l", 10))
+testlib.test(OTHER,"weird_pack3",not pcall(lib.pack, "3", 10))
+testlib.test(OTHER,"weird_pack4",not pcall(lib.pack, "i33", 10))
+testlib.test(OTHER,"weird_pack5",not pcall(lib.pack, "I33", 10))
+testlib.test(OTHER,"weird_pack6",lib.pack("") == "")
+testlib.test(OTHER,"weird_pack7",lib.pack(" ") == "")
+testlib.test(OTHER,"weird_pack8",lib.pack(">>><<<!!") == "")
+testlib.test(OTHER,"weird_unpack1",not pcall(lib.unpack, "c0", "alo"))
+testlib.test(OTHER,"weird_unpack2",not pcall(lib.unpack, "s", "alo"))
+testlib.test(OTHER,"weird_unpack3",lib.unpack("s", "alo\0") == "alo")
+testlib.test(OTHER,"weird_pack9",not pcall(lib.pack, "c4", "alo"))
+testlib.test(OTHER,"weird_pack10",pcall(lib.pack, "c3", "alo"))
+testlib.test(OTHER,"weird_unpack4",not pcall(lib.unpack, "c4", "alo"))
+testlib.test(OTHER,"weird_unpack5",pcall(lib.unpack, "c3", "alo"))
+testlib.test(OTHER,"weird_unpack6",not pcall(lib.unpack, "bc0", "\4alo"))
+testlib.test(OTHER,"weird_unpack7",pcall(lib.unpack, "bc0", "\3alo"))
+
+testlib.test(OTHER,"weird_unpack8",not pcall(lib.unpack, "b", "alo", 4))
+testlib.test(OTHER,"weird_unpack9",lib.unpack("b", "alo\3", 4) == 3)
+
+testlib.test(OTHER,"weird_pack11",not pcall(lib.pack, "\250\22", "alo"))
+testlib.test(OTHER,"weird_pack12",not pcall(lib.pack, 1, "alo"))
+testlib.test(OTHER,"weird_pack13",not pcall(lib.pack, nil, "alo"))
+testlib.test(OTHER,"weird_pack14",not pcall(lib.pack, {}, "alo"))
+testlib.test(OTHER,"weird_pack15",not pcall(lib.pack, true, "alo"))
+testlib.test(OTHER,"weird_unpack10",not pcall(lib.unpack, "\250\22", "\3alo"))
+testlib.test(OTHER,"weird_unpack11",not pcall(lib.unpack, 1, "\3alo"))
+testlib.test(OTHER,"weird_unpack12",not pcall(lib.unpack, nil, "\3alo"))
+testlib.test(OTHER,"weird_unpack13",not pcall(lib.unpack, {}, "\3alo"))
+testlib.test(OTHER,"weird_unpack14",not pcall(lib.unpack, true, "\3alo"))
+
+-- done
+testlib.getResults()
diff --git a/test/lua/testlib.lua b/test/lua/testlib.lua
new file mode 100644
index 0000000..e002c8b
--- /dev/null
+++ b/test/lua/testlib.lua
@@ -0,0 +1,174 @@
+----------------------------------------
+-- library name: testlib.lua
+--
+-- Provides common functions for other lua test scripts to use.
+----------------------------------------
+--[[
+ This library aims to codify the most common practices used in testing
+ Wireshark's lua features. The intent is to reduce boilerplate code
+ so test scripts can focus on test cases.
+
+ Tests are nominally classified into named groups.
+ (In practice, most test files just use a single group called "other",
+ but this should be tidied up at some point.)
+ A test script must call testlib.init() with a table of
+ group names and the number of tests expected to be run in each group.
+ This number can be zero if you want to declare a group but don't
+ need to check that a specific number of tests is run.
+
+ Suggested use (abridged):
+
+ local testlib = require("testlib")
+ testlib.init({ other = 3 })
+ testlib.testing("other", "example tests")
+ testlib.test("other", "firsttest", 1+1 == 2)
+ testlib.test("other", "funccall", pcall(my_function, func_args), "function should succeed")
+ testlib.test("other", "funccall", not pcall(my_function2, func_args), "function expected to give error")
+ testlib.getResults()
+
+ For information on specific functions, keep reading.
+--]]
+
+----------------------------------------
+-- This is the module object, which will be returned at the end of this file.
+local M = {
+ ["groups"] = {},
+}
+
+----------------------------------------
+-- Initialize the test suite. Define one or more testing groups,
+-- giving the expected number of tests to run for each.
+-- (Telling it to "expect" zero tests for a group just skips
+-- the check that a specific number of tests ran in that group.)
+-- May be called repeatedly if you want to define group names
+-- at runtime.
+M.init = function(t)
+ for group, expected in pairs(t) do
+ M.groups[group] = {
+ ["expected"] = expected,
+ ["passed"] = 0,
+ ["failed"] = 0,
+ ["total"] = 0,
+ ["packets"] = 0,
+ }
+ end
+end
+
+----------------------------------------
+-- Indicate a passed test in the named group.
+M.pass = function(group)
+ M.groups[group].passed = M.groups[group].passed + 1
+ M.groups[group].total = M.groups[group].total + 1
+end
+
+----------------------------------------
+-- Indicate a failed test in the named group.
+M.fail = function(group)
+ M.groups[group].failed = M.groups[group].failed + 1
+ M.groups[group].total = M.groups[group].total + 1
+end
+
+----------------------------------------
+-- There are some tests which track the number of packets they're testing.
+-- Use this function to count a single packet as being "seen" by a group.
+M.countPacket = function(group)
+ M.groups[group].packets = M.groups[group].packets + 1
+end
+
+----------------------------------------
+-- Get the number of packets that have been counted under the named group.
+M.getPktCount = function(group)
+ return M.groups[group].packets
+end
+
+----------------------------------------
+-- Print a banner reporting test progress.
+-- Has no material effect on test progression, but is useful for
+-- understanding the test results.
+M.testing = function(group, msg)
+ if msg == nil then
+ msg, group = group, nil
+ end
+ if group then
+ if M.groups[group].packets > 0 then
+ print(string.format("\n-------- Testing %s -- %s for packet # %d --------\n",
+ group, msg, M.groups[group].packets))
+ else
+ print(string.format("\n-------- Testing %s -- %s --------\n",
+ group, msg))
+ end
+ else
+ print(string.format("\n-------- Testing %s --------\n", msg))
+ end
+end
+
+----------------------------------------
+-- Core function: test a condition, report and track its status.
+-- The output format shown here is what was commonly used in test scripts,
+-- but can be changed.
+M.test = function(group, name, cond, msg)
+ -- io.stdout:write() doesn't add a newline like print() does
+ io.stdout:write(string.format("test %s --> %s-%d-%d...",
+ group, name, M.groups[group].total, M.groups[group].packets))
+ if cond then
+ io.stdout:write("passed\n")
+ M.pass(group)
+ return true
+ else
+ io.stdout:write("failed!\n")
+ M.fail(group)
+ if msg then
+ print(string.format("Got the following error: '%s'", msg))
+ end
+ -- Using error() causes the entire test script to abort.
+ -- This is how the lua test suite typically operates.
+ -- If a test script wants to continue with subsequent tests
+ -- after a failed test, this behaviour could be made
+ -- configurable in this module.
+ error(name .. " test failed!")
+ return false
+ end
+end
+
+----------------------------------------
+-- Call this at the finale of a test script to output the results of testing.
+-- This is where the number of tests run is compared to what was expected,
+-- if applicable.
+-- Scripts which run over empty.pcap will usually call this at the end of
+-- the file.
+-- Scripts which test by creating a protocol object will call this from
+-- the object's .init() method *the second time it is called*.
+-- Others usually call it in a tap listener's .draw() method,
+-- which tshark calls once when it reaches the end of the pcap.
+M.getResults = function()
+ local rv = true
+ print("\n===== Test Results =====")
+ for group, num in pairs(M.groups) do
+ if num.expected > 0 and num.total ~= num.expected then
+ rv = false
+ print("Something didn't run or ran too much... tests failed!")
+ print(string.format("%s: expected %d tests but ran %d tests",
+ group, num.expected, num.total))
+ end
+ if num.failed > 0 then
+ rv = false
+ print(string.format("%s: passed %d/%d, FAILED %d/%d",
+ group, num.passed, num.total, num.failed, num.total))
+ else
+ print(string.format("%s: passed %d/%d",
+ group, num.passed, num.total))
+ end
+ end
+ if rv then
+ -- The python wrapper which performs our lua testing
+ -- expects to see this string in the output if there were no failures.
+ print("All tests passed!")
+ else
+ print("Some tests failed!")
+ end
+ return rv
+end
+
+----------------------------------------
+-- That's the end of this library. Return the module we've created.
+return M
diff --git a/test/lua/try_heuristics.lua b/test/lua/try_heuristics.lua
new file mode 100644
index 0000000..fcd6d09
--- /dev/null
+++ b/test/lua/try_heuristics.lua
@@ -0,0 +1,61 @@
+-- Define a new protocol that runs TCP heuristics and on failure runs UDP heuristics
+--
+-- This expects to be run against dns_port.pcap, so it should end up resolving all packets to DNS with the UDP heuristic
+local test_proto = Proto("test", "Test Protocol")
+
+-- Have all tests passed so far?
+-- Anything that fails should set this to false, which will suppress the "All tests passed!" message.
+all_ok = true
+
+-- The number of frames expected
+-- Final test status is output with last frame
+LAST_FRAME = 4
+
+function test_proto.dissector(buf, pinfo, root)
+ print("Dissector function run")
+
+ orig_proto_name = tostring(pinfo.cols.protocol)
+
+ -- Run TCP heuristic dissectors
+ -- Dissection should fail, and the protocol name should be unchanged
+ tcp_success = DissectorTable.try_heuristics("tcp", buf, pinfo, root)
+ curr_proto_name = tostring(pinfo.cols.protocol)
+
+ if tcp_success then
+ all_ok = false
+ print("tcp heuristics were not expected to report success, but did!")
+ end
+
+ if curr_proto_name ~= orig_proto_name then
+ all_ok = false
+ print("after tcp heuristics were run, protocol " .. orig_proto_name .. " was not expected to change, but became " .. curr_proto_name .. "!")
+ end
+
+ -- Run UDP heuristic dissectors
+ -- Dissection should succeed, and the protocol name should be changed to DNS
+ udp_success = DissectorTable.try_heuristics("udp", buf, pinfo, root)
+ curr_proto_name = tostring(pinfo.cols.protocol)
+
+ if not udp_success then
+ all_ok = false
+ print("udp heuristics were expected to report success, but did not!")
+ end
+
+ if curr_proto_name ~= "DNS" then
+ all_ok = false
+ print("after udp heuristics were run, protocol should be changed to DNS, but became " .. curr_proto_name .. "!")
+ end
+
+ -- If we're on the last frame, report success or failure
+ if pinfo.number == LAST_FRAME then
+ if all_ok then
+ print("All tests passed!")
+ else
+ print("Some tests failed!")
+ end
+ end
+end
+
+-- Invoke test_proto on the expected UDP traffic
+DissectorTable.get("udp.port"):add(65333, test_proto)
+DissectorTable.get("udp.port"):add(65346, test_proto)
diff --git a/test/lua/tvb.lua b/test/lua/tvb.lua
new file mode 100644
index 0000000..baf702c
--- /dev/null
+++ b/test/lua/tvb.lua
@@ -0,0 +1,922 @@
+----------------------------------------
+-- script-name: tvb.lua
+-- This tests the Tvb/TvbRange and proto_add_XXX_item API.
+----------------------------------------
+local testlib = require("testlib")
+
+local FRAME = "frame"
+local OTHER = "other"
+
+-- expected number of runs per type
+--
+-- CHANGE THIS TO MATCH HOW MANY TESTS THERE ARE
+--
+-- The number of tests in a specific category (other than FRAME) is the
+-- number of times testlib.test() is run for it, directly or indirectly
+-- (verifyFields() and verifyResults() call it too).
+-- From the user's perspective, it can be calculated with the following
+-- formula:
+--
+-- N = number of direct testlib.test() calls +
+--     number of verifyFields() calls * (1 + number of fields) +
+--     number of verifyResults() calls * (1 + 2 * number of values)
+--
+-- if one happens to know the number of fields and the number of values.
+--
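+-- For example (purely illustrative numbers, not this script's actual counts):
+-- a category with 10 direct testlib.test() calls, 3 verifyFields() calls over
+-- 2 fields each, and 1 verifyResults() call over 4 values would expect
+--
+--     N = 10 + 3*(1+2) + 1*(1+2*4) = 10 + 9 + 9 = 28
+--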
+local n_frames = 1
+local taptests = { [FRAME]=n_frames, [OTHER]=391*n_frames }
+
+testlib.init(taptests)
+
+------------- test script ------------
+
+----------------------------------------
+-- creates a Proto object for our testing
+local test_proto = Proto("test","Test Protocol")
+
+local numinits = 0
+function test_proto.init()
+ numinits = numinits + 1
+ if numinits == 2 then
+ testlib.getResults()
+ end
+end
+
+
+----------------------------------------
+-- a table of all of our Protocol's fields
+range_string = {
+ { 0, 200, "The first part" },
+ { 201, 233, "The second part" },
+ { 234, 255, "The last part" },
+}
+
+local testfield =
+{
+ basic =
+ {
+ STRING = ProtoField.string ("test.basic.string", "Basic string"),
+ BOOLEAN = ProtoField.bool ("test.basic.boolean", "Basic boolean", 16, {"yes","no"}, 0x0001),
+ UINT8 = ProtoField.uint8 ("test.basic.uint8", "Basic uint8 with range string", base.RANGE_STRING, range_string ),
+ UINT16 = ProtoField.uint16 ("test.basic.uint16", "Basic uint16"),
+ UINT32 = ProtoField.uint32 ("test.basic.uint32", "Basic uint32 test with a unit string", base.UNIT_STRING, { "femtoFarads" }),
+ INT24 = ProtoField.int24 ("test.basic.uint24", "Basic uint24"),
+ BYTES = ProtoField.bytes ("test.basic.bytes", "Basic Bytes"),
+ UINT_BYTES = ProtoField.ubytes ("test.basic.ubytes", "Basic Uint Bytes"),
+ OID = ProtoField.oid ("test.basic.oid", "Basic OID"),
+ REL_OID = ProtoField.rel_oid("test.basic.rel_oid", "Basic Relative OID"),
+ ABSOLUTE_LOCAL = ProtoField.absolute_time("test.basic.absolute.local","Basic absolute local"),
+ ABSOLUTE_UTC = ProtoField.absolute_time("test.basic.absolute.utc", "Basic absolute utc", base.UTC),
+ IPv4 = ProtoField.ipv4 ("test.basic.ipv4", "Basic ipv4 address"),
+ IPv6 = ProtoField.ipv6 ("test.basic.ipv6", "Basic ipv6 address"),
+ ETHER = ProtoField.ether ("test.basic.ether", "Basic ethernet address"),
+ -- GUID = ProtoField.guid ("test.basic.guid", "Basic GUID"),
+ },
+
+ time =
+ {
+ ABSOLUTE_LOCAL = ProtoField.absolute_time("test.time.absolute.local","Time absolute local"),
+ ABSOLUTE_UTC = ProtoField.absolute_time("test.time.absolute.utc", "Time absolute utc", base.UTC),
+ },
+
+ bytes =
+ {
+ BYTES = ProtoField.bytes ("test.bytes.bytes", "Bytes"),
+ UINT_BYTES = ProtoField.ubytes ("test.bytes.ubytes", "Uint Bytes"),
+ OID = ProtoField.oid ("test.bytes.oid", "OID"),
+ REL_OID = ProtoField.rel_oid("test.bytes.rel_oid", "Relative OID"),
+ -- GUID = ProtoField.guid ("test.bytes.guid", "GUID"),
+ },
+}
+
+-- create a flat array table of the above that can be registered
+local pfields = {}
+for _,t in pairs(testfield) do
+ for k,v in pairs(t) do
+ pfields[#pfields+1] = v
+ end
+end
+
+-- register them
+test_proto.fields = pfields
+
+print("test_proto ProtoFields registered")
+
+
+local getfield =
+{
+ basic =
+ {
+ STRING = Field.new ("test.basic.string"),
+ BOOLEAN = Field.new ("test.basic.boolean"),
+ UINT8 = Field.new ("test.basic.uint8"),
+ UINT16 = Field.new ("test.basic.uint16"),
+ INT24 = Field.new ("test.basic.uint24"),
+ BYTES = Field.new ("test.basic.bytes"),
+ UINT_BYTES = Field.new ("test.basic.ubytes"),
+ OID = Field.new ("test.basic.oid"),
+ REL_OID = Field.new ("test.basic.rel_oid"),
+ ABSOLUTE_LOCAL = Field.new ("test.basic.absolute.local"),
+ ABSOLUTE_UTC = Field.new ("test.basic.absolute.utc"),
+ IPv4 = Field.new ("test.basic.ipv4"),
+ IPv6 = Field.new ("test.basic.ipv6"),
+ ETHER = Field.new ("test.basic.ether"),
+ -- GUID = Field.new ("test.basic.guid"),
+ },
+
+ time =
+ {
+ ABSOLUTE_LOCAL = Field.new ("test.time.absolute.local"),
+ ABSOLUTE_UTC = Field.new ("test.time.absolute.utc"),
+ },
+
+ bytes =
+ {
+ BYTES = Field.new ("test.bytes.bytes"),
+ UINT_BYTES = Field.new ("test.bytes.ubytes"),
+ OID = Field.new ("test.bytes.oid"),
+ REL_OID = Field.new ("test.bytes.rel_oid"),
+ -- GUID = Field.new ("test.bytes.guid"),
+ },
+}
+
+print("test_proto Fields created")
+
+local function addMatchFields(match_fields, ... )
+ match_fields[#match_fields + 1] = { ... }
+end
+
+local function getFieldInfos(name)
+ local base, field = name:match("([^.]+)%.(.+)")
+ if not base or not field then
+ error("failed to get base.field from '" .. name .. "'")
+ end
+ local t = { getfield[base][field]() }
+ return t
+end
+
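+-- fetch the current FieldInfo values for 'name' and compare them, in order,
+-- against the expected values recorded via addMatchFields()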
+local function verifyFields(name, match_fields)
+ local finfos = getFieldInfos(name)
+
+ testlib.test(OTHER, "verify-fields-size-" .. name, #finfos == #match_fields,
+ "#finfos=" .. #finfos .. ", #match_fields=" .. #match_fields)
+
+ for i, t in ipairs(match_fields) do
+ if type(t) ~= 'table' then
+ error("verifyFields didn't get a table inside the matches table")
+ end
+ if #t ~= 1 then
+ error("verifyFields matches table's table is not size 1")
+ end
+ local result = finfos[i]()
+ local value = t[1]
+ print(
+ name .. " got:",
+ "\n\tfinfos [" .. i .. "]='" .. tostring( result ) .. "'",
+ "\n\tmatches[" .. i .. "]='" .. tostring( value ) .. "'"
+ )
+ testlib.test(OTHER, "verify-fields-value-" .. name .. "-" .. i, result == value )
+ end
+end
+
+
+local function addMatchValues(match_values, ... )
+ match_values[#match_values + 1] = { ... }
+end
+
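+-- record the same expected value in both tables: match_fields is later checked
+-- by verifyFields(), and match_values (which also receives any extra
+-- arguments, such as the expected consumed length) by verifyResults()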
+local function addMatchFieldValues(match_fields, match_values, match_both, ...)
+ addMatchFields(match_fields, match_both)
+ addMatchValues(match_values, match_both, ...)
+end
+
+local result_values = {}
+local function resetResults()
+ result_values = {}
+end
+
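+-- pcall wrapper around TreeItem.add_packet_field(): expects a TreeItem plus
+-- two further return values (the parsed value and the offset just past it),
+-- and stashes that pair in result_values for verifyResults()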
+local function treeAddPField(...)
+ local t = { pcall ( TreeItem.add_packet_field, ... ) }
+ if t[1] == nil then
+ return nil, t[2]
+ end
+ -- it gives back a TreeItem, then the results
+ if typeof(t[2]) ~= 'TreeItem' then
+ return nil, "did not get a TreeItem returned from TreeItem.add_packet_field, "..
+ "got a '" .. typeof(t[2]) .."'"
+ end
+
+ if #t ~= 4 then
+ return nil, "did not get 3 return values from TreeItem.add_packet_field"
+ end
+
+ result_values[#result_values + 1] = { t[3], t[4] }
+
+ return true
+end
+
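+-- compare each (value, length) pair recorded by treeAddPField() against the
+-- expected pairs, checking both type and value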
+local function verifyResults(name, match_values)
+ testlib.test(OTHER, "verify-results-size-" .. name, #result_values == #match_values,
+ "#result_values=" .. #result_values ..
+ ", #match_values=" .. #match_values)
+
+ for j, t in ipairs(match_values) do
+ if type(t) ~= 'table' then
+ error("verifyResults didn't get a table inside the matches table")
+ end
+ for i, match in ipairs(t) do
+ local r = result_values[j][i]
+ print(
+ name .. " got:",
+ "\n\tresults[" .. j .. "][" .. i .. "]='" .. tostring( r ) .. "'",
+ "\n\tmatches[" .. j .. "][" .. i .. "]='" .. tostring( match ) .. "'"
+ )
+ local result_type, match_type
+ if type(match) == 'userdata' then
+ match_type = typeof(match)
+ else
+ match_type = type(match)
+ end
+ if type(r) == 'userdata' then
+ result_type = typeof(r)
+ else
+ result_type = type(r)
+ end
+ testlib.test(OTHER, "verify-results-type-" .. name .. "-" .. i, result_type == match_type )
+ testlib.test(OTHER, "verify-results-value-" .. name .. "-" .. i, r == match )
+ end
+ end
+end
+
+-- Compute the difference in seconds between local time and UTC
+-- from http://lua-users.org/wiki/TimeZone
+local function get_timezone()
+ local now = os.time()
+ return os.difftime(now, os.time(os.date("!*t", now)))
+end
+local timezone = get_timezone()
+print ("timezone = " .. timezone)
+
+----------------------------------------
+-- The following creates the callback function for the dissector.
+-- The 'tvbuf' is a Tvb object, 'pktinfo' is a Pinfo object, and 'root' is a TreeItem object.
+function test_proto.dissector(tvbuf,pktinfo,root)
+
+ testlib.countPacket(FRAME)
+ testlib.countPacket(OTHER)
+
+ testlib.testing(OTHER, "Basic string")
+
+ local tree = root:add(test_proto, tvbuf:range(0,tvbuf:len()))
+
+ -- create a fake Tvb to use for testing
+ local teststring = "this is the string for the first test"
+ local bytearray = ByteArray.new(teststring, true)
+ local tvb_string = bytearray:tvb("Basic string")
+
+ local function callTreeAdd(tree,...)
+ tree:add(...)
+ end
+
+ local string_match_fields = {}
+
+ testlib.test(OTHER, "basic-tvb_get_string", tvb_string:range():string() == teststring )
+
+ testlib.test(OTHER, "basic-string", tree:add(testfield.basic.STRING, tvb_string:range(0,tvb_string:len())) ~= nil )
+ addMatchFields(string_match_fields, teststring)
+
+ testlib.test(OTHER, "basic-string", pcall (callTreeAdd, tree, testfield.basic.STRING, tvb_string:range() ) )
+ addMatchFields(string_match_fields, teststring)
+
+ verifyFields("basic.STRING", string_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "Basic boolean")
+
+ local barray_bytes_hex = "00FF00018000"
+ local barray_bytes = ByteArray.new(barray_bytes_hex)
+ local tvb_bytes = barray_bytes:tvb("Basic bytes")
+ local bool_match_fields = {}
+
+ testlib.test(OTHER, "basic-boolean", pcall (callTreeAdd, tree, testfield.basic.BOOLEAN, tvb_bytes:range(0,2)) )
+ addMatchFields(bool_match_fields, true)
+
+ testlib.test(OTHER, "basic-boolean", pcall (callTreeAdd, tree, testfield.basic.BOOLEAN, tvb_bytes:range(2,2)) )
+ addMatchFields(bool_match_fields, true)
+
+ testlib.test(OTHER, "basic-boolean", pcall (callTreeAdd, tree, testfield.basic.BOOLEAN, tvb_bytes:range(4,2)) )
+ addMatchFields(bool_match_fields, false)
+
+ verifyFields("basic.BOOLEAN", bool_match_fields )
+
+----------------------------------------
+ testlib.testing(OTHER, "Basic uint16")
+
+ local uint16_match_fields = {}
+
+ testlib.test(OTHER, "basic-uint16", pcall (callTreeAdd, tree, testfield.basic.UINT16, tvb_bytes:range(0,2)) )
+ addMatchFields(uint16_match_fields, 255)
+
+ testlib.test(OTHER, "basic-uint16", pcall (callTreeAdd, tree, testfield.basic.UINT16, tvb_bytes:range(2,2)) )
+ addMatchFields(uint16_match_fields, 1)
+
+ testlib.test(OTHER, "basic-uint16", pcall (callTreeAdd, tree, testfield.basic.UINT16, tvb_bytes:range(4,2)) )
+ addMatchFields(uint16_match_fields, 32768)
+
+ verifyFields("basic.UINT16", uint16_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "Basic uint16-le")
+
+ local function callTreeAddLE(tree,...)
+ tree:add_le(...)
+ end
+
+ testlib.test(OTHER, "basic-uint16-le", pcall (callTreeAddLE, tree, testfield.basic.UINT16, tvb_bytes:range(0,2)) )
+ addMatchFields(uint16_match_fields, 65280)
+
+ testlib.test(OTHER, "basic-uint16-le", pcall (callTreeAddLE, tree, testfield.basic.UINT16, tvb_bytes:range(2,2)) )
+ addMatchFields(uint16_match_fields, 256)
+
+ testlib.test(OTHER, "basic-uint16-le", pcall (callTreeAddLE, tree, testfield.basic.UINT16, tvb_bytes:range(4,2)) )
+ addMatchFields(uint16_match_fields, 128)
+
+ verifyFields("basic.UINT16", uint16_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "Basic int24")
+
+ local int24_match_fields = {}
+
+ testlib.test(OTHER, "basic-int24", pcall (callTreeAdd, tree, testfield.basic.INT24, tvb_bytes:range(0,3)) )
+ addMatchFields(int24_match_fields, 65280)
+
+ testlib.test(OTHER, "basic-int24", pcall (callTreeAdd, tree, testfield.basic.INT24, tvb_bytes:range(3,3)) )
+ addMatchFields(int24_match_fields, 98304)
+
+ verifyFields("basic.INT24", int24_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "Basic int24-le")
+
+ testlib.test(OTHER, "basic-int24", pcall (callTreeAddLE, tree, testfield.basic.INT24, tvb_bytes:range(0,3)) )
+ addMatchFields(int24_match_fields, 65280)
+
+ testlib.test(OTHER, "basic-int24", pcall (callTreeAddLE, tree, testfield.basic.INT24, tvb_bytes:range(3,3)) )
+ addMatchFields(int24_match_fields, 32769)
+
+ verifyFields("basic.INT24", int24_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "Basic bytes")
+
+ local bytes_match_fields = {}
+
+ testlib.test(OTHER, "basic-tvb_get_string_bytes",
+ string.lower(tostring(tvb_bytes:range():bytes())) == string.lower(barray_bytes_hex))
+
+ testlib.test(OTHER, "basic-bytes", pcall (callTreeAdd, tree, testfield.basic.BYTES, tvb_bytes:range()) )
+ addMatchFields(bytes_match_fields, barray_bytes)
+
+ -- TODO: it's silly that tree:add_packet_field() requires an encoding argument
+ -- need to fix that separately in a bug fix
+ testlib.test(OTHER, "add_pfield-bytes", treeAddPField(tree, testfield.basic.BYTES,
+ tvb_bytes:range(), ENC_BIG_ENDIAN))
+ addMatchFields(bytes_match_fields, barray_bytes)
+
+ verifyFields("basic.BYTES", bytes_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "Basic uint bytes")
+
+ local len_string = string.format("%02x", barray_bytes:len())
+ local barray_uint_bytes = ByteArray.new(len_string) .. barray_bytes
+ local tvb_uint_bytes = barray_uint_bytes:tvb("Basic UINT_BYTES")
+ local uint_bytes_match_fields = {}
+
+ testlib.test(OTHER, "basic-uint-bytes", pcall (callTreeAdd, tree, testfield.basic.UINT_BYTES,
+ tvb_uint_bytes:range(0,1)) )
+ addMatchFields(uint_bytes_match_fields, barray_bytes)
+
+ testlib.test(OTHER, "add_pfield-uint-bytes", treeAddPField(tree, testfield.basic.UINT_BYTES,
+ tvb_uint_bytes:range(0,1), ENC_BIG_ENDIAN) )
+ addMatchFields(uint_bytes_match_fields, barray_bytes)
+
+ verifyFields("basic.UINT_BYTES", uint_bytes_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "Basic OID")
+
+ -- note: the tvb being dissected and compared isn't actually a valid OID.
+ -- tree:add() and tree:add_packet_field() don't care about its validity right now.
+
+ local oid_match_fields = {}
+
+ testlib.test(OTHER, "basic-oid", pcall(callTreeAdd, tree, testfield.basic.OID, tvb_bytes:range()) )
+ addMatchFields(oid_match_fields, barray_bytes)
+
+ testlib.test(OTHER, "add_pfield-oid", treeAddPField(tree, testfield.basic.OID,
+ tvb_bytes:range(), ENC_BIG_ENDIAN) )
+ addMatchFields(oid_match_fields, barray_bytes)
+
+ verifyFields("basic.OID", oid_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "Basic REL_OID")
+
+ -- note: the tvb being dissected and compared isn't actually a valid OID.
+ -- tree:add() and tree:add_packet_field() don't care about its validity right now.
+
+ local rel_oid_match_fields = {}
+
+ testlib.test(OTHER, "basic-rel-oid", pcall(callTreeAdd, tree, testfield.basic.REL_OID, tvb_bytes:range()))
+ addMatchFields(rel_oid_match_fields, barray_bytes)
+
+ testlib.test(OTHER, "add_pfield-rel_oid", treeAddPField(tree, testfield.basic.REL_OID,
+ tvb_bytes:range(), ENC_BIG_ENDIAN) )
+ addMatchFields(rel_oid_match_fields, barray_bytes)
+
+ verifyFields("basic.REL_OID", rel_oid_match_fields)
+
+ -- TODO: a FT_GUID is not really a ByteArray, so we can't simply treat it as one
+ -- local barray_guid = ByteArray.new("00FF0001 80001234 567890AB CDEF00FF")
+ -- local tvb_guid = barray_guid:tvb("Basic GUID")
+ -- local guid_match_fields = {}
+
+ -- testlib.test(OTHER, "basic-guid", pcall(callTreeAdd, tree, testfield.basic.GUID, tvb_guid:range()) )
+ -- addMatchFields(guid_match_fields, barray_guid)
+
+ -- testlib.test(OTHER, "add_pfield-guid", treeAddPField(tree, testfield.basic.GUID,
+ -- tvb_guid:range(), ENC_BIG_ENDIAN) )
+ -- addMatchFields(guid_match_fields, barray_guid)
+
+ -- verifyFields("basic.GUID", guid_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add ipv6")
+
+ local tvb = ByteArray.new("20010db8 00000000 0000ff00 00428329"):tvb("IPv6")
+ local IPv6 = testfield.basic.IPv6
+ local ipv6_match_fields = {}
+
+ testlib.test(OTHER, "ipv6", pcall (callTreeAdd, tree, IPv6, tvb:range(0,16)))
+ addMatchFields(ipv6_match_fields, Address.ipv6('2001:0db8:0000:0000:0000:ff00:0042:8329'))
+
+ verifyFields("basic.IPv6", ipv6_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add ipv4")
+
+ local tvb = ByteArray.new("7f000001"):tvb("IPv4")
+ local IPv4 = testfield.basic.IPv4
+ local ipv4_match_fields = {}
+
+ testlib.test(OTHER, "ipv4", pcall (callTreeAdd, tree, IPv4, tvb:range(0,4)))
+ addMatchFields(ipv4_match_fields, Address.ip('127.0.0.1'))
+
+ -- TODO: currently, tree:add_le only works for numeric values, not IPv4
+ -- addresses. Test this in the future.
+
+ -- testlib.test(OTHER, "ipv4", pcall (callTreeAddLE, tree, IPv4, tvb:range(0,4)))
+ -- addMatchFields(ipv4_match_fields, Address.ip('1.0.0.127'))
+
+ verifyFields("basic.IPv4", ipv4_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add ether")
+
+ local tvb = ByteArray.new("010203040506"):tvb("Ether")
+ local tvb0 = ByteArray.new("000000000000"):tvb("Ether0")
+ local ether = testfield.basic.ETHER
+ local ether_match_fields = {}
+
+ testlib.test(OTHER, "ether", pcall (callTreeAdd, tree, ether, tvb:range(0,6)))
+ addMatchFields(ether_match_fields, Address.ether('01:02:03:04:05:06'))
+
+ testlib.test(OTHER, "ether0", pcall (callTreeAdd, tree, ether, tvb0:range(0,6)))
+ addMatchFields(ether_match_fields, Address.ether('11:22:33'))
+
+ verifyFields("basic.ETHER", ether_match_fields)
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add_packet_field Bytes")
+
+ resetResults()
+ bytes_match_fields = {}
+ local bytes_match_values = {}
+
+ -- something to make this easier to read
+ local function addMatch(...)
+ addMatchFieldValues(bytes_match_fields, bytes_match_values, ...)
+ end
+
+ local bytesstring1 = "deadbeef0123456789DEADBEEFabcdef"
+ local bytesstring = ByteArray.new(bytesstring1) -- the binary version of above, for comparing
+ local bytestvb1 = ByteArray.new(bytesstring1, true):tvb("Bytes hex-string 1")
+ local bytesstring2 = " de:ad:be:ef:01:23:45:67:89:DE:AD:BE:EF:ab:cd:ef"
+ local bytestvb2 = ByteArray.new(bytesstring2 .. "-f0-00 foobar", true):tvb("Bytes hex-string 2")
+
+ local bytestvb1_decode = bytestvb1:range():bytes(ENC_STR_HEX + ENC_SEP_NONE + ENC_SEP_COLON + ENC_SEP_DASH)
+ testlib.test(OTHER, "tvb_get_string_bytes", string.lower(tostring(bytestvb1_decode)) == string.lower(tostring(bytesstring1)))
+
+ testlib.test(OTHER, "add_pfield-bytes1", treeAddPField(tree, testfield.bytes.BYTES,
+ bytestvb1:range(),
+ ENC_STR_HEX + ENC_SEP_NONE +
+ ENC_SEP_COLON + ENC_SEP_DASH))
+ addMatch(bytesstring, string.len(bytesstring1))
+
+ testlib.test(OTHER, "add_pfield-bytes2", treeAddPField(tree, testfield.bytes.BYTES,
+ bytestvb2:range(),
+ ENC_STR_HEX + ENC_SEP_NONE +
+ ENC_SEP_COLON + ENC_SEP_DASH))
+ addMatch(bytesstring, string.len(bytesstring2))
+
+ verifyResults("add_pfield-bytes", bytes_match_values)
+ verifyFields("bytes.BYTES", bytes_match_fields)
+
+ -- extra test of ByteArray
+ local b64padded = ByteArray.new("dGVzdA==", true):base64_decode():raw()
+ local b64unpadded = ByteArray.new("dGVzdA", true):base64_decode():raw()
+ testlib.test(OTHER, "bytearray_base64_padded", b64padded == "test")
+ testlib.test(OTHER, "bytearray_base64_unpadded", b64unpadded == "test")
+
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add_packet_field OID")
+
+ resetResults()
+ bytes_match_fields = {}
+ bytes_match_values = {}
+
+ testlib.test(OTHER, "add_pfield-oid1", treeAddPField(tree, testfield.bytes.OID,
+ bytestvb1:range(),
+ ENC_STR_HEX + ENC_SEP_NONE +
+ ENC_SEP_COLON + ENC_SEP_DASH))
+ addMatch(bytesstring, string.len(bytesstring1))
+
+ testlib.test(OTHER, "add_pfield-oid2", treeAddPField(tree, testfield.bytes.OID,
+ bytestvb2:range(),
+ ENC_STR_HEX + ENC_SEP_NONE +
+ ENC_SEP_COLON + ENC_SEP_DASH))
+ addMatch(bytesstring, string.len(bytesstring2))
+
+ verifyResults("add_pfield-oid", bytes_match_values)
+ verifyFields("bytes.OID", bytes_match_fields)
+
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add_packet_field REL_OID")
+
+ resetResults()
+ bytes_match_fields = {}
+ bytes_match_values = {}
+
+ testlib.test(OTHER, "add_pfield-rel_oid1", treeAddPField(tree, testfield.bytes.REL_OID,
+ bytestvb1:range(),
+ ENC_STR_HEX + ENC_SEP_NONE +
+ ENC_SEP_COLON + ENC_SEP_DASH))
+ addMatch(bytesstring, string.len(bytesstring1))
+
+ testlib.test(OTHER, "add_pfield-rel_oid2", treeAddPField(tree, testfield.bytes.REL_OID,
+ bytestvb2:range(),
+ ENC_STR_HEX + ENC_SEP_NONE +
+ ENC_SEP_COLON + ENC_SEP_DASH))
+ addMatch(bytesstring, string.len(bytesstring2))
+
+ verifyResults("add_pfield-rel_oid", bytes_match_values)
+ verifyFields("bytes.REL_OID", bytes_match_fields)
+
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add Time")
+
+ local tvb = ByteArray.new("00000000 00000000 0000FF0F 00FF000F"):tvb("Time")
+ local ALOCAL = testfield.time.ABSOLUTE_LOCAL
+ local alocal_match_fields = {}
+
+ testlib.test(OTHER, "time-local", pcall (callTreeAdd, tree, ALOCAL, tvb:range(0,8)) )
+ addMatchFields(alocal_match_fields, NSTime())
+
+ testlib.test(OTHER, "time-local", pcall (callTreeAdd, tree, ALOCAL, tvb:range(8,8)) )
+ addMatchFields(alocal_match_fields, NSTime( 0x0000FF0F, 0x00FF000F) )
+
+ testlib.test(OTHER, "time-local-le", pcall (callTreeAddLE, tree, ALOCAL, tvb:range(0,8)) )
+ addMatchFields(alocal_match_fields, NSTime())
+
+ testlib.test(OTHER, "time-local-le", pcall (callTreeAddLE, tree, ALOCAL, tvb:range(8,8)) )
+ addMatchFields(alocal_match_fields, NSTime( 0x0FFF0000, 0x0F00FF00 ) )
+
+ verifyFields("time.ABSOLUTE_LOCAL", alocal_match_fields)
+
+ local AUTC = testfield.time.ABSOLUTE_UTC
+ local autc_match_fields = {}
+
+ testlib.test(OTHER, "time-utc", pcall (callTreeAdd, tree, AUTC, tvb:range(0,8)) )
+ addMatchFields(autc_match_fields, NSTime())
+
+ testlib.test(OTHER, "time-utc", pcall (callTreeAdd, tree, AUTC, tvb:range(8,8)) )
+ addMatchFields(autc_match_fields, NSTime( 0x0000FF0F, 0x00FF000F) )
+
+ testlib.test(OTHER, "time-utc-le", pcall (callTreeAddLE, tree, AUTC, tvb:range(0,8)) )
+ addMatchFields(autc_match_fields, NSTime())
+
+ testlib.test(OTHER, "time-utc-le", pcall (callTreeAddLE, tree, AUTC, tvb:range(8,8)) )
+ addMatchFields(autc_match_fields, NSTime( 0x0FFF0000, 0x0F00FF00 ) )
+
+ verifyFields("time.ABSOLUTE_UTC", autc_match_fields )
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add_packet_field Time bytes")
+
+ resetResults()
+ local autc_match_values = {}
+
+ -- something to make this easier to read
+ addMatch = function(...)
+ addMatchFieldValues(autc_match_fields, autc_match_values, ...)
+ end
+
+ -- tree:add_packet_field(ALOCAL, tvb:range(0,8), ENC_BIG_ENDIAN)
+ testlib.test(OTHER, "add_pfield-time-bytes-local", treeAddPField ( tree, AUTC, tvb:range(0,8), ENC_BIG_ENDIAN) )
+ addMatch( NSTime(), 8)
+
+ testlib.test(OTHER, "add_pfield-time-bytes-local", treeAddPField ( tree, AUTC, tvb:range(8,8), ENC_BIG_ENDIAN) )
+ addMatch( NSTime( 0x0000FF0F, 0x00FF000F), 16)
+
+ testlib.test(OTHER, "add_pfield-time-bytes-local-le", treeAddPField ( tree, AUTC, tvb:range(0,8), ENC_LITTLE_ENDIAN) )
+ addMatch( NSTime(), 8)
+
+ testlib.test(OTHER, "add_pfield-time-bytes-local-le", treeAddPField ( tree, AUTC, tvb:range(8,8), ENC_LITTLE_ENDIAN) )
+ addMatch( NSTime( 0x0FFF0000, 0x0F00FF00 ), 16)
+
+ verifyFields("time.ABSOLUTE_UTC", autc_match_fields)
+
+ verifyResults("add_pfield-time-bytes-local", autc_match_values)
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add_packet_field Time string ENC_ISO_8601_DATE_TIME")
+
+ resetResults()
+ autc_match_values = {}
+
+ local datetimestring1 = "2013-03-01T22:14:48+00:00" -- this is 1362176088 seconds epoch time
+ local tvb1 = ByteArray.new(datetimestring1, true):tvb("Date_Time string 1")
+ local datetimestring2 = " 2013-03-02T03:14:48+05:00" -- this is 1362176088 seconds epoch time
+ local tvb2 = ByteArray.new(datetimestring2 .. " foobar", true):tvb("Date_Time string 2")
+ local datetimestring3 = " 2013-03-01T16:44-05:30" -- this is 1362176040 seconds epoch time
+ local tvb3 = ByteArray.new(datetimestring3, true):tvb("Date_Time string 3")
+ local datetimestring4 = "2013-03-02T01:44:00+03:30" -- this is 1362176040 seconds epoch time
+ local tvb4 = ByteArray.new(datetimestring4, true):tvb("Date_Time string 4")
+ local datetimestring5 = "2013-03-01T22:14:48Z" -- this is 1362176088 seconds epoch time
+ local tvb5 = ByteArray.new(datetimestring5, true):tvb("Date_Time string 5")
+ local datetimestring6 = "2013-03-01T22:14Z" -- this is 1362176040 seconds epoch time
+ local tvb6 = ByteArray.new(datetimestring6, true):tvb("Date_Time string 6")
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb1:range(), ENC_ISO_8601_DATE_TIME) )
+ addMatch( NSTime( 1362176088, 0), string.len(datetimestring1))
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb2:range(), ENC_ISO_8601_DATE_TIME) )
+ addMatch( NSTime( 1362176088, 0), string.len(datetimestring2))
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb3:range(), ENC_ISO_8601_DATE_TIME) )
+ addMatch( NSTime( 1362176040, 0), string.len(datetimestring3))
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb4:range(), ENC_ISO_8601_DATE_TIME) )
+ addMatch( NSTime( 1362176040, 0), string.len(datetimestring4))
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb5:range(), ENC_ISO_8601_DATE_TIME) )
+ addMatch( NSTime( 1362176088, 0), string.len(datetimestring5))
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb6:range(), ENC_ISO_8601_DATE_TIME) )
+ addMatch( NSTime( 1362176040, 0), string.len(datetimestring6))
+
+ verifyFields("time.ABSOLUTE_UTC", autc_match_fields)
+
+ verifyResults("add_pfield-datetime-local", autc_match_values)
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add_packet_field Time string ENC_ISO_8601_DATE")
+
+ resetResults()
+ autc_match_values = {}
+
+ local datestring1 = "2013-03-01" -- this is 1362096000 seconds epoch time
+ local d_tvb1 = ByteArray.new(datestring1, true):tvb("Date string 1")
+ local datestring2 = " 2013-03-01" -- this is 1362096000 seconds epoch time
+ local d_tvb2 = ByteArray.new(datestring2 .. " foobar", true):tvb("Date string 2")
+
+ testlib.test(OTHER, "add_pfield-date-local", treeAddPField ( tree, AUTC, d_tvb1:range(), ENC_ISO_8601_DATE) )
+ addMatch( NSTime( 1362096000, 0), string.len(datestring1))
+
+ testlib.test(OTHER, "add_pfield-date-local", treeAddPField ( tree, AUTC, d_tvb2:range(), ENC_ISO_8601_DATE) )
+ addMatch( NSTime( 1362096000, 0), string.len(datestring2))
+
+ verifyFields("time.ABSOLUTE_UTC", autc_match_fields)
+
+ verifyResults("add_pfield-date-local", autc_match_values)
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add_packet_field Time string ENC_ISO_8601_TIME")
+
+ resetResults()
+ autc_match_values = {}
+
+ local timestring1 = "22:14:48" -- this is 80088 seconds
+ local t_tvb1 = ByteArray.new(timestring1, true):tvb("Time string 1")
+ local timestring2 = " 22:14:48" -- this is 80088 seconds
+ local t_tvb2 = ByteArray.new(timestring2 .. " foobar", true):tvb("Time string 2")
+
+ local now = os.date("!*t")
+ now.hour = 22
+ now.min = 14
+ now.sec = 48
+ local timebase = os.time( now )
+ timebase = timebase + timezone
+ print ("timebase = " .. tostring(timebase) .. ", timezone=" .. timezone)
+
+ testlib.test(OTHER, "add_pfield-time-local", treeAddPField ( tree, AUTC, t_tvb1:range(), ENC_ISO_8601_TIME) )
+ addMatch( NSTime( timebase, 0), string.len(timestring1))
+
+ testlib.test(OTHER, "add_pfield-time-local", treeAddPField ( tree, AUTC, t_tvb2:range(), ENC_ISO_8601_TIME) )
+ addMatch( NSTime( timebase, 0), string.len(timestring2))
+
+ verifyFields("time.ABSOLUTE_UTC", autc_match_fields)
+
+ verifyResults("add_pfield-time-local", autc_match_values)
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add_packet_field Time string ENC_IMF_DATE_TIME")
+
+ resetResults()
+ autc_match_values = {}
+
+ local imfstring1 = "Fri, 01 Mar 13 22:14:48 GMT" -- this is 1362176088 seconds epoch time
+ local imf_tvb1 = ByteArray.new(imfstring1, true):tvb("Internet Message Format Time string 1")
+ local imfstring2 = " Fri, 01 Mar 13 22:14:48 GMT" -- this is 1362176088 seconds epoch time
+ local imf_tvb2 = ByteArray.new(imfstring2 .. " foobar", true):tvb("Internet Message Format Time string 2")
+ local imfstring3 = "Fri, 01 Mar 2013 22:14:48 GMT" -- this is 1362176088 seconds epoch time
+ local imf_tvb3 = ByteArray.new(imfstring3, true):tvb("Internet Message Format Time string 3")
+ local imfstring4 = " Fri, 01 Mar 2013 22:14:48 GMT" -- this is 1362176088 seconds epoch time
+ local imf_tvb4 = ByteArray.new(imfstring4 .. " foobar", true):tvb("Internet Message Format Time string 4")
+
+ testlib.test(OTHER, "add_pfield-time-local", treeAddPField ( tree, AUTC, imf_tvb1:range(), ENC_IMF_DATE_TIME) )
+ addMatch( NSTime( 1362176088, 0), string.len(imfstring1))
+
+ testlib.test(OTHER, "add_pfield-time-local", treeAddPField ( tree, AUTC, imf_tvb2:range(), ENC_IMF_DATE_TIME) )
+ addMatch( NSTime( 1362176088, 0), string.len(imfstring2))
+
+ testlib.test(OTHER, "add_pfield-time-local", treeAddPField ( tree, AUTC, imf_tvb3:range(), ENC_IMF_DATE_TIME) )
+ addMatch( NSTime( 1362176088, 0), string.len(imfstring3))
+
+ testlib.test(OTHER, "add_pfield-time-local", treeAddPField ( tree, AUTC, imf_tvb4:range(), ENC_IMF_DATE_TIME) )
+ addMatch( NSTime( 1362176088, 0), string.len(imfstring4))
+
+ verifyFields("time.ABSOLUTE_UTC", autc_match_fields)
+
+ verifyResults("add_pfield-imf-date-time-local", autc_match_values)
+
+----------------------------------------
+ testlib.testing(OTHER, "tree:add_packet_field Time string ENC_ISO_8601_DATE_TIME_BASIC")
+
+ resetResults()
+ autc_match_values = {}
+
+ local datetimestring1 = "20130301T221448+0000" -- this is 1362176088 seconds epoch time
+ local tvb1 = ByteArray.new(datetimestring1, true):tvb("Date_Time string 1")
+ local datetimestring2 = " 20130301171448-0500" -- this is 1362176088 seconds epoch time
+ local tvb2 = ByteArray.new(datetimestring2 .. " foobar", true):tvb("Date_Time string 2")
+ local datetimestring3 = " 20130301T1644-0530" -- this is 1362176040 seconds epoch time
+ local tvb3 = ByteArray.new(datetimestring3, true):tvb("Date_Time string 3")
+ local datetimestring4 = "20130302 014400+0330" -- this is 1362176040 seconds epoch time
+ local tvb4 = ByteArray.new(datetimestring4, true):tvb("Date_Time string 4")
+ local datetimestring5 = "20130301T221448Z" -- this is 1362176088 seconds epoch time
+ local tvb5 = ByteArray.new(datetimestring5, true):tvb("Date_Time string 5")
+ local datetimestring6 = "201303012214Z" -- this is 1362176040 seconds epoch time
+ local tvb6 = ByteArray.new(datetimestring6, true):tvb("Date_Time string 6")
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb1:range(), ENC_ISO_8601_DATE_TIME_BASIC) )
+ addMatch( NSTime( 1362176088, 0), string.len(datetimestring1))
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb2:range(), ENC_ISO_8601_DATE_TIME_BASIC) )
+ addMatch( NSTime( 1362176088, 0), string.len(datetimestring2))
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb3:range(), ENC_ISO_8601_DATE_TIME_BASIC) )
+ addMatch( NSTime( 1362176040, 0), string.len(datetimestring3))
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb4:range(), ENC_ISO_8601_DATE_TIME_BASIC) )
+ addMatch( NSTime( 1362176040, 0), string.len(datetimestring4))
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb5:range(), ENC_ISO_8601_DATE_TIME_BASIC) )
+ addMatch( NSTime( 1362176088, 0), string.len(datetimestring5))
+
+ testlib.test(OTHER, "add_pfield-datetime-local", treeAddPField ( tree, AUTC, tvb6:range(), ENC_ISO_8601_DATE_TIME_BASIC) )
+ addMatch( NSTime( 1362176040, 0), string.len(datetimestring6))
+
+ verifyFields("time.ABSOLUTE_UTC", autc_match_fields)
+
+ verifyResults("add_pfield-datetime-local", autc_match_values)
+
+----------------------------------------
+ testlib.testing(OTHER, "TvbRange subsets")
+
+ resetResults()
+
+ local offset = 5
+ local len = 10
+ local b_offset = 3
+ local b_len = 2
+ local range
+ local range_raw
+ local expected
+
+ -- This is the same data from the "tree:add_packet_field Bytes" test
+ -- copied here for clarity
+ local bytesstring1 = "deadbeef0123456789DEADBEEFabcdef"
+ local bytestvb1 = ByteArray.new(bytesstring1, true):tvb("Bytes hex-string 1")
+
+ -- tvbrange with no offset or length (control test case)
+ range = bytestvb1()
+ range_raw = range:raw()
+ expected = range:bytes():raw()
+ testlib.test(OTHER, "tvbrange_raw", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(b_offset)
+ expected = range:bytes():raw(b_offset)
+ testlib.test(OTHER, "tvbrange_raw_offset", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(0, b_len)
+ expected = range:bytes():raw(0, b_len)
+ testlib.test(OTHER, "tvbrange_raw_len", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(b_offset, b_len)
+ expected = range:bytes():raw(b_offset, b_len)
+ testlib.test(OTHER, "tvbrange_raw_offset_len", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+
+ -- tvbrange with len only
+ range = bytestvb1(0, len)
+ range_raw = range:raw()
+ expected = range:bytes():raw()
+ testlib.test(OTHER, "tvbrange_len_raw", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(b_offset)
+ expected = range:bytes():raw(b_offset)
+ testlib.test(OTHER, "tvbrange_len_raw_offset", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(0, b_len)
+ expected = range:bytes():raw(0, b_len)
+ testlib.test(OTHER, "tvbrange_len_raw_len", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(b_offset, b_len)
+ expected = range:bytes():raw(b_offset, b_len)
+ testlib.test(OTHER, "tvbrange_len_raw_offset_len", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+
+ -- tvbrange with offset only
+ range = bytestvb1(offset)
+ range_raw = range:raw()
+ expected = range:bytes():raw()
+ testlib.test(OTHER, "tvbrange_offset_raw", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(b_offset)
+ expected = range:bytes():raw(b_offset)
+ testlib.test(OTHER, "tvbrange_offset_raw_offset", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(0, b_len)
+ expected = range:bytes():raw(0, b_len)
+ testlib.test(OTHER, "tvbrange_offset_raw_len", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(b_offset, b_len)
+ expected = range:bytes():raw(b_offset, b_len)
+ testlib.test(OTHER, "tvbrange_offset_raw_offset_len", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+
+ -- tvbrange with offset and len
+ range = bytestvb1(offset, len)
+ range_raw = range:raw()
+ expected = range:bytes():raw()
+ testlib.test(OTHER, "tvbrange_offset_len_raw", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(b_offset)
+ expected = range:bytes():raw(b_offset)
+ testlib.test(OTHER, "tvbrange_offset_len_raw_offset", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(0, b_len)
+ expected = range:bytes():raw(0, b_len)
+ testlib.test(OTHER, "tvbrange_offset_len_raw_len", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+ range_raw = range:raw(b_offset, b_len)
+ expected = range:bytes():raw(b_offset, b_len)
+ testlib.test(OTHER, "tvbrange_offset_len_raw_offset_len", range_raw == expected,
+ string.format('range_raw="%s" expected="%s"', range_raw, expected))
+
+----------------------------------------
+
+ testlib.pass(FRAME)
+end
+
+----------------------------------------
+-- we want to have our protocol dissection invoked for a specific UDP port,
+-- so get the udp dissector table and add our protocol to it
+DissectorTable.get("udp.port"):add(65333, test_proto)
+DissectorTable.get("udp.port"):add(65346, test_proto)
+
+print("test_proto dissector registered")
diff --git a/test/lua/unicode.lua b/test/lua/unicode.lua
new file mode 100644
index 0000000..3510e57
--- /dev/null
+++ b/test/lua/unicode.lua
@@ -0,0 +1,55 @@
+--
+-- Unicode tests
+--
+
+local errors = 0
+
+function assertEqual(what, a, b)
+ if a == b then
+ return true
+ end
+ print('ERROR:', what)
+ print('Expected:', tostring(a))
+ print(' Actual:', tostring(b))
+ errors = errors + 1
+end
+
+-- script name check
+local scriptname = (debug.getinfo(1, 'S').source or ''):gsub("^@.*[/\\]", "")
+assertEqual('script name', 'script-Ф-€-中.lua', scriptname)
+
+-- loadfile
+local code, err = loadfile('load-Ф-€-中.lua')
+assertEqual('loadfile', nil, err)
+assertEqual('loadfile contents', 'Contents of Ф-€-中', code and code())
+
+-- dofile
+local ok, result = pcall(dofile, 'load-Ф-€-中.lua')
+assertEqual('dofile pcall', true, ok)
+assertEqual('dofile contents', 'Contents of Ф-€-中', result)
+
+-- io.open (read)
+local fr, err = io.open('load-Ф-€-中.lua')
+assertEqual('io.open (read)', nil, err)
+assertEqual('io.read', 'return "Contents of Ф-€-中"\n', fr and fr:read('*a'))
+if fr then fr:close() end
+
+-- io.open (write)
+local fw, err = io.open('written-by-lua-Ф-€-中.txt', 'w')
+assertEqual('io.open (write)', nil, err)
+if fw then
+ local _, err = fw:write('Feedback from Lua: Ф-€-中\n')
+ assertEqual('io.write', nil, err)
+end
+if fw then fw:close() end
+
+-- Check for Unicode in personal plugins directory path.
+local pdir_expected = 'unicode-Ф-€-中-testcases'
+local pdir = Dir.personal_plugins_path()
+pdir = pdir:gsub('.*[/\\]unicode-.*-.*-testcases[/\\].*', pdir_expected)
+assertEqual('Unicode in Dir.personal_plugins_path', pdir_expected, pdir)
+
+if errors ~= 0 then
+ error('Failed tests: ' .. errors)
+end
+print("All tests passed!")
diff --git a/test/lua/util.lua b/test/lua/util.lua
new file mode 100644
index 0000000..0578a2a
--- /dev/null
+++ b/test/lua/util.lua
@@ -0,0 +1,118 @@
+-- test script for wslua utility functions
+
+local testlib = require("testlib")
+
+local GET_PREF = "get"
+local SET_PREF = "set"
+local RESET_PREF = "reset"
+local OTHER = "other"
+testlib.init( {
+ [GET_PREF] = 14,
+ [SET_PREF] = 37,
+ [RESET_PREF] = 11,
+ [OTHER] = 0
+} )
+
+local console_open
+
+--------------------------
+
+-- Note: This test expects some specific default values
+testlib.testing("get_preference")
+
+success = pcall(get_preference)
+testlib.test(GET_PREF,"get_preference-empty-0", not success)
+testlib.test(GET_PREF,"get_preference-empty-1",get_preference("") == nil)
+testlib.test(GET_PREF,"get_preference-unknown-0",get_preference("g") == nil)
+testlib.test(GET_PREF,"get_preference-unknown-1",get_preference("gui") == nil)
+testlib.test(GET_PREF,"get_preference-unknown-2",get_preference("gui.") == nil)
+testlib.test(GET_PREF,"get_preference-unknown-3",get_preference("gui.ask") == nil)
+testlib.test(GET_PREF,"get_preference-unknown-4",get_preference("ugi.ask_unsaved") == nil)
+testlib.test(GET_PREF,"get_preference-uint-0",get_preference("gui.fileopen.preview") == 3)
+testlib.test(GET_PREF,"get_preference-bool-0",get_preference("gui.ask_unsaved") == true)
+testlib.test(GET_PREF,"get_preference-bool-1",get_preference("gui.interfaces_show_hidden") == false)
+-- gui.console_open is persistent (in the Windows registry) and for that
+-- reason does not have a default value.
+console_open = get_preference("gui.console_open")
+testlib.test(GET_PREF,"get_preference-enum-0",console_open == "NEVER" or console_open == "AUTOMATIC" or console_open == "ALWAYS")
+testlib.test(GET_PREF,"get_preference-string-0",get_preference("gui.window_title") == "")
+testlib.test(GET_PREF,"get_preference-range-0",get_preference("http.tls.port") == "443")
+success = pcall(get_preference, "user_dlt.encaps_table")
+testlib.test(GET_PREF,"get_preference-uat-0", not success)
+
+--------------------------
+
+testlib.testing("set_preference")
+
+success = pcall(set_preference)
+testlib.test(SET_PREF,"set_preference-empty-0", not success)
+testlib.test(SET_PREF,"set_preference-empty-1",set_preference("") == nil)
+testlib.test(SET_PREF,"set_preference-unknown-0",set_preference("g") == nil)
+testlib.test(SET_PREF,"set_preference-unknown-1",set_preference("gui") == nil)
+testlib.test(SET_PREF,"set_preference-unknown-2",set_preference("gui.") == nil)
+testlib.test(SET_PREF,"set_preference-unknown-3",set_preference("gui.ask") == nil)
+testlib.test(SET_PREF,"set_preference-unknown-4",set_preference("ugi.ask_unsaved") == nil)
+success = pcall(set_preference,"gui.fileopen.preview")
+testlib.test(SET_PREF,"set_preference-uint-0", not success)
+success = pcall(set_preference,"gui.fileopen.preview",true)
+testlib.test(SET_PREF,"set_preference-uint-1", not success)
+success = pcall(set_preference,"gui.fileopen.preview","string")
+testlib.test(SET_PREF,"set_preference-uint-2", not success)
+testlib.test(SET_PREF,"set_preference-uint-3",set_preference("gui.fileopen.preview",3) == false)
+testlib.test(SET_PREF,"set_preference-uint-4",set_preference("gui.fileopen.preview",42) == true)
+testlib.test(SET_PREF,"set_preference-uint-4-get",get_preference("gui.fileopen.preview") == 42)
+success = pcall(set_preference,"gui.ask_unsaved")
+testlib.test(SET_PREF,"set_preference-bool-0", not success)
+success = pcall(set_preference,"gui.ask_unsaved",42)
+testlib.test(SET_PREF,"set_preference-bool-1", not success)
+success = pcall(set_preference,"gui.ask_unsaved","string")
+testlib.test(SET_PREF,"set_preference-bool-2", not success)
+testlib.test(SET_PREF,"set_preference-bool-3",set_preference("gui.ask_unsaved", true) == false)
+testlib.test(SET_PREF,"set_preference-bool-4",set_preference("gui.ask_unsaved", false) == true)
+success = pcall(set_preference,"gui.console_open")
+testlib.test(SET_PREF,"set_preference-enum-0", not success)
+success = pcall(set_preference,"gui.console_open",true)
+testlib.test(SET_PREF,"set_preference-enum-1", not success)
+-- false means unchanged
+testlib.test(SET_PREF,"set_preference-enum-2",set_preference("gui.console_open",console_open) == false)
+success = pcall(set_preference,"gui.window_title")
+testlib.test(SET_PREF,"set_preference-string-0", not success)
+success = pcall(set_preference,"gui.window_title",true)
+testlib.test(SET_PREF,"set_preference-string-1", not success)
+testlib.test(SET_PREF,"set_preference-string-2",set_preference("gui.window_title","Title") == true)
+testlib.test(SET_PREF,"set_preference-string-2-get",get_preference("gui.window_title") == "Title")
+testlib.test(SET_PREF,"set_preference-string-3",set_preference("gui.window_title","Title") == false)
+testlib.test(SET_PREF,"set_preference-string-4",set_preference("gui.window_title","") == true)
+testlib.test(SET_PREF,"set_preference-string-4-get",get_preference("gui.window_title") == "")
+testlib.test(SET_PREF,"set_preference-string-5",set_preference("gui.window_title","") == false)
+success = pcall(set_preference,"http.tls.port")
+testlib.test(SET_PREF,"set_preference-range-0", not success)
+success = pcall(set_preference,"http.tls.port","65536") -- Number too big
+testlib.test(SET_PREF,"set_preference-range-1", not success)
+success = pcall(set_preference,"http.tls.port","http") -- Syntax error
+testlib.test(SET_PREF,"set_preference-range-2", not success)
+testlib.test(SET_PREF,"set_preference-range-3",set_preference("http.tls.port","443") == false)
+testlib.test(SET_PREF,"set_preference-range-4",set_preference("http.tls.port","443-444") == true)
+testlib.test(SET_PREF,"set_preference-range-4-get",get_preference("http.tls.port") == "443-444")
+testlib.test(SET_PREF,"set_preference-range-5",set_preference("http.tls.port","443-444") == false)
+success = pcall(set_preference, "user_dlt.encaps_table")
+testlib.test(SET_PREF,"set_preference-uat-0", not success)
+
+--------------------------
+
+testlib.testing("reset_preference")
+
+success = pcall(reset_preference)
+testlib.test(RESET_PREF,"reset_preference-empty-0", not success)
+testlib.test(RESET_PREF,"reset_preference-empty-1",reset_preference("") == nil)
+testlib.test(RESET_PREF,"reset_preference-unknown-0",reset_preference("unknown") == nil)
+testlib.test(RESET_PREF,"reset_preference-uint-0",reset_preference("gui.fileopen.preview") == true)
+testlib.test(RESET_PREF,"reset_preference-uint-0-get",get_preference("gui.fileopen.preview") == 3)
+testlib.test(RESET_PREF,"reset_preference-bool-0",reset_preference("gui.ask_unsaved") == true)
+testlib.test(RESET_PREF,"reset_preference-bool-0-get",get_preference("gui.ask_unsaved") == true)
+testlib.test(RESET_PREF,"reset_preference-string-0",reset_preference("gui.window_title") == true)
+testlib.test(RESET_PREF,"reset_preference-string-0-get",get_preference("gui.window_title") == "")
+testlib.test(RESET_PREF,"reset_preference-range-0",reset_preference("http.tls.port") == true)
+testlib.test(RESET_PREF,"reset_preference-range-0-get",get_preference("http.tls.port") == "443")
+
+testlib.getResults()
diff --git a/test/lua/verify_dissector.lua b/test/lua/verify_dissector.lua
new file mode 100644
index 0000000..b391f85
--- /dev/null
+++ b/test/lua/verify_dissector.lua
@@ -0,0 +1,380 @@
+-- This is a test script for tshark.
+-- This script runs inside tshark.
+-- FIRST run tshark with the "dns_dissector.lua" plugin, with the dns_port.pcap file,
+-- and with full tree output (-V switch). Pipe that to a file named testin.txt.
+-- This verify script then reads in that testin.txt. The filename can be specified
+-- using the "verify_file" argument.
+--
+-- tshark -r bogus.cap -X lua_script:<path_to_testdir>/lua/verify_dns_dissector.lua
+
+local function testing(...)
+ print("---- Testing "..tostring(...).." ----")
+end
+
+local lines = {
+ {
+ "MyDNS Protocol",
+ "Transaction ID: 42",
+ "Flags: 0x0100",
+ "0... .... .... .... = Response: this is a query",
+ "[Expert Info (Chat/Request): DNS query message]",
+ "[DNS query message]",
+ "[Severity level: Chat]",
+ "[Group: Request]",
+ ".000 0... .... .... = Opcode: 0",
+ ".... ..0. .... .... = Truncated: False",
+ ".... ...1 .... .... = Recursion desired: yes",
+ ".... .... .0.. .... = World War Z - Reserved for future use: 0x0",
+ ".... .... ...0 .... = Checking disabled: False",
+ "Number of Questions: 1",
+ "Number of Answer RRs: 0",
+ "Number of Authority RRs: 0",
+ "Number of Additional RRs: 0",
+ "Queries",
+ "us.pool.ntp.org: type A (IPv4 host address) (1), class IN (Internet) (1)",
+ "Name: us.pool.ntp.org",
+ "[Name Length: 17]",
+ "[Label Count: 4]",
+ "Type: A (IPv4 host address) (1)",
+ "Class: IN (Internet) (1)",
+ },
+
+ {
+ "MyDNS Protocol",
+ "Transaction ID: 42",
+ "Flags: 0x8180",
+ "1... .... .... .... = Response: this is a response",
+ "[Expert Info (Chat/Response): It's a response!]",
+ "[It's a response!]",
+ "[Severity level: Chat]",
+ "[Group: Response]",
+ ".000 0... .... .... = Opcode: 0",
+ ".... .0.. .... .... = Authoritative: False",
+ ".... ..0. .... .... = Truncated: False",
+ ".... .... 1... .... = Recursion available: True",
+ ".... .... .0.. .... = World War Z - Reserved for future use: 0x0",
+ ".... .... ..0. .... = Authenticated: no",
+ ".... .... .... 0000 = Response code: No Error (0)",
+ ".... .... ...0 .... = Checking disabled: False",
+ "DNS answer to life, the universe, and everything",
+ "[Expert Info (Note/Comment): DNS answer to life, the universe, and everything]",
+ "[DNS answer to life, the universe, and everything]",
+ "[Severity level: Note]",
+ "[Group: Comment]",
+ "Number of Questions: 1",
+ "Number of Answer RRs: 15",
+ "Number of Authority RRs: 6",
+ "Number of Additional RRs: 2",
+ "Queries",
+ "us.pool.ntp.org: type A (IPv4 host address) (1), class IN (Internet) (1)",
+ "Name: us.pool.ntp.org",
+ "[Name Length: 17]",
+ "[Label Count: 4]",
+ "Type: A (IPv4 host address) (1)",
+ "Class: IN (Internet) (1)",
+ },
+
+ {
+ "MyDNS Protocol",
+ "Transaction ID: 43",
+ "Flags: 0x0100",
+ "0... .... .... .... = Response: this is a query",
+ "[Expert Info (Chat/Request): DNS query message]",
+ "[DNS query message]",
+ "[Severity level: Chat]",
+ "[Group: Request]",
+ ".000 0... .... .... = Opcode: 0",
+ ".... ..0. .... .... = Truncated: False",
+ ".... ...1 .... .... = Recursion desired: yes",
+ ".... .... .0.. .... = World War Z - Reserved for future use: 0x0",
+ ".... .... ...0 .... = Checking disabled: False",
+ "Number of Questions: 1",
+ "Number of Answer RRs: 0",
+ "Number of Authority RRs: 0",
+ "Number of Additional RRs: 0",
+ "Queries",
+ "us.pool.ntp.org: type A (IPv4 host address) (1), class IN (Internet) (1)",
+ "Name: us.pool.ntp.org",
+ "[Name Length: 17]",
+ "[Label Count: 4]",
+ "Type: A (IPv4 host address) (1)",
+ "Class: IN (Internet) (1)",
+ },
+
+ {
+ "MyDNS Protocol",
+ "Transaction ID: 43",
+ "Flags: 0x8180",
+ "1... .... .... .... = Response: this is a response",
+ "[Expert Info (Chat/Response): It's a response!]",
+ "[It's a response!]",
+ "[Severity level: Chat]",
+ "[Group: Response]",
+ ".000 0... .... .... = Opcode: 0",
+ ".... .0.. .... .... = Authoritative: False",
+ ".... ..0. .... .... = Truncated: False",
+ ".... .... 1... .... = Recursion available: True",
+ ".... .... .0.. .... = World War Z - Reserved for future use: 0x0",
+ ".... .... ..0. .... = Authenticated: no",
+ ".... .... .... 0000 = Response code: No Error (0)",
+ ".... .... ...0 .... = Checking disabled: False",
+ "Number of Questions: 1",
+ "Number of Answer RRs: 15",
+ "Number of Authority RRs: 6",
+ "Number of Additional RRs: 2",
+ "Queries",
+ "us.pool.ntp.org: type A (IPv4 host address) (1), class IN (Internet) (1)",
+ "Name: us.pool.ntp.org",
+ "[Name Length: 17]",
+ "[Label Count: 4]",
+ "Type: A (IPv4 host address) (1)",
+ "Class: IN (Internet) (1)",
+ },
+}
+
+-- we're going to see those two sets of output twice: both by the normal
+-- dissector, then the first one by the heuristic, then the second one by
+-- a conversation match
+local numtests = 1 + #lines[1] + #lines[2] + #lines[3] + #lines[4]
+
+local hasHeuristic = true
+
+local verify_file = "testin.txt"
+
+-- grab passed-in arguments
+local args = { ... }
+if #args > 0 then
+ for _, arg in ipairs(args) do
+ local name, value = arg:match("(.+)=(.+)")
+ if arg == "no_heur" then
+ numtests = numtests - 1
+ elseif name == "verify_file" and value then
+ verify_file = value
+ end
+ end
+end
+
+print("going to run "..numtests.." tests")
+
+-- for an example of what we're reading through to verify, look at end of this file
+print("opening file "..verify_file)
+local file = io.open(verify_file, "r")
+local line = file:read()
+
+local pktidx = 1
+local total = 0
+local found = false
+
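+-- Scan the tshark verbose output line by line: note each "Frame N:" header,
+-- count the single "[Heuristic dissector used]" line, and whenever the first
+-- line of the expected block for that frame appears, check that the rest of
+-- the block follows in order.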
+while line do
+ -- eat beginning whitespace
+ line = line:gsub("^%s+","",1)
+ if line:find("^Frame %d+:") then
+ pktidx = line:match("^Frame (%d+):")
+ testing("Frame "..pktidx)
+ pktidx = tonumber(pktidx)
+ if pktidx > 4 then pktidx = pktidx - 4 end
+ line = file:read()
+ elseif line:find("%[Heuristic dissector used%]") then
+ -- start again, because it now repeats
+ -- but we should not see this [Heuristic dissector used] line again
+ -- or it's an error in setting the conversation
+ if found then
+ error("Heuristic dissector ran twice - conversation setting not working?")
+ return
+ end
+ found = true
+ total = total + 1
+ line = file:read()
+ elseif line == lines[pktidx][1] then
+ -- we've matched the first line of our section
+ -- now verify the rest is sequential
+ for i, v in ipairs(lines[pktidx]) do
+ io.stdout:write("testing Frame "..pktidx..", line "..i.."...")
+ if not line then
+ -- ended too soon
+ io.stdout:write("failed!\n")
+ error("Ran out of file lines!")
+ return
+ end
+ -- eat beginning whitespace
+ line = line:gsub("^%s+","",1)
+ if line ~= v then
+ io.stdout:write("failed!\n")
+ print("Got this:'"..line.."', expected this:'"..v.."'")
+ error("mismatched lines!")
+ return
+ end
+ io.stdout:write("passed\n")
+ total = total + 1
+ line = file:read()
+ end
+ else
+ line = file:read()
+ end
+end
+
+print(total.." of "..numtests.." tests run and passed")
+
+if total ~= numtests then
+ error("Did not find all our lines to test!")
+ return
+end
+
+print("\n-----------------------------\n")
+-- must print out the following for success (the test shell script looks for this)
+print("All tests passed!\n\n")
+
+
+----------------------------------------------------------
+-- We should see something like this:
+--[[
+Frame 1: 75 bytes on wire (600 bits), 75 bytes captured (600 bits)
+ Encapsulation type: Ethernet (1)
+ Arrival Time: Sep 26, 2004 23:18:04.938672000 EDT
+ [Time shift for this packet: 0.000000000 seconds]
+ Epoch Time: 1096255084.938672000 seconds
+ [Time delta from previous captured frame: 0.000000000 seconds]
+ [Time delta from previous displayed frame: 0.000000000 seconds]
+ [Time since reference or first frame: 0.000000000 seconds]
+ Frame Number: 1
+ Frame Length: 75 bytes (600 bits)
+ Capture Length: 75 bytes (600 bits)
+ [Frame is marked: False]
+ [Frame is ignored: False]
+ [Protocols in frame: eth:ethertype:ip:udp:mydns]
+Ethernet II, Src: AmbitMic_6c:40:4e (00:d0:59:6c:40:4e), Dst: Cisco-Li_82:b2:53 (00:0c:41:82:b2:53)
+ Destination: Cisco-Li_82:b2:53 (00:0c:41:82:b2:53)
+ Address: Cisco-Li_82:b2:53 (00:0c:41:82:b2:53)
+ .... ..0. .... .... .... .... = LG bit: Globally unique address (factory default)
+ .... ...0 .... .... .... .... = IG bit: Individual address (unicast)
+ Source: AmbitMic_6c:40:4e (00:d0:59:6c:40:4e)
+ Address: AmbitMic_6c:40:4e (00:d0:59:6c:40:4e)
+ .... ..0. .... .... .... .... = LG bit: Globally unique address (factory default)
+ .... ...0 .... .... .... .... = IG bit: Individual address (unicast)
+ Type: IP (0x0800)
+Internet Protocol Version 4, Src: 192.168.50.50 (192.168.50.50), Dst: 192.168.0.1 (192.168.0.1)
+ Version: 4
+ Header Length: 20 bytes
+ Differentiated Services Field: 0x00 (DSCP 0x00: Default; ECN: 0x00: Not-ECT (Not ECN-Capable Transport))
+ 0000 00.. = Differentiated Services Codepoint: Default (0x00)
+ .... ..00 = Explicit Congestion Notification: Not-ECT (Not ECN-Capable Transport) (0x00)
+ Total Length: 61
+ Identification: 0x0a41 (2625)
+ Flags: 0x00
+ 0... .... = Reserved bit: Not set
+ .0.. .... = Don't fragment: Not set
+ ..0. .... = More fragments: Not set
+ Fragment offset: 0
+ Time to live: 128
+ Protocol: UDP (17)
+ Header checksum: 0x7ceb [correct]
+ [Good: True]
+ [Bad: False]
+ Source: 192.168.50.50 (192.168.50.50)
+ Destination: 192.168.0.1 (192.168.0.1)
+User Datagram Protocol, Src Port: 65282 (65282), Dst Port: 65333 (65333)
+ Source Port: 65282 (65282)
+ Destination Port: 65333 (65333)
+ Length: 41
+ Checksum: 0x07a9 [validation disabled]
+ [Good Checksum: False]
+ [Bad Checksum: False]
+ [Stream index: 0]
+MyDNS Protocol
+ Transaction ID: 43
+ Flags: 0x0100
+ 0... .... .... .... = Response: this is a query
+ .000 0... .... .... = Opcode: 0
+ .... ..0. .... .... = Truncated: False
+ .... ...1 .... .... = Recursion desired: yes
+ .... .... .0.. .... = World War Z - Reserved for future use: 0x0
+ .... .... ...0 .... = Checking disabled: False
+ Number of Questions: 1
+ Number of Answer RRs: 0
+ Number of Authority RRs: 0
+ Number of Additional RRs: 0
+ Queries
+ us.pool.ntp.org: type A (IPv4 host address) (1), class IN (Internet) (1)
+ Name: us.pool.ntp.org
+ [Name Length: 17]
+ [Label Count: 4]
+ Type: A (IPv4 host address) (1)
+ Class: IN (Internet) (1)
+
+Frame 2: 540 bytes on wire (4320 bits), 540 bytes captured (4320 bits)
+ Encapsulation type: Ethernet (1)
+ Arrival Time: Sep 26, 2004 23:18:04.945618000 EDT
+ [Time shift for this packet: 0.000000000 seconds]
+ Epoch Time: 1096255084.945618000 seconds
+ [Time delta from previous captured frame: 0.006946000 seconds]
+ [Time delta from previous displayed frame: 0.006946000 seconds]
+ [Time since reference or first frame: 0.006946000 seconds]
+ Frame Number: 2
+ Frame Length: 540 bytes (4320 bits)
+ Capture Length: 540 bytes (4320 bits)
+ [Frame is marked: False]
+ [Frame is ignored: False]
+ [Protocols in frame: eth:ethertype:ip:udp:mydns]
+Ethernet II, Src: Cisco-Li_82:b2:53 (00:0c:41:82:b2:53), Dst: AmbitMic_6c:40:4e (00:d0:59:6c:40:4e)
+ Destination: AmbitMic_6c:40:4e (00:d0:59:6c:40:4e)
+ Address: AmbitMic_6c:40:4e (00:d0:59:6c:40:4e)
+ .... ..0. .... .... .... .... = LG bit: Globally unique address (factory default)
+ .... ...0 .... .... .... .... = IG bit: Individual address (unicast)
+ Source: Cisco-Li_82:b2:53 (00:0c:41:82:b2:53)
+ Address: Cisco-Li_82:b2:53 (00:0c:41:82:b2:53)
+ .... ..0. .... .... .... .... = LG bit: Globally unique address (factory default)
+ .... ...0 .... .... .... .... = IG bit: Individual address (unicast)
+ Type: IP (0x0800)
+Internet Protocol Version 4, Src: 192.168.0.1 (192.168.0.1), Dst: 192.168.50.50 (192.168.50.50)
+ Version: 4
+ Header Length: 20 bytes
+ Differentiated Services Field: 0x00 (DSCP 0x00: Default; ECN: 0x00: Not-ECT (Not ECN-Capable Transport))
+ 0000 00.. = Differentiated Services Codepoint: Default (0x00)
+ .... ..00 = Explicit Congestion Notification: Not-ECT (Not ECN-Capable Transport) (0x00)
+ Total Length: 526
+ Identification: 0x2153 (8531)
+ Flags: 0x00
+ 0... .... = Reserved bit: Not set
+ .0.. .... = Don't fragment: Not set
+ ..0. .... = More fragments: Not set
+ Fragment offset: 0
+ Time to live: 63
+ Protocol: UDP (17)
+ Header checksum: 0xa508 [correct]
+ [Good: True]
+ [Bad: False]
+ Source: 192.168.0.1 (192.168.0.1)
+ Destination: 192.168.50.50 (192.168.50.50)
+User Datagram Protocol, Src Port: 65333 (65333), Dst Port: 65282 (65282)
+ Source Port: 65333 (65333)
+ Destination Port: 65282 (65282)
+ Length: 506
+ Checksum: 0xf9d5 [validation disabled]
+ [Good Checksum: False]
+ [Bad Checksum: False]
+ [Stream index: 0]
+MyDNS Protocol
+ Transaction ID: 43
+ Flags: 0x8180
+ 1... .... .... .... = Response: this is a response
+ .000 0... .... .... = Opcode: 0
+ .... .0.. .... .... = Authoritative: False
+ .... ..0. .... .... = Truncated: False
+ .... .... 1... .... = Recursion available: True
+ .... .... .0.. .... = World War Z - Reserved for future use: 0x0
+ .... .... ..0. .... = Authenticated: no
+ .... .... .... 0000 = Response code: No Error (0)
+ .... .... ...0 .... = Checking disabled: False
+ Number of Questions: 1
+ Number of Answer RRs: 15
+ Number of Authority RRs: 6
+ Number of Additional RRs: 2
+ Queries
+ us.pool.ntp.org: type A (IPv4 host address) (1), class IN (Internet) (1)
+ Name: us.pool.ntp.org
+ [Name Length: 17]
+ [Label Count: 4]
+ Type: A (IPv4 host address) (1)
+ Class: IN (Internet) (1)
+]]
+
diff --git a/test/lua/verify_globals.lua b/test/lua/verify_globals.lua
new file mode 100644
index 0000000..dbed8ce
--- /dev/null
+++ b/test/lua/verify_globals.lua
@@ -0,0 +1,135 @@
+-- verify_globals.lua
+
+-- ignore things that change on different machines or every release
+-- the following items still have to exist, but their values don't have to match
+local filter = {
+ -- differences by machine
+ "DATA_DIR",
+ "USER_DIR",
+ "package.cpath",
+ "package.path",
+ "package.loaded",
+ "run_user_scripts_when_superuser",
+ "running_superuser",
+
+ -- differences in Lua versions
+ "_VERSION",
+ "package.config",
+
+ -- differences caused by changes in wireshark 1.11
+ "NSTime",
+ "Proto",
+ 'Listener["<metatable>"].__index',
+ ".__index"
+ }
+
+-- the following items don't have to exist
+local ignore = {
+ -- not sure why this was removed in wireshark 1.11, but it was
+ "TreeItem.set_expert_flags",
+
+ -- in Lua 5.1 only
+ "debug.getfenv",
+ "debug.setfenv",
+ "gcinfo",
+ "getfenv",
+ "io.gfind",
+ "setfenv",
+ "math.mod",
+ "newproxy",
+ "string.gfind",
+ "table.foreach",
+ "table.foreachi",
+ "table.getn",
+ "table.setn",
+
+ -- in Lua 5.2+ only
+ "bit32",
+ "debug.getuservalu",
+ "debug.setuservalu",
+ "debug.upvalueid",
+ "debug.upvaluejoin",
+ "package.searchers",
+ "package.searchpath",
+ "rawlen",
+ "table.pack",
+ "table.unpack",
+
+}
+
+
+local arg={...} -- get passed-in args
+
+-- arg1 = path to find inspect.lua
+-- arg2 = filename to read in (optional; if omitted, the current globals are just printed)
+-- arg3 = 'verify' to verify everything in the read-in file is in _G (default); 'new' to output all items in _G that are not in the read-in file
+-- arg4 = 'nometa' to ignore metatables; 'meta' otherwise (default)
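+--
+-- Illustrative only (not part of the original script): one plausible way to pass
+-- these args is via tshark's lua_script options, where each numbered lua_script1
+-- argument becomes the next entry in the {...} vararg table read below, e.g.:
+--
+--   tshark -q -r some.pcap -X lua_script:verify_globals.lua \
+--       -X lua_script1:lua/ -X lua_script1:globals_2.2.txt -X lua_script1:verify
+--
+-- (the capture file name here is hypothetical; globals_2.2.txt is the reference
+-- globals dump shipped alongside this script)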
+
+local add_path = "lua/?.lua;"
+if #arg > 0 then
+ add_path = arg[1].."?.lua;"
+end
+
+print("package.path = " .. package.path)
+
+-- need the path to find inspect.lua
+local old_path = package.path
+package.path = add_path .. package.path
+
+local inspect = require("inspect")
+
+package.path = old_path -- return path to original
+
+print("-- Wireshark version: " .. get_version())
+
+if #arg == 1 then
+ -- no more args, so just output globals
+ print(inspect(_G, { serialize = true, filter = inspect.makeFilter(filter) }))
+ return
+end
+
+local file = assert(io.open(arg[2], "r"))
+local input = file:read("*all")
+input = inspect.marshal(input)
+
+local nometa = false
+if #arg > 3 and arg[4] == "nometa" then
+ nometa = true
+end
+
+if #arg == 2 or arg[3] == "verify" then
+ print(string.rep("\n", 2))
+ print("Verifying input file '"..arg[2].."' is contained within the global table")
+ local ret, diff = inspect.compare(input, _G, {
+ ['filter'] = inspect.makeFilter(filter),
+ ['ignore'] = inspect.makeFilter(ignore),
+ ['nonumber'] = true,
+ ['nometa'] = nometa
+ })
+ if not ret then
+ print("Comparison failed - global table does not have all the items in the input file!")
+ print(string.rep("\n", 2))
+ print(string.rep("-", 80))
+ print("Differences are:")
+ print(inspect(diff))
+ else
+ print("\n-----------------------------\n")
+ print("All tests passed!\n\n")
+ end
+ return
+elseif #arg > 2 and arg[3] == "new" then
+ local ret, diff = inspect.compare(_G, input, {
+ ['filter'] = inspect.makeFilter(filter),
+ ['ignore'] = inspect.makeFilter(ignore),
+ ['nonumber'] = true,
+ ['keep'] = true,
+ ['nometa'] = nometa
+ })
+ if not ret then
+ print(inspect(diff))
+ else
+ print("\n-----------------------------\n")
+ print("No new items!\n\n")
+ end
+end
+