author    Daniel Baumann <daniel.baumann@progress-linux.org>  2024-09-19 04:14:33 +0000
committer Daniel Baumann <daniel.baumann@progress-linux.org>  2024-09-19 04:14:33 +0000
commit    9f153fbfec0fb9c9ce38e749a7c6f4a5e115d4e9 (patch)
tree      2784370cda9bbf2da9114d70f05399c0b229d28c /tools/asterix
parent    Adding debian version 4.2.6-1. (diff)
Merging upstream version 4.4.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'tools/asterix')
-rw-r--r--   tools/asterix/README.md                   |   2
-rwxr-xr-x   tools/asterix/convertspec.py              | 339
-rw-r--r--   tools/asterix/packet-asterix-template.c   | 113
-rwxr-xr-x   tools/asterix/update-specs.py             | 189
4 files changed, 483 insertions, 160 deletions
diff --git a/tools/asterix/README.md b/tools/asterix/README.md
index d7b2101f..e936930d 100644
--- a/tools/asterix/README.md
+++ b/tools/asterix/README.md
@@ -3,7 +3,7 @@
*Asterix* is a set of standards, where each standard is defined
as so called *asterix category*.
In addition, each *asterix category* is potentially released
-in number of editions. There is no guarantie about backward
+in number of editions. There is no guarantee about backward
compatibility between the editions.
The structured version of asterix specifications is maintained
diff --git a/tools/asterix/convertspec.py b/tools/asterix/convertspec.py
new file mode 100755
index 00000000..18f81798
--- /dev/null
+++ b/tools/asterix/convertspec.py
@@ -0,0 +1,339 @@
+#!/usr/bin/env python3
+#
+# By Zoran Bošnjak <zoran.bosnjak@sloveniacontrol.si>
+#
+# Convert json from new to old format
+#
+# SPDX-License-Identifier: GPL-2.0-or-later
+#
+
+import sys
+import argparse
+import json
+
+def split(obj):
+ return (obj['tag'], obj['contents'])
+
+def handle_uap(obj):
+ t, cont = split(obj)
+ def f(i):
+ t, name = split(i)
+ if t == 'UapItem':
+ return name
+ elif t == 'UapItemRFS':
+ return 'RFS'
+ else:
+ return None
+ if t == 'Uap':
+ return {
+ 'type': 'uap',
+ 'items': [f(i) for i in cont],
+ }
+ elif t == 'Uaps':
+ def var(i):
+ name, lst = i
+ return {
+ 'name': name,
+ 'items': [f(i) for i in lst],
+ }
+ return {
+ 'type': 'uaps',
+ 'selector': {
+ 'name': cont['selector']['item'],
+ 'rules': cont['selector']['cases'],
+ },
+ 'variations': [var(i) for i in cont['cases']],
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def handle_number(obj):
+ t, cont = split(obj)
+ if t == 'NumInt':
+ return {
+ 'type': 'Integer',
+ 'value': cont,
+ }
+ elif t == 'NumDiv':
+ return {
+ 'type': 'Div',
+ 'numerator': handle_number(cont['numerator']),
+ 'denominator': handle_number(cont['denominator']),
+ }
+ elif t == 'NumPow':
+ return {
+ 'type': 'Pow',
+ 'base': cont['base'],
+ 'exponent': cont['exponent'],
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def handle_signedness(obj):
+ t, cont = split(obj)
+ if t == 'Signed':
+ return True
+ elif t == 'Unsigned':
+ return False
+ else:
+ raise Exception('unexpected', t)
+
+def handle_constrain(obj):
+ t, cont = split(obj)
+ if t == 'EqualTo': s = '=='
+ elif t == 'NotEqualTo': s = '/='
+ elif t == 'GreaterThan': s = '>'
+ elif t == 'GreaterThanOrEqualTo': s = '>='
+ elif t == 'LessThan': s = '<'
+ elif t == 'LessThanOrEqualTo': s = '<='
+ else:
+ raise Exception('unexpected', t)
+ return {
+ 'type': s,
+ 'value': handle_number(cont),
+ }
+
+def handle_content(obj):
+ t, cont = split(obj)
+ if t == 'ContentRaw':
+ return {
+ 'type': 'Raw',
+ }
+ elif t == 'ContentTable':
+ return {
+ 'type': 'Table',
+ 'values': cont,
+ }
+ elif t == 'ContentString':
+ return {
+ 'type': 'String',
+ 'variation': cont['tag'],
+ }
+ elif t == 'ContentInteger':
+ return {
+ 'type': 'Integer',
+ 'signed': handle_signedness(cont['signedness']),
+ 'constraints': [handle_constrain(i) for i in cont['constraints']],
+ }
+ elif t == 'ContentQuantity':
+ return {
+ 'type': 'Quantity',
+ 'constraints': [handle_constrain(i) for i in cont['constraints']],
+ 'lsb': handle_number(cont['lsb']),
+ 'signed': handle_signedness(cont['signedness']),
+ 'unit': cont['unit'],
+ }
+ elif t == 'ContentBds':
+ def f(obj):
+ t, cont = split(obj)
+ if t == 'BdsWithAddress':
+ return {
+ 'type': 'BdsWithAddress',
+ }
+ elif t == 'BdsAt':
+ return {
+ 'type': 'BdsAt',
+ 'address': hex(cont)[2:] if cont is not None else None,
+ }
+ else:
+ raise Exception('unexpected', t)
+ return {
+ 'type': 'Bds',
+ 'variation': f(cont),
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def handle_rule(f, obj):
+ t, cont = split(obj)
+ if t == 'ContextFree':
+ return {
+ 'type': 'ContextFree',
+ 'value': f(cont)
+ }
+ elif t == 'Dependent':
+ def g(i):
+ a, b = i
+ return [
+ a,
+ f(b),
+ ]
+ return {
+ 'type': 'Dependent',
+ 'items': cont['path'],
+ 'default': f(cont['default']),
+ 'cases': [g(i) for i in cont['cases']],
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def handle_item(obj):
+ t, cont = split(obj)
+ if t == 'Spare':
+ return {
+ 'length': cont,
+ 'spare': True,
+ }
+ elif t == 'Item':
+ return handle_nonspare(cont)
+ else:
+ raise Exception('unexpected', t)
+
+def handle_maybe(f, obj):
+ if obj is None:
+ return None
+ return f(obj)
+
+def handle_variation(obj):
+ t, cont = split(obj)
+ if t == 'Element':
+ return {
+ 'type': t,
+ 'size': cont['bitSize'],
+ 'rule': handle_rule(handle_content, cont['rule']),
+ }
+ elif t == 'Group':
+ return {
+ 'type': t,
+ 'items': [handle_item(i) for i in cont]
+ }
+ elif t == 'Extended':
+ return {
+ 'type': t,
+ 'items': [handle_maybe(handle_item, i) for i in cont],
+ }
+ elif t == 'Repetitive':
+ def f(obj):
+ t, cont = split(obj)
+ if t == 'RepetitiveRegular':
+ return {
+ 'type': 'Regular',
+ 'size': cont['byteSize']*8,
+ }
+ elif t == 'RepetitiveFx':
+ return {
+ 'type': 'Fx',
+ }
+ else:
+ raise Exception('unexpected', t)
+ return {
+ 'type': t,
+ 'rep': f(cont['type']),
+ 'variation': handle_variation(cont['variation']),
+ }
+ elif t == 'Explicit':
+ def f(obj):
+ if obj is None:
+ return None
+ t, cont = split(obj)
+ if t == 'ReservedExpansion':
+ return 'RE'
+ elif t == 'SpecialPurpose':
+ return 'SP'
+ else:
+ raise Exception('unexpected', t)
+ return {
+ 'type': t,
+ 'expl': f(cont),
+ }
+ elif t == 'Compound':
+ return {
+ 'type': t,
+ 'fspec': None,
+ 'items': [handle_maybe(handle_nonspare, i) for i in cont],
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def handle_nonspare(obj):
+ doc = obj['documentation']
+ return {
+ 'definition': doc['definition'],
+ 'description': doc['description'],
+ 'name': obj['name'],
+ 'remark': doc['remark'],
+ 'rule': handle_rule(handle_variation, obj['rule']),
+ 'spare': False,
+ 'title': obj['title'],
+ }
+
+def has_rfs(obj):
+ t, cont = split(obj)
+ def check(obj):
+ t, cont = split(obj)
+ return t == 'UapItemRFS'
+ if t == 'Uap':
+ return any(check(i) for i in cont)
+ elif t == 'Uaps':
+ for (uap_name, lst) in cont['cases']:
+ if any(check(i) for i in lst):
+ return True
+ return False
+ else:
+ raise Exception('unexpected', t)
+
+def handle_asterix(obj):
+ t, cont = split(obj)
+ if t == 'AsterixBasic':
+ catalogue = [handle_nonspare(i) for i in cont['catalogue']]
+ if has_rfs(cont['uap']):
+ catalogue.append({
+ "definition": "Random Field Sequencing\n",
+ "description": None,
+ "name": "RFS",
+ "remark": None,
+ "rule": {
+ "type": "ContextFree",
+ "value": {
+ "type": "Rfs"
+ }
+ },
+ "spare": False,
+ "title": "Random Field Sequencing",
+ })
+ return {
+ 'catalogue': catalogue,
+ 'date': cont['date'],
+ 'edition': cont['edition'],
+ 'number': cont['category'],
+ 'preamble': cont['preamble'],
+ 'title': cont['title'],
+ 'type': 'Basic',
+ 'uap': handle_uap(cont['uap']),
+ }
+ elif t == 'AsterixExpansion':
+ return {
+ 'date': cont['date'],
+ 'edition': cont['edition'],
+ 'number': cont['category'],
+ 'title': cont['title'],
+ 'type': 'Expansion',
+ 'variation': {
+ 'fspec': cont['fspecByteSize']*8,
+ 'items': [handle_maybe(handle_nonspare, i) for i in cont['items']],
+ 'type': 'Compound',
+ },
+ }
+ else:
+ raise Exception('unexpected', t)
+
+def main():
+ parser = argparse.ArgumentParser(description='Convert json from new to old format.')
+ parser.add_argument('--in-place', action='store_true')
+ parser.add_argument('path')
+ args = parser.parse_args()
+
+ with open(args.path, 'r') as f:
+ s1 = f.read()
+
+ obj = handle_asterix(json.loads(s1))
+ s2 = json.dumps(obj, ensure_ascii=False, sort_keys=True, indent=4)
+
+ if args.in_place:
+ with open(args.path, 'w') as f:
+ f.write(s2)
+ else:
+ print(s2)
+
+if __name__ == '__main__':
+ main()
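As an illustration (not part of the patch): convertspec.py turns the upstream "new" JSON encoding, where every variant is a {"tag": ..., "contents": ...} pair, into the flatter "old" structure that update-specs.py consumes. A minimal sketch of what handle_number() does, using a hypothetical LSB of 360/2^13 whose input literal is only an assumed example:

```python
import convertspec

# Hypothetical "new format" number: 360 / 2**13.
new_lsb = {
    "tag": "NumDiv",
    "contents": {
        "numerator":   {"tag": "NumInt", "contents": 360},
        "denominator": {"tag": "NumPow", "contents": {"base": 2, "exponent": 13}},
    },
}

old_lsb = convertspec.handle_number(new_lsb)
# old_lsb == {
#     "type": "Div",
#     "numerator":   {"type": "Integer", "value": 360},
#     "denominator": {"type": "Pow", "base": 2, "exponent": 13},
# }
```

The script also works standalone: `./convertspec.py <path>` prints the converted document, and `./convertspec.py --in-place <path>` rewrites the file in place.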
diff --git a/tools/asterix/packet-asterix-template.c b/tools/asterix/packet-asterix-template.c
index e655cfd7..d584f1cf 100644
--- a/tools/asterix/packet-asterix-template.c
+++ b/tools/asterix/packet-asterix-template.c
@@ -49,22 +49,22 @@ void proto_reg_handoff_asterix(void);
#define MAX_DISSECT_STR 1024
#define MAX_BUFFER 256
-static int proto_asterix = -1;
-
-static int hf_asterix_category = -1;
-static int hf_asterix_length = -1;
-static int hf_asterix_message = -1;
-static int hf_asterix_fspec = -1;
-static int hf_re_field_len = -1;
-static int hf_spare = -1;
-static int hf_counter = -1;
-static int hf_XXX_FX = -1;
-
-static int ett_asterix = -1;
-static int ett_asterix_category = -1;
-static int ett_asterix_length = -1;
-static int ett_asterix_message = -1;
-static int ett_asterix_subtree = -1;
+static int proto_asterix;
+
+static int hf_asterix_category;
+static int hf_asterix_length;
+static int hf_asterix_message;
+static int hf_asterix_fspec;
+static int hf_re_field_len;
+static int hf_spare;
+static int hf_counter;
+static int hf_XXX_FX;
+
+static int ett_asterix;
+static int ett_asterix_category;
+static int ett_asterix_length;
+static int ett_asterix_message;
+static int ett_asterix_subtree;
static dissector_handle_t asterix_handle;
/* The following defines tell us how to decode the length of
@@ -102,22 +102,20 @@ struct FieldPart_s {
const char *format_string; /* format string for showing float values */
};
-DIAG_OFF_PEDANTIC
typedef struct AsterixField_s AsterixField;
struct AsterixField_s {
- uint8_t type; /* type of field */
- unsigned length; /* fixed length */
- unsigned repetition_counter_size; /* size of repetition counter, length of one item is in length */
- unsigned header_length; /* the size is in first header_length bytes of the field */
- int *hf; /* pointer to Wireshark hf_register_info */
- const FieldPart **part; /* Look declaration and description of FieldPart above. */
- const AsterixField *field[]; /* subfields */
+ uint8_t type; /* type of field */
+ unsigned length; /* fixed length */
+ unsigned repetition_counter_size; /* size of repetition counter, length of one item is in length */
+ unsigned header_length; /* the size is in first header_length bytes of the field */
+ int *hf; /* pointer to Wireshark hf_register_info */
+ const FieldPart * const *part; /* Look declaration and description of FieldPart above. */
+ const AsterixField * const field[]; /* subfields */
};
-DIAG_ON_PEDANTIC
static void dissect_asterix_packet (tvbuff_t *, packet_info *pinfo, proto_tree *);
static void dissect_asterix_data_block (tvbuff_t *tvb, packet_info *pinfo, unsigned, proto_tree *, uint8_t, int);
-static int dissect_asterix_fields (tvbuff_t *, packet_info *pinfo, unsigned, proto_tree *, uint8_t, const AsterixField *[]);
+static int dissect_asterix_fields (tvbuff_t *, packet_info *pinfo, unsigned, proto_tree *, uint8_t, const AsterixField * const []);
static void asterix_build_subtree (tvbuff_t *, packet_info *pinfo, unsigned, proto_tree *, const AsterixField *);
static void twos_complement (int64_t *, int);
@@ -125,8 +123,8 @@ static uint8_t asterix_bit (uint8_t, uint8_t);
static unsigned asterix_fspec_len (tvbuff_t *, unsigned);
static uint8_t asterix_field_exists (tvbuff_t *, unsigned, int);
static uint8_t asterix_get_active_uap (tvbuff_t *, unsigned, uint8_t);
-static int asterix_field_length (tvbuff_t *, unsigned, const AsterixField *);
-static int asterix_field_offset (tvbuff_t *, unsigned, const AsterixField *[], int);
+static int asterix_field_length (tvbuff_t *, unsigned, const AsterixField * const);
+static int asterix_field_offset (tvbuff_t *, unsigned, const AsterixField * const [], int);
static int asterix_message_length (tvbuff_t *, unsigned, uint8_t, uint8_t);
static const char AISCode[] = { ' ', 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O',
@@ -149,7 +147,6 @@ static const FieldPart IXXX_6bit_spare = { 6, 1.0, FIELD_PART_UINT, NULL, NULL }
static const FieldPart IXXX_7bit_spare = { 7, 1.0, FIELD_PART_UINT, NULL, NULL };
/* Spare Item */
-DIAG_OFF_PEDANTIC
static const AsterixField IX_SPARE = { FIXED, 0, 0, 0, &hf_spare, NULL, { NULL } };
/* insert1 */
@@ -469,8 +466,8 @@ static void dissect_asterix_packet (tvbuff_t *tvb, packet_info *pinfo, proto_tre
* The User Application Profile (UAP) is simply a mapping from the
* FSPEC to fields. Each category has its own UAP.
*/
- category = tvb_get_guint8 (tvb, i);
- length = (tvb_get_guint8 (tvb, i + 1) << 8) + tvb_get_guint8 (tvb, i + 2) - 3; /* -3 for category and length */
+ category = tvb_get_uint8 (tvb, i);
+ length = (tvb_get_uint8 (tvb, i + 1) << 8) + tvb_get_uint8 (tvb, i + 2) - 3; /* -3 for category and length */
asterix_packet_item = proto_tree_add_item (tree, proto_asterix, tvb, i, length + 3, ENC_NA);
proto_item_append_text (asterix_packet_item, ", Category %03d", category);
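Purely illustrative, outside the patch itself: as the comment above says, the UAP is a positional mapping from FSPEC bits to data items. A rough Python sketch of that mapping, assuming the usual ASTERIX FSPEC layout (FRN 1 in the most significant bit, the least significant bit of each octet acting as the FX extension flag); `present_items` is a made-up name:

```python
def present_items(fspec: bytes):
    """Return the 0-based UAP indices flagged present in an ASTERIX FSPEC."""
    items = []
    for byte_index, octet in enumerate(fspec):
        for bit in range(7):                 # seven data bits per octet, MSB first
            if octet & (0x80 >> bit):
                items.append(byte_index * 7 + bit)
        if not (octet & 0x01):               # FX bit clear: the FSPEC ends here
            break
    return items

print(present_items(b"\xf0"))   # [0, 1, 2, 3] -> the first four UAP items are present
```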
@@ -513,7 +510,9 @@ static void dissect_asterix_data_block (tvbuff_t *tvb, packet_info *pinfo, unsig
}
}
-static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned offset, proto_tree *tree, uint8_t category, const AsterixField *current_uap[])
+// We're transported over UDP and our offset always advances.
+// NOLINTNEXTLINE(misc-no-recursion)
+static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned offset, proto_tree *tree, uint8_t category, const AsterixField * const current_uap [])
{
unsigned i, j, size, start, len, inner_offset, fspec_len;
uint64_t counter;
@@ -536,13 +535,13 @@ static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned o
asterix_field_tree = proto_item_add_subtree (asterix_field_item, ett_asterix_subtree);
fspec_len = asterix_fspec_len (tvb, offset + start);
proto_tree_add_item (asterix_field_tree, hf_asterix_fspec, tvb, offset + start, fspec_len, ENC_NA);
- dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, (const AsterixField **)current_uap[i]->field);
+ dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, current_uap[i]->field);
break;
case REPETITIVE:
asterix_field_item = proto_tree_add_item (tree, *current_uap[i]->hf, tvb, offset + start, len, ENC_NA);
asterix_field_tree = proto_item_add_subtree (asterix_field_item, ett_asterix_subtree);
for (j = 0, counter = 0; j < current_uap[i]->repetition_counter_size; j++) {
- counter = (counter << 8) + tvb_get_guint8 (tvb, offset + start + j);
+ counter = (counter << 8) + tvb_get_uint8 (tvb, offset + start + j);
}
proto_tree_add_item (asterix_field_tree, hf_counter, tvb, offset + start, current_uap[i]->repetition_counter_size, ENC_BIG_ENDIAN);
for (j = 0, inner_offset = 0; j < counter; j++, inner_offset += current_uap[i]->length) {
@@ -559,7 +558,7 @@ static int dissect_asterix_fields (tvbuff_t *tvb, packet_info *pinfo, unsigned o
start++;
fspec_len = asterix_fspec_len (tvb, offset + start);
proto_tree_add_item (asterix_field_tree, hf_asterix_fspec, tvb, offset + start, fspec_len, ENC_NA);
- dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, (const AsterixField **)current_uap[i]->field);
+ dissect_asterix_fields (tvb, pinfo, offset + start, asterix_field_tree, category, current_uap[i]->field);
break;*/
default: /* FIXED, FX, FX_1, FX_UAP */
asterix_field_item = proto_tree_add_item (tree, *current_uap[i]->hf, tvb, offset + start, len, ENC_NA);
@@ -650,7 +649,7 @@ static void asterix_build_subtree (tvbuff_t *tvb, packet_info *pinfo, unsigned o
case FIELD_PART_IAS_IM:
/* special processing for I021/150 and I062/380#4 because Air Speed depends on IM subfield */
air_speed_im_bit = wmem_new (pinfo->pool, uint8_t);
- *air_speed_im_bit = (tvb_get_guint8 (tvb, offset_in_tvb) & 0x80) >> 7;
+ *air_speed_im_bit = (tvb_get_uint8 (tvb, offset_in_tvb) & 0x80) >> 7;
/* Save IM info for the packet. key = 21150. */
p_add_proto_data (pinfo->pool, pinfo, proto_asterix, 21150, air_speed_im_bit);
proto_tree_add_item (parent, *field->part[i]->hf, tvb, offset_in_tvb, length_in_tvb, ENC_BIG_ENDIAN);
@@ -683,8 +682,8 @@ static uint8_t asterix_bit (uint8_t b, uint8_t bitNo)
* If the number is positive, all other bits must remain 0. */
static void twos_complement (int64_t *v, int bit_len)
{
- if (*v & (G_GUINT64_CONSTANT(1) << (bit_len - 1))) {
- *v |= (G_GUINT64_CONSTANT(0xffffffffffffffff) << bit_len);
+ if (*v & (UINT64_C(1) << (bit_len - 1))) {
+ *v |= (UINT64_C(0xffffffffffffffff) << bit_len);
}
}
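A side note, not part of the diff: the sign extension done by twos_complement() can be spelled out with a worked example. The Python helper below only restates the same arithmetic, with explicit masking to mimic the 64-bit C width (`sign_extend` is a made-up name):

```python
def sign_extend(v: int, bit_len: int) -> int:
    # If the sign bit of the bit_len-wide value is set, fill every bit above it,
    # then reinterpret the resulting 64-bit pattern as a signed integer.
    if v & (1 << (bit_len - 1)):
        v |= (0xFFFFFFFFFFFFFFFF << bit_len) & 0xFFFFFFFFFFFFFFFF
    return v - (1 << 64) if v >= (1 << 63) else v

assert sign_extend(0x7F, 8) == 127     # sign bit clear: value unchanged
assert sign_extend(0xFF, 8) == -1      # 8-bit 1111_1111 is -1 in two's complement
assert sign_extend(0x2AB, 10) == -341  # 10-bit example
```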
@@ -692,7 +691,7 @@ static unsigned asterix_fspec_len (tvbuff_t *tvb, unsigned offset)
{
unsigned i;
unsigned max_length = tvb_reported_length (tvb) - offset;
- for (i = 0; (tvb_get_guint8 (tvb, offset + i) & 1) && i < max_length; i++);
+ for (i = 0; (tvb_get_uint8 (tvb, offset + i) & 1) && i < max_length; i++);
return i + 1;
}
@@ -701,16 +700,20 @@ static uint8_t asterix_field_exists (tvbuff_t *tvb, unsigned offset, int bitInde
uint8_t bitNo, i;
bitNo = bitIndex + bitIndex / 7;
for (i = 0; i < bitNo / 8; i++) {
- if (!(tvb_get_guint8 (tvb, offset + i) & 1)) return 0;
+ if (!(tvb_get_uint8 (tvb, offset + i) & 1)) return 0;
}
- return asterix_bit (tvb_get_guint8 (tvb, offset + i), bitNo % 8);
+ return asterix_bit (tvb_get_uint8 (tvb, offset + i), bitNo % 8);
}
-static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixField *field)
+// We're transported over UDP and our offset always advances.
+// NOLINTNEXTLINE(misc-no-recursion)
+static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixField * const field)
{
+ unsigned bit_size;
unsigned size;
uint64_t count;
uint8_t i;
+ bool should_break;
size = 0;
switch(field->type) {
@@ -719,20 +722,26 @@ static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixFi
break;
case REPETITIVE:
for (i = 0, count = 0; i < field->repetition_counter_size && i < sizeof (count); i++)
- count = (count << 8) + tvb_get_guint8 (tvb, offset + i);
+ count = (count << 8) + tvb_get_uint8 (tvb, offset + i);
size = (unsigned)(field->repetition_counter_size + count * field->length);
break;
case FX:
- for (size = field->length + field->header_length; tvb_get_guint8 (tvb, offset + size - 1) & 1; size += field->length);
+ for (i = 0, bit_size = 0; field->part[i] != NULL; i++) {
+ // We don't need to shift value as FX bits are always at the end
+ should_break = field->part[i]->type == FIELD_PART_FX && !(tvb_get_uint8 (tvb, offset + bit_size / 8) & 1);
+ bit_size += field->part[i]->bit_length;
+ if (should_break) break;
+ }
+ size = bit_size / 8;
break;
case EXP:
for (i = 0, size = 0; i < field->header_length; i++) {
- size = (size << 8) + tvb_get_guint8 (tvb, offset + i);
+ size = (size << 8) + tvb_get_uint8 (tvb, offset + i);
}
break;
case COMPOUND:
/* FSPEC */
- for (size = 0; tvb_get_guint8 (tvb, offset + size) & 1; size++);
+ for (size = 0; tvb_get_uint8 (tvb, offset + size) & 1; size++);
size++;
for (i = 0; field->field[i] != NULL; i++) {
@@ -748,17 +757,17 @@ static int asterix_field_length (tvbuff_t *tvb, unsigned offset, const AsterixFi
static uint8_t asterix_get_active_uap (tvbuff_t *tvb, unsigned offset, uint8_t category)
{
int i, inner_offset;
- AsterixField **current_uap;
+ AsterixField const * const *current_uap;
if ((category == 1) && (categories[category] != NULL)) { /* if category is supported */
if (categories[category][global_categories_version[category]][1] != NULL) { /* if exists another uap */
- current_uap = (AsterixField **)categories[category][global_categories_version[category]][0];
+ current_uap = categories[category][global_categories_version[category]][0];
if (current_uap != NULL) {
inner_offset = asterix_fspec_len (tvb, offset);
for (i = 0; current_uap[i] != NULL; i++) {
if (asterix_field_exists (tvb, offset, i)) {
if (i == 1) { /* uap selector (I001/020) is always at index '1' */
- return tvb_get_guint8 (tvb, offset + inner_offset) >> 7;
+ return tvb_get_uint8 (tvb, offset + inner_offset) >> 7;
}
inner_offset += asterix_field_length (tvb, offset + inner_offset, current_uap[i]);
}
@@ -769,7 +778,7 @@ static uint8_t asterix_get_active_uap (tvbuff_t *tvb, unsigned offset, uint8_t c
return 0;
}
-static int asterix_field_offset (tvbuff_t *tvb, unsigned offset, const AsterixField *current_uap[], int field_index)
+static int asterix_field_offset (tvbuff_t *tvb, unsigned offset, const AsterixField * const current_uap[], int field_index)
{
int i, inner_offset;
inner_offset = 0;
@@ -786,10 +795,10 @@ static int asterix_field_offset (tvbuff_t *tvb, unsigned offset, const AsterixFi
static int asterix_message_length (tvbuff_t *tvb, unsigned offset, uint8_t category, uint8_t active_uap)
{
int i, size;
- AsterixField **current_uap;
+ AsterixField const * const *current_uap;
if (categories[category] != NULL) { /* if category is supported */
- current_uap = (AsterixField **)categories[category][global_categories_version[category]][active_uap];
+ current_uap = categories[category][global_categories_version[category]][active_uap];
if (current_uap != NULL) {
size = asterix_fspec_len (tvb, offset);
for (i = 0; current_uap[i] != NULL; i++) {
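For illustration only: a behavioural change in the hunks above is the FX case of asterix_field_length(), where the length of an extended field is now derived from the declared FieldPart bit lengths, stopping once an FX part's bit reads 0, instead of scanning octets for the FX bit. A rough Python restatement, where the `(bit_length, is_fx)` tuples merely stand in for the FieldPart table:

```python
def fx_field_length(data: bytes, offset: int, parts) -> int:
    """parts: iterable of (bit_length, is_fx) pairs describing the field layout."""
    bit_size = 0
    for bit_length, is_fx in parts:
        # FX bits always sit at the end of their octet, so no shifting is needed;
        # test the extension bit before adding the FX part's own length.
        stop = is_fx and not (data[offset + bit_size // 8] & 0x01)
        bit_size += bit_length
        if stop:
            break
    return bit_size // 8
```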
diff --git a/tools/asterix/update-specs.py b/tools/asterix/update-specs.py
index 7af735dc..03850c50 100755
--- a/tools/asterix/update-specs.py
+++ b/tools/asterix/update-specs.py
@@ -20,6 +20,8 @@ import os
import sys
import re
+import convertspec as convert
+
# Path to default upstream repository
upstream_repo = 'https://zoranbosnjak.github.io/asterix-specs'
dissector_file = 'epan/dissectors/packet-asterix.c'
@@ -68,42 +70,15 @@ class Context(object):
self.offset = Offset()
def get_number(value):
- """Get Natural/Real/Rational number as an object."""
- class Integer(object):
- def __init__(self, val):
- self.val = val
- def __str__(self):
- return '{}'.format(self.val)
- def __float__(self):
- return float(self.val)
-
- class Ratio(object):
- def __init__(self, a, b):
- self.a = a
- self.b = b
- def __str__(self):
- return '{}/{}'.format(self.a, self.b)
- def __float__(self):
- return float(self.a) / float(self.b)
-
- class Real(object):
- def __init__(self, val):
- self.val = val
- def __str__(self):
- return '{0:f}'.format(self.val).rstrip('0')
- def __float__(self):
- return float(self.val)
-
t = value['type']
- val = value['value']
-
if t == 'Integer':
- return Integer(int(val))
- if t == 'Ratio':
- x, y = val['numerator'], val['denominator']
- return Ratio(x, y)
- if t == 'Real':
- return Real(float(val))
+ return float(value['value'])
+ if t == 'Div':
+ a = get_number(value['numerator'])
+ b = get_number(value['denominator'])
+ return a/b
+ if t == 'Pow':
+ return float(pow(value['base'], value['exponent']))
raise Exception('unexpected value type {}'.format(t))
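(Sketch, not part of the patch.) The simplified get_number() above evaluates the converted number objects directly to a float; with the Div/Pow structure that convertspec.py emits for 360/2^13:

```python
lsb = {"type": "Div",
       "numerator":   {"type": "Integer", "value": 360},
       "denominator": {"type": "Pow", "base": 2, "exponent": 13}}

get_number(lsb)   # == 360.0 / 2**13 == 0.0439453125
```

get_scaling() then renders this value with '{}'.format(...), i.e. as the string '0.0439453125'.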
def replace_string(s, mapping):
@@ -132,19 +107,10 @@ def safe_string(s):
def get_scaling(content):
"""Get scaling factor from the content."""
- k = content.get('scaling')
- if k is None:
+ lsb = content.get('lsb')
+ if lsb is None:
return None
- k = get_number(k)
-
- fract = content['fractionalBits']
-
- if fract > 0:
- scale = format(float(k) / (pow(2, fract)), '.29f')
- scale = scale.rstrip('0')
- else:
- scale = format(float(k))
- return scale
+ return '{}'.format(get_number(lsb))
def get_fieldpart(content):
"""Get FIELD_PART* from the content."""
@@ -297,14 +263,12 @@ def reference(cat, edition, path):
return('{:03d}_{}'.format(cat, name))
return('{:03d}_V{}_{}_{}'.format(cat, edition['major'], edition['minor'], name))
-def get_content(rule):
+def get_rule(rule):
t = rule['type']
- # Most cases are 'ContextFree', use as specified.
if t == 'ContextFree':
- return rule['content']
- # Handle 'Dependent' contents as 'Raw'.
+ return rule['value']
elif t == 'Dependent':
- return {'type': "Raw"}
+ return rule['default']
else:
raise Exception('unexpected type: {}'.format(t))
@@ -313,7 +277,7 @@ def get_bit_size(item):
if item['spare']:
return item['length']
else:
- return item['variation']['size']
+ return get_rule(item['rule'])['size']
def get_description(item, content=None):
"""Return item description."""
@@ -336,12 +300,18 @@ def generate_group(item, variation=None):
level2['is_generated'] = True
if variation is None:
level1 = copy(item)
- level1['variation'] = {
- 'type': 'Group',
- 'items': [level2],
+ level1['rule'] = {
+ 'type': 'ContextFree',
+ 'value': {
+ 'type': 'Group',
+ 'items': [level2],
+ },
}
else:
- level2['variation'] = variation['variation']
+ level2['rule'] = {
+ 'type': 'ContextFree',
+ 'value': variation,
+ }
level1 = {
'type': "Group",
'items': [level2],
@@ -353,15 +323,18 @@ def is_generated(item):
def ungroup(item):
"""Convert group of items of known size to element"""
- n = sum([get_bit_size(i) for i in item['variation']['items']])
+ n = sum([get_bit_size(i) for i in get_rule(item['rule'])['items']])
result = copy(item)
- result['variation'] = {
- 'rule': {
- 'content': {'type': 'Raw'},
- 'type': 'ContextFree',
+ result['rule'] = {
+ 'type': 'ContextFree',
+ 'value': {
+ 'type': 'Element',
+ 'size': n,
+ 'rule': {
+ 'type': 'ContextFree',
+ 'value': {'type': 'Raw'},
+ },
},
- 'size': n,
- 'type': 'Element',
}
return result
@@ -397,9 +370,9 @@ def part1(ctx, get_ref, catalogue):
return '&I{}_{}'.format(ref, item['name'])
if t == 'Element':
- tell('static int hf_{} = -1;'.format(ref))
+ tell('static int hf_{};'.format(ref))
n = variation['size']
- content = get_content(variation['rule'])
+ content = get_rule(variation['rule'])
scaling = get_scaling(content)
scaling = scaling if scaling is not None else 1.0
fp = get_fieldpart(content)
@@ -425,12 +398,12 @@ def part1(ctx, get_ref, catalogue):
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
- tell('static int hf_{} = -1;'.format(ref))
+ tell('static int hf_{};'.format(ref))
for i in variation['items']:
handle_item(path, i)
# FieldPart[]
- tell('static const FieldPart *I{}_PARTS[] = {}'.format(ref,'{'))
+ tell('static const FieldPart * const I{}_PARTS[] = {}'.format(ref,'{'))
for i in variation['items']:
tell(' {},'.format(part_of(i)))
tell(' NULL')
@@ -450,15 +423,15 @@ def part1(ctx, get_ref, catalogue):
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
- tell('static int hf_{} = -1;'.format(ref))
+ tell('static int hf_{};'.format(ref))
items = []
for i in variation['items']:
if i is None:
items.append(i)
continue
- if i.get('variation') is not None:
- if i['variation']['type'] == 'Group':
+ if i.get('rule') is not None:
+ if get_rule(i['rule'])['type'] == 'Group':
i = ungroup(i)
items.append(i)
@@ -468,7 +441,7 @@ def part1(ctx, get_ref, catalogue):
else:
handle_item(path, i)
- tell('static const FieldPart *I{}_PARTS[] = {}'.format(ref,'{'))
+ tell('static const FieldPart * const I{}_PARTS[] = {}'.format(ref,'{'))
for i in items:
if i is None:
tell(' &IXXX_FX,')
@@ -479,12 +452,10 @@ def part1(ctx, get_ref, catalogue):
tell('};')
# AsterixField
- first_part = list(takewhile(lambda x: x is not None, items))
- n = (sum([get_bit_size(i) for i in first_part]) + 1) // 8
parts = 'I{}_PARTS'.format(ref)
comp = '{ NULL }'
- tell('static const AsterixField I{} = {} FX, {}, 0, {}, &hf_{}, {}, {} {};'.format
- (ref, '{', n, 0, ref, parts, comp, '}'))
+ tell('static const AsterixField I{} = {} FX, 0, 0, 0, &hf_{}, {}, {} {};'.format
+ (ref, '{', ref, parts, comp, '}'))
elif t == 'Repetitive':
ctx.reset_offset()
@@ -492,7 +463,7 @@ def part1(ctx, get_ref, catalogue):
# Group is required below this item.
if variation['variation']['type'] == 'Element':
- subvar = generate_group(item, variation)
+ subvar = generate_group(item, variation['variation'])
else:
subvar = variation['variation']
handle_variation(path, subvar)
@@ -509,14 +480,14 @@ def part1(ctx, get_ref, catalogue):
elif t == 'Explicit':
ctx.reset_offset()
- tell('static int hf_{} = -1;'.format(ref))
+ tell('static int hf_{};'.format(ref))
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
tell('static const AsterixField I{} = {} EXP, 0, 0, 1, &hf_{}, NULL, {} NULL {} {};'.format(ref, '{', ref, '{', '}', '}'))
elif t == 'Compound':
ctx.reset_offset()
- tell('static int hf_{} = -1;'.format(ref))
+ tell('static int hf_{};'.format(ref))
description = get_description(item)
tell_pr(' {} &hf_{}, {} "{}", "asterix.{}", FT_NONE, BASE_NONE, NULL, 0x00, NULL, HFILL {} {},'.format('{', ref, '{', description, ref, '}', '}'))
comp = '{'
@@ -525,7 +496,7 @@ def part1(ctx, get_ref, catalogue):
comp += ' &IX_SPARE,'
continue
# Group is required below this item.
- if i['variation']['type'] == 'Element':
+ if get_rule(i['rule'])['type'] == 'Element':
subitem = generate_group(i)
else:
subitem = i
@@ -545,30 +516,36 @@ def part1(ctx, get_ref, catalogue):
return
# Group is required on the first level.
- if path == [] and item['variation']['type'] == 'Element':
- variation = generate_group(item)['variation']
+ if path == [] and get_rule(item['rule'])['type'] == 'Element':
+ variation = get_rule(generate_group(item)['rule'])
else:
- variation = item['variation']
+ variation = get_rule(item['rule'])
handle_variation(path + [item['name']], variation)
for item in catalogue:
# adjust 'repetitive fx' item
- if item['variation']['type'] == 'Repetitive' and item['variation']['rep']['type'] == 'Fx':
- var = item['variation']['variation'].copy()
+ if get_rule(item['rule'])['type'] == 'Repetitive' and get_rule(item['rule'])['rep']['type'] == 'Fx':
+ var = get_rule(item['rule'])['variation'].copy()
if var['type'] != 'Element':
raise Exception("Expecting 'Element'")
item = item.copy()
- item['variation'] = {
- 'type': 'Extended',
- 'items': [{
- 'definition': None,
- 'description': None,
- 'name': 'Subitem',
- 'remark': None,
- 'spare': False,
- 'title': 'Subitem',
- 'variation': var,
+ item['rule'] = {
+ 'type': 'ContextFree',
+ 'value': {
+ 'type': 'Extended',
+ 'items': [{
+ 'definition': None,
+ 'description': None,
+ 'name': 'Subitem',
+ 'remark': None,
+ 'spare': False,
+ 'title': 'Subitem',
+ 'rule': {
+ 'type': 'ContextFree',
+ 'value': var,
+ },
}, None]
+ }
}
handle_item([], item)
tell('')
@@ -577,7 +554,6 @@ def part2(ctx, ref, uap):
"""Generate UAPs"""
tell = lambda s: ctx.tell('insert1', s)
- tell('DIAG_OFF_PEDANTIC')
ut = uap['type']
if ut == 'uap':
@@ -588,7 +564,7 @@ def part2(ctx, ref, uap):
raise Exception('unexpected uap type {}'.format(ut))
for var in variations:
- tell('static const AsterixField *I{}_{}[] = {}'.format(ref, var['name'], '{'))
+ tell('static const AsterixField * const I{}_{}[] = {}'.format(ref, var['name'], '{'))
for i in var['items']:
if i is None:
tell(' &IX_SPARE,')
@@ -597,12 +573,11 @@ def part2(ctx, ref, uap):
tell(' NULL')
tell('};')
- tell('static const AsterixField **I{}[] = {}'.format(ref, '{'))
+ tell('static const AsterixField * const * const I{}[] = {}'.format(ref, '{'))
for var in variations:
tell(' I{}_{},'.format(ref, var['name']))
tell(' NULL')
tell('};')
- tell('DIAG_ON_PEDANTIC')
tell('')
def part3(ctx, specs):
@@ -620,9 +595,7 @@ def part3(ctx, specs):
editions = sorted([val['edition'] for val in lst], key = lambda x: (x['major'], x['minor']), reverse=True)
editions_fmt = [fmt_edition(cat, edition) for edition in editions]
editions_str = ', '.join(['I{:03d}'.format(cat)] + editions_fmt)
- tell('DIAG_OFF_PEDANTIC')
- tell('static const AsterixField ***I{:03d}all[] = {} {} {};'.format(cat, '{', editions_str, '}'))
- tell('DIAG_ON_PEDANTIC')
+ tell('static const AsterixField * const * const * const I{:03d}all[] = {} {} {};'.format(cat, '{', editions_str, '}'))
tell('')
tell('static const enum_val_t I{:03d}_versions[] = {}'.format(cat, '{'))
@@ -646,7 +619,7 @@ def part4(ctx, cats):
tell = lambda s: ctx.tell('insert1', s)
tell_pr = lambda s: ctx.tell('insert3', s)
- tell('static const AsterixField ****categories[] = {')
+ tell('static const AsterixField * const * const * const * const categories[] = {')
for i in range(0, 256):
val = 'I{:03d}all'.format(i) if i in cats else 'NULL'
tell(' {}, /* {:03d} */'.format(val, i))
@@ -683,7 +656,7 @@ def remove_rfs(spec):
catalogue = [] # create new catalogue without RFS
rfs_items = []
for i in spec['catalogue']:
- if i['variation']['type'] == 'Rfs':
+ if get_rule(i['rule'])['type'] == 'Rfs':
rfs_items.append(i['name'])
else:
catalogue.append(i)
@@ -716,7 +689,7 @@ def is_valid(spec):
def check_item(item):
if item['spare']:
return True
- return check_variation(item['variation'])
+ return check_variation(get_rule(item['rule']))
def check_variation(variation):
t = variation['type']
if t == 'Element':
@@ -757,6 +730,7 @@ def main():
# read and json-decode input files
jsons = load_jsons(args.paths)
jsons = [json.loads(i) for i in jsons]
+ jsons = [convert.handle_asterix(i) for i in jsons]
jsons = sorted(jsons, key = lambda x: (x['number'], x['edition']['major'], x['edition']['minor']))
jsons = [spec for spec in jsons if spec['type'] == 'Basic']
jsons = [remove_rfs(spec) for spec in jsons]
@@ -780,13 +754,15 @@ def main():
for spec in jsons:
is_latest = spec['edition'] == latest_editions[spec['number']]
- ctx.tell('insert1', '/* Category {:03d}, edition {}.{} */'.format(spec['number'], spec['edition']['major'], spec['edition']['minor']))
+ ctx.tell('insert1', '/* Category {:03d}, edition {}.{} */'.format(
+ spec['number'], spec['edition']['major'], spec['edition']['minor']))
# handle part1
get_ref = lambda path: reference(spec['number'], spec['edition'], path)
part1(ctx, get_ref, spec['catalogue'])
if is_latest:
- ctx.tell('insert1', '/* Category {:03d}, edition {}.{} (latest) */'.format(spec['number'], spec['edition']['major'], spec['edition']['minor']))
+ ctx.tell('insert1', '/* Category {:03d}, edition {}.{} (latest) */'.format(
+ spec['number'], spec['edition']['major'], spec['edition']['minor']))
get_ref = lambda path: reference(spec['number'], None, path)
part1(ctx, get_ref, spec['catalogue'])
@@ -826,4 +802,3 @@ def main():
if __name__ == '__main__':
main()
-