author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-14 20:19:53 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-14 20:19:53 +0000
commit     e7ee850d46d54789979bf0c5244bae1825fb7149 (patch)
tree       6e94ed55df9ec749682a3c792ce752d07892b968 /_test/lib/test_tokens.py
parent     Initial commit. (diff)
Adding upstream version 0.91.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat
-rw-r--r--  _test/lib/test_tokens.py | 93
1 file changed, 93 insertions, 0 deletions
diff --git a/_test/lib/test_tokens.py b/_test/lib/test_tokens.py
new file mode 100644
index 0000000..8c213fd
--- /dev/null
+++ b/_test/lib/test_tokens.py
@@ -0,0 +1,93 @@
+# Skipped because we have no idea where all those fixtures originate
+import pytest
+
+pytestmark = pytest.mark.skip
+
+import pprint
+
+import ruyaml
+
+# Tokens mnemonic:
+# directive: %
+# document_start: ---
+# document_end: ...
+# alias: *
+# anchor: &
+# tag: !
+# scalar: _
+# block_sequence_start: [[
+# block_mapping_start: {{
+# block_end: ]}
+# flow_sequence_start: [
+# flow_sequence_end: ]
+# flow_mapping_start: {
+# flow_mapping_end: }
+# entry: ,
+# key: ?
+# value: :
+
+_replaces = {
+ ruyaml.DirectiveToken: '%',
+ ruyaml.DocumentStartToken: '---',
+ ruyaml.DocumentEndToken: '...',
+ ruyaml.AliasToken: '*',
+ ruyaml.AnchorToken: '&',
+ ruyaml.TagToken: '!',
+ ruyaml.ScalarToken: '_',
+ ruyaml.BlockSequenceStartToken: '[[',
+ ruyaml.BlockMappingStartToken: '{{',
+ ruyaml.BlockEndToken: ']}',
+ ruyaml.FlowSequenceStartToken: '[',
+ ruyaml.FlowSequenceEndToken: ']',
+ ruyaml.FlowMappingStartToken: '{',
+ ruyaml.FlowMappingEndToken: '}',
+ ruyaml.BlockEntryToken: ',',
+ ruyaml.FlowEntryToken: ',',
+ ruyaml.KeyToken: '?',
+ ruyaml.ValueToken: ':',
+}
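+
+
+# A quick self-check sketch (added for illustration; not part of the
+# original fixture suite): scanning a two-item block sequence with the
+# pure Python scanner and mapping each token through _replaces should
+# yield the mnemonic string '[[ , _ , _ ]}'.
+def _mnemonics_sketch():
+    yaml = ruyaml.YAML(typ='unsafe', pure=True)
+    mnemonics = [
+        _replaces[tok.__class__]
+        for tok in yaml.scan('- a\n- b\n')
+        if not isinstance(tok, (ruyaml.StreamStartToken, ruyaml.StreamEndToken))
+    ]
+    assert ' '.join(mnemonics) == '[[ , _ , _ ]}'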
+
+
+def test_tokens(data_filename, tokens_filename, verbose=False):
+ tokens1 = []
+ with open(tokens_filename, 'r') as fp:
+ tokens2 = fp.read().split()
+ try:
+ yaml = ruyaml.YAML(typ='unsafe', pure=True)
+ with open(data_filename, 'rb') as fp1:
+ for token in yaml.scan(fp1):
+ if not isinstance(
+ token, (ruyaml.StreamStartToken, ruyaml.StreamEndToken)
+ ):
+ tokens1.append(_replaces[token.__class__])
+ finally:
+ if verbose:
+ print('TOKENS1:', ' '.join(tokens1))
+ print('TOKENS2:', ' '.join(tokens2))
+ assert len(tokens1) == len(tokens2), (tokens1, tokens2)
+ for token1, token2 in zip(tokens1, tokens2):
+ assert token1 == token2, (token1, token2)
+
+
+test_tokens.unittest = ['.data', '.tokens']
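+
+# Fixture format note (illustrative example, not taken from the original
+# suite): test_appliance pairs each '.data' file with a '.tokens' sibling
+# holding the expected whitespace-separated mnemonics. A data file
+# containing
+#
+#   ---
+#   a: 1
+#
+# would be matched by a tokens file containing:
+#
+#   --- {{ ? _ : _ ]}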
+
+
+def test_scanner(data_filename, canonical_filename, verbose=False):
+ for filename in [data_filename, canonical_filename]:
+ tokens = []
+ try:
+ yaml = ruyaml.YAML(typ='unsafe', pure=False)
+ with open(filename, 'rb') as fp:
+ for token in yaml.scan(fp):
+ tokens.append(token.__class__.__name__)
+ finally:
+ if verbose:
+ pprint.pprint(tokens)
+
+
+test_scanner.unittest = ['.data', '.canonical']
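+
+# For reference, a sketch of what test_scanner collects (illustrative,
+# assuming the pure scanner rather than the pure=False one used above):
+# the document 'a: 1\n' yields this exact sequence of token class names.
+def _class_names_sketch():
+    yaml = ruyaml.YAML(typ='unsafe', pure=True)
+    names = [tok.__class__.__name__ for tok in yaml.scan('a: 1\n')]
+    assert names == [
+        'StreamStartToken',
+        'BlockMappingStartToken',
+        'KeyToken',
+        'ScalarToken',
+        'ValueToken',
+        'ScalarToken',
+        'BlockEndToken',
+        'StreamEndToken',
+    ]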
+
+if __name__ == '__main__':
+ import test_appliance
+
+ test_appliance.run(globals())