From e6918187568dbd01842d8d1d2c808ce16a894239 Mon Sep 17 00:00:00 2001
From: Daniel Baumann
Date: Sun, 21 Apr 2024 13:54:28 +0200
Subject: Adding upstream version 18.2.2.

Signed-off-by: Daniel Baumann
---
 src/s3select/.github/workflows/clang-tidy.yml | 65 +
 src/s3select/.github/workflows/cmake.yml | 76 +
 src/s3select/.gitignore | 2 +
 src/s3select/.gitmodules | 6 +
 src/s3select/CMakeLists.txt | 59 +
 src/s3select/Dockerfile | 23 +
 src/s3select/LICENSE | 201 +
 src/s3select/README.md | 53 +
 src/s3select/TPCDS/ddl/create_tpcds_tables.sql | 651 ++
 src/s3select/TPCDS/sample-queries-tpcds/README.md | 4 +
 src/s3select/TPCDS/sample-queries-tpcds/query1.sql | 25 +
 .../TPCDS/sample-queries-tpcds/query10.sql | 59 +
 .../TPCDS/sample-queries-tpcds/query11.sql | 81 +
 .../TPCDS/sample-queries-tpcds/query12.sql | 34 +
 .../TPCDS/sample-queries-tpcds/query13.sql | 52 +
 .../TPCDS/sample-queries-tpcds/query14.sql | 210 +
 .../TPCDS/sample-queries-tpcds/query15.sql | 20 +
 .../TPCDS/sample-queries-tpcds/query16.sql | 31 +
 .../TPCDS/sample-queries-tpcds/query17.sql | 45 +
 .../TPCDS/sample-queries-tpcds/query18.sql | 34 +
 .../TPCDS/sample-queries-tpcds/query19.sql | 25 +
 src/s3select/TPCDS/sample-queries-tpcds/query2.sql | 60 +
 .../TPCDS/sample-queries-tpcds/query20.sql | 30 +
 .../TPCDS/sample-queries-tpcds/query21.sql | 30 +
 .../TPCDS/sample-queries-tpcds/query22.sql | 20 +
 .../TPCDS/sample-queries-tpcds/query23.sql | 107 +
 .../TPCDS/sample-queries-tpcds/query24.sql | 107 +
 .../TPCDS/sample-queries-tpcds/query25.sql | 48 +
 .../TPCDS/sample-queries-tpcds/query26.sql | 21 +
 .../TPCDS/sample-queries-tpcds/query27.sql | 23 +
 .../TPCDS/sample-queries-tpcds/query28.sql | 53 +
 .../TPCDS/sample-queries-tpcds/query29.sql | 47 +
 src/s3select/TPCDS/sample-queries-tpcds/query3.sql | 21 +
 .../TPCDS/sample-queries-tpcds/query30.sql | 31 +
 .../TPCDS/sample-queries-tpcds/query31.sql | 52 +
 .../TPCDS/sample-queries-tpcds/query32.sql | 28 +
 .../TPCDS/sample-queries-tpcds/query33.sql | 75 +
 .../TPCDS/sample-queries-tpcds/query34.sql | 31 +
 .../TPCDS/sample-queries-tpcds/query35.sql | 58 +
 .../TPCDS/sample-queries-tpcds/query36.sql | 30 +
 .../TPCDS/sample-queries-tpcds/query37.sql | 17 +
 .../TPCDS/sample-queries-tpcds/query38.sql | 23 +
 .../TPCDS/sample-queries-tpcds/query39.sql | 54 +
 src/s3select/TPCDS/sample-queries-tpcds/query4.sql | 116 +
 .../TPCDS/sample-queries-tpcds/query40.sql | 28 +
 .../TPCDS/sample-queries-tpcds/query41.sql | 52 +
 .../TPCDS/sample-queries-tpcds/query42.sql | 22 +
 .../TPCDS/sample-queries-tpcds/query43.sql | 19 +
 .../TPCDS/sample-queries-tpcds/query44.sql | 35 +
 .../TPCDS/sample-queries-tpcds/query45.sql | 20 +
 .../TPCDS/sample-queries-tpcds/query46.sql | 35 +
 .../TPCDS/sample-queries-tpcds/query47.sql | 51 +
 .../TPCDS/sample-queries-tpcds/query48.sql | 67 +
 .../TPCDS/sample-queries-tpcds/query49.sql | 129 +
 src/s3select/TPCDS/sample-queries-tpcds/query5.sql | 128 +
 .../TPCDS/sample-queries-tpcds/query50.sql | 59 +
 .../TPCDS/sample-queries-tpcds/query51.sql | 45 +
 .../TPCDS/sample-queries-tpcds/query52.sql | 22 +
 .../TPCDS/sample-queries-tpcds/query53.sql | 28 +
 .../TPCDS/sample-queries-tpcds/query54.sql | 56 +
 .../TPCDS/sample-queries-tpcds/query55.sql | 14 +
 .../TPCDS/sample-queries-tpcds/query56.sql | 69 +
 .../TPCDS/sample-queries-tpcds/query57.sql | 48 +
 .../TPCDS/sample-queries-tpcds/query58.sql | 65 +
 .../TPCDS/sample-queries-tpcds/query59.sql | 44 +
 src/s3select/TPCDS/sample-queries-tpcds/query6.sql | 26 +
 .../TPCDS/sample-queries-tpcds/query60.sql | 78 +
 .../TPCDS/sample-queries-tpcds/query61.sql | 44 +
 .../TPCDS/sample-queries-tpcds/query62.sql | 35 +
 .../TPCDS/sample-queries-tpcds/query63.sql | 29 +
 .../TPCDS/sample-queries-tpcds/query64.sql | 121 +
 .../TPCDS/sample-queries-tpcds/query65.sql | 29 +
 .../TPCDS/sample-queries-tpcds/query66.sql | 220 +
 .../TPCDS/sample-queries-tpcds/query67.sql | 44 +
 .../TPCDS/sample-queries-tpcds/query68.sql | 42 +
 .../TPCDS/sample-queries-tpcds/query69.sql | 47 +
 src/s3select/TPCDS/sample-queries-tpcds/query7.sql | 21 +
 .../TPCDS/sample-queries-tpcds/query70.sql | 38 +
 .../TPCDS/sample-queries-tpcds/query71.sql | 40 +
 .../TPCDS/sample-queries-tpcds/query72.sql | 29 +
 .../TPCDS/sample-queries-tpcds/query73.sql | 28 +
 .../TPCDS/sample-queries-tpcds/query74.sql | 61 +
 .../TPCDS/sample-queries-tpcds/query75.sql | 70 +
 .../TPCDS/sample-queries-tpcds/query76.sql | 24 +
 .../TPCDS/sample-queries-tpcds/query77.sql | 108 +
 .../TPCDS/sample-queries-tpcds/query78.sql | 58 +
 .../TPCDS/sample-queries-tpcds/query79.sql | 23 +
 src/s3select/TPCDS/sample-queries-tpcds/query8.sql | 108 +
 .../TPCDS/sample-queries-tpcds/query80.sql | 96 +
 .../TPCDS/sample-queries-tpcds/query81.sql | 31 +
 .../TPCDS/sample-queries-tpcds/query82.sql | 17 +
 .../TPCDS/sample-queries-tpcds/query83.sql | 67 +
 .../TPCDS/sample-queries-tpcds/query84.sql | 21 +
 .../TPCDS/sample-queries-tpcds/query85.sql | 84 +
 .../TPCDS/sample-queries-tpcds/query86.sql | 26 +
 .../TPCDS/sample-queries-tpcds/query87.sql | 23 +
 .../TPCDS/sample-queries-tpcds/query88.sql | 94 +
 .../TPCDS/sample-queries-tpcds/query89.sql | 28 +
 src/s3select/TPCDS/sample-queries-tpcds/query9.sql | 51 +
 .../TPCDS/sample-queries-tpcds/query90.sql | 22 +
 .../TPCDS/sample-queries-tpcds/query91.sql | 31 +
 .../TPCDS/sample-queries-tpcds/query92.sql | 30 +
 .../TPCDS/sample-queries-tpcds/query93.sql | 18 +
 .../TPCDS/sample-queries-tpcds/query94.sql | 29 +
 .../TPCDS/sample-queries-tpcds/query95.sql | 32 +
 .../TPCDS/sample-queries-tpcds/query96.sql | 16 +
 .../TPCDS/sample-queries-tpcds/query97.sql | 25 +
 .../TPCDS/sample-queries-tpcds/query98.sql | 33 +
 .../TPCDS/sample-queries-tpcds/query99.sql | 35 +
 src/s3select/TPCDS/tpcds_functions.bash | 40 +
 src/s3select/container/trino/hms_trino.yaml | 31 +
 .../container/trino/run_trino_on_ceph.bash | 86 +
 .../container/trino/trino/catalog/hive.properties | 33 +
 .../container/trino/trino/config.properties | 5 +
 src/s3select/container/trino/trino/jvm.config | 19 +
 src/s3select/container/trino/trino/log.properties | 2 +
 src/s3select/container/trino/trino/node.properties | 2 +
 src/s3select/example/CMakeLists.txt | 23 +
 src/s3select/example/csv_to_parquet.cpp | 417 ++
 src/s3select/example/expr_genrator.py | 9 +
 src/s3select/example/generate_rand_csv.c | 28 +
 src/s3select/example/parse_csv.py | 12 +
 src/s3select/example/run_test.bash | 111 +
 src/s3select/example/s3select_example.cpp | 711 ++
 src/s3select/include/csvparser/LICENSE | 28 +
 src/s3select/include/csvparser/README.md | 275 +
 src/s3select/include/csvparser/csv.h | 1273 ++++
 src/s3select/include/encryption_internal.h | 114 +
 src/s3select/include/internal_file_decryptor.h | 121 +
 src/s3select/include/s3select.h | 3153 ++++++++
 src/s3select/include/s3select_csv_parser.h | 418 ++
 src/s3select/include/s3select_functions.h | 2703 +++++++
 src/s3select/include/s3select_json_parser.h | 829 +++
 src/s3select/include/s3select_oper.h | 3326 +++++++++
 src/s3select/include/s3select_parquet_intrf.h | 2079 ++++++
 src/s3select/parquet_mix_types.parquet | Bin 0 -> 683635 bytes
 src/s3select/rapidjson/.gitattributes | 22 +
 src/s3select/rapidjson/.gitignore | 29 +
 src/s3select/rapidjson/.gitmodules | 3 +
 src/s3select/rapidjson/.travis.yml | 166 +
 src/s3select/rapidjson/CHANGELOG.md | 158 +
 src/s3select/rapidjson/CMakeLists.txt | 250 +
 .../rapidjson/CMakeModules/FindGTestSrc.cmake | 30 +
 src/s3select/rapidjson/RapidJSON.pc.in | 7 +
 src/s3select/rapidjson/RapidJSONConfig.cmake.in | 25 +
 .../rapidjson/RapidJSONConfigVersion.cmake.in | 10 +
 src/s3select/rapidjson/appveyor.yml | 102 +
 src/s3select/rapidjson/bin/data/abcde.txt | 1 +
 src/s3select/rapidjson/bin/data/glossary.json | 22 +
 src/s3select/rapidjson/bin/data/menu.json | 27 +
 src/s3select/rapidjson/bin/data/readme.txt | 1 +
 src/s3select/rapidjson/bin/data/sample.json | 3315 +++++++++
 src/s3select/rapidjson/bin/data/webapp.json | 88 +
 src/s3select/rapidjson/bin/data/widget.json | 26 +
 src/s3select/rapidjson/bin/draft-04/schema | 150 +
 src/s3select/rapidjson/bin/encodings/utf16be.json | Bin 0 -> 368 bytes
 .../rapidjson/bin/encodings/utf16bebom.json | Bin 0 -> 370 bytes
 src/s3select/rapidjson/bin/encodings/utf16le.json | Bin 0 -> 368 bytes
 .../rapidjson/bin/encodings/utf16lebom.json | Bin 0 -> 370 bytes
 src/s3select/rapidjson/bin/encodings/utf32be.json | Bin 0 -> 736 bytes
 .../rapidjson/bin/encodings/utf32bebom.json | Bin 0 -> 740 bytes
 src/s3select/rapidjson/bin/encodings/utf32le.json | Bin 0 -> 736 bytes
 .../rapidjson/bin/encodings/utf32lebom.json | Bin 0 -> 740 bytes
 src/s3select/rapidjson/bin/encodings/utf8.json | 7 +
 src/s3select/rapidjson/bin/encodings/utf8bom.json | 7 +
 src/s3select/rapidjson/bin/jsonchecker/fail1.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail10.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail11.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail12.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail13.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail14.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail15.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail16.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail17.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail18.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail19.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail2.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail20.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail21.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail22.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail23.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail24.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail25.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail26.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail27.json | 2 +
 src/s3select/rapidjson/bin/jsonchecker/fail28.json | 2 +
 src/s3select/rapidjson/bin/jsonchecker/fail29.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail3.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail30.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail31.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail32.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail33.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail4.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail5.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail6.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail7.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail8.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/fail9.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/pass1.json | 58 +
 src/s3select/rapidjson/bin/jsonchecker/pass2.json | 1 +
 src/s3select/rapidjson/bin/jsonchecker/pass3.json | 6 +
 src/s3select/rapidjson/bin/jsonchecker/readme.txt | 3 +
 src/s3select/rapidjson/bin/jsonschema/.gitignore | 1 +
 src/s3select/rapidjson/bin/jsonschema/.travis.yml | 4 +
 src/s3select/rapidjson/bin/jsonschema/LICENSE | 19 +
 src/s3select/rapidjson/bin/jsonschema/README.md | 148 +
 .../rapidjson/bin/jsonschema/bin/jsonschema_suite | 283 +
 .../jsonschema/remotes/folder/folderInteger.json | 3 +
 .../rapidjson/bin/jsonschema/remotes/integer.json | 3 +
 .../bin/jsonschema/remotes/subSchemas.json | 8 +
 .../jsonschema/tests/draft3/additionalItems.json | 82 +
 .../tests/draft3/additionalProperties.json | 88 +
 .../bin/jsonschema/tests/draft3/default.json | 49 +
 .../bin/jsonschema/tests/draft3/dependencies.json | 108 +
 .../bin/jsonschema/tests/draft3/disallow.json | 80 +
 .../bin/jsonschema/tests/draft3/divisibleBy.json | 60 +
 .../bin/jsonschema/tests/draft3/enum.json | 71 +
 .../bin/jsonschema/tests/draft3/extends.json | 94 +
 .../bin/jsonschema/tests/draft3/items.json | 46 +
 .../bin/jsonschema/tests/draft3/maxItems.json | 28 +
 .../bin/jsonschema/tests/draft3/maxLength.json | 33 +
 .../bin/jsonschema/tests/draft3/maximum.json | 42 +
 .../bin/jsonschema/tests/draft3/minItems.json | 28 +
 .../bin/jsonschema/tests/draft3/minLength.json | 33 +
 .../bin/jsonschema/tests/draft3/minimum.json | 42 +
 .../jsonschema/tests/draft3/optional/bignum.json | 107 +
 .../jsonschema/tests/draft3/optional/format.json | 222 +
 .../jsonschema/tests/draft3/optional/jsregex.json | 18 +
 .../draft3/optional/zeroTerminatedFloats.json | 15 +
 .../bin/jsonschema/tests/draft3/pattern.json | 34 +
 .../jsonschema/tests/draft3/patternProperties.json | 110 +
 .../bin/jsonschema/tests/draft3/properties.json | 92 +
 .../rapidjson/bin/jsonschema/tests/draft3/ref.json | 159 +
 .../bin/jsonschema/tests/draft3/refRemote.json | 74 +
 .../bin/jsonschema/tests/draft3/required.json | 53 +
 .../bin/jsonschema/tests/draft3/type.json | 474 ++
 .../bin/jsonschema/tests/draft3/uniqueItems.json | 79 +
 .../jsonschema/tests/draft4/additionalItems.json | 82 +
 .../tests/draft4/additionalProperties.json | 88 +
 .../bin/jsonschema/tests/draft4/allOf.json | 112 +
 .../bin/jsonschema/tests/draft4/anyOf.json | 68 +
 .../bin/jsonschema/tests/draft4/default.json | 49 +
 .../bin/jsonschema/tests/draft4/definitions.json | 32 +
 .../bin/jsonschema/tests/draft4/dependencies.json | 113 +
 .../bin/jsonschema/tests/draft4/enum.json | 72 +
 .../bin/jsonschema/tests/draft4/items.json | 46 +
 .../bin/jsonschema/tests/draft4/maxItems.json | 28 +
 .../bin/jsonschema/tests/draft4/maxLength.json | 33 +
 .../bin/jsonschema/tests/draft4/maxProperties.json | 28 +
 .../bin/jsonschema/tests/draft4/maximum.json | 42 +
 .../bin/jsonschema/tests/draft4/minItems.json | 28 +
 .../bin/jsonschema/tests/draft4/minLength.json | 33 +
 .../bin/jsonschema/tests/draft4/minProperties.json | 28 +
 .../bin/jsonschema/tests/draft4/minimum.json | 42 +
 .../bin/jsonschema/tests/draft4/multipleOf.json | 60 +
 .../rapidjson/bin/jsonschema/tests/draft4/not.json | 96 +
 .../bin/jsonschema/tests/draft4/oneOf.json | 68 +
 .../jsonschema/tests/draft4/optional/bignum.json | 107 +
 .../jsonschema/tests/draft4/optional/format.json | 148 +
 .../draft4/optional/zeroTerminatedFloats.json | 15 +
 .../bin/jsonschema/tests/draft4/pattern.json | 34 +
 .../jsonschema/tests/draft4/patternProperties.json | 110 +
 .../bin/jsonschema/tests/draft4/properties.json | 92 +
 .../rapidjson/bin/jsonschema/tests/draft4/ref.json | 159 +
 .../bin/jsonschema/tests/draft4/refRemote.json | 74 +
 .../bin/jsonschema/tests/draft4/required.json | 39 +
 .../bin/jsonschema/tests/draft4/type.json | 330 +
 .../bin/jsonschema/tests/draft4/uniqueItems.json | 79 +
 src/s3select/rapidjson/bin/jsonschema/tox.ini | 8 +
 src/s3select/rapidjson/bin/types/alotofkeys.json | 502 ++
 src/s3select/rapidjson/bin/types/booleans.json | 102 +
 src/s3select/rapidjson/bin/types/floats.json | 102 +
 src/s3select/rapidjson/bin/types/guids.json | 102 +
 src/s3select/rapidjson/bin/types/integers.json | 102 +
 src/s3select/rapidjson/bin/types/mixed.json | 592 ++
 src/s3select/rapidjson/bin/types/nulls.json | 102 +
 src/s3select/rapidjson/bin/types/paragraphs.json | 102 +
 src/s3select/rapidjson/bin/types/readme.txt | 1 +
 .../rapidjson/bin/unittestschema/address.json | 139 +
 .../bin/unittestschema/allOf_address.json | 7 +
 .../bin/unittestschema/anyOf_address.json | 7 +
 .../rapidjson/bin/unittestschema/idandref.json | 69 +
 .../bin/unittestschema/oneOf_address.json | 7 +
 src/s3select/rapidjson/contrib/natvis/LICENSE | 45 +
 src/s3select/rapidjson/contrib/natvis/README.md | 7 +
 .../rapidjson/contrib/natvis/rapidjson.natvis | 38 +
 src/s3select/rapidjson/doc/CMakeLists.txt | 27 +
 src/s3select/rapidjson/doc/Doxyfile.in | 2369 ++++++
 src/s3select/rapidjson/doc/Doxyfile.zh-cn.in | 2369 ++++++
 .../rapidjson/doc/diagram/architecture.dot | 50 +
 .../rapidjson/doc/diagram/architecture.png | Bin 0 -> 16569 bytes
 .../rapidjson/doc/diagram/insituparsing.dot | 65 +
 .../rapidjson/doc/diagram/insituparsing.png | Bin 0 -> 37281 bytes
 .../diagram/iterative-parser-states-diagram.dot | 62 +
 .../diagram/iterative-parser-states-diagram.png | Bin 0 -> 92378 bytes
 src/s3select/rapidjson/doc/diagram/makefile | 8 +
 src/s3select/rapidjson/doc/diagram/move1.dot | 47 +
 src/s3select/rapidjson/doc/diagram/move1.png | Bin 0 -> 16081 bytes
 src/s3select/rapidjson/doc/diagram/move2.dot | 62 +
 src/s3select/rapidjson/doc/diagram/move2.png | Bin 0 -> 41517 bytes
 src/s3select/rapidjson/doc/diagram/move3.dot | 60 +
 src/s3select/rapidjson/doc/diagram/move3.png | Bin 0 -> 36371 bytes
 .../rapidjson/doc/diagram/normalparsing.dot | 56 +
 .../rapidjson/doc/diagram/normalparsing.png | Bin 0 -> 32887 bytes
 src/s3select/rapidjson/doc/diagram/simpledom.dot | 54 +
 src/s3select/rapidjson/doc/diagram/simpledom.png | Bin 0 -> 43670 bytes
 src/s3select/rapidjson/doc/diagram/tutorial.dot | 58 +
 src/s3select/rapidjson/doc/diagram/tutorial.png | Bin 0 -> 44634 bytes
 .../rapidjson/doc/diagram/utilityclass.dot | 73 +
 .../rapidjson/doc/diagram/utilityclass.png | Bin 0 -> 99993 bytes
 src/s3select/rapidjson/doc/dom.md | 281 +
 src/s3select/rapidjson/doc/dom.zh-cn.md | 285 +
 src/s3select/rapidjson/doc/encoding.md | 146 +
 src/s3select/rapidjson/doc/encoding.zh-cn.md | 152 +
 src/s3select/rapidjson/doc/faq.md | 289 +
 src/s3select/rapidjson/doc/faq.zh-cn.md | 290 +
 src/s3select/rapidjson/doc/features.md | 104 +
 src/s3select/rapidjson/doc/features.zh-cn.md | 103 +
 src/s3select/rapidjson/doc/internals.md | 368 +
 src/s3select/rapidjson/doc/internals.zh-cn.md | 363 +
 src/s3select/rapidjson/doc/logo/rapidjson.png | Bin 0 -> 5259 bytes
 src/s3select/rapidjson/doc/logo/rapidjson.svg | 119 +
 src/s3select/rapidjson/doc/misc/DoxygenLayout.xml | 194 +
 src/s3select/rapidjson/doc/misc/doxygenextra.css | 274 +
 src/s3select/rapidjson/doc/misc/footer.html | 11 +
 src/s3select/rapidjson/doc/misc/header.html | 24 +
 src/s3select/rapidjson/doc/npm.md | 31 +
 src/s3select/rapidjson/doc/performance.md | 26 +
 src/s3select/rapidjson/doc/performance.zh-cn.md | 26 +
 src/s3select/rapidjson/doc/pointer.md | 234 +
 src/s3select/rapidjson/doc/pointer.zh-cn.md | 234 +
 src/s3select/rapidjson/doc/sax.md | 509 ++
 src/s3select/rapidjson/doc/sax.zh-cn.md | 487 ++
 src/s3select/rapidjson/doc/schema.md | 505 ++
 src/s3select/rapidjson/doc/schema.zh-cn.md | 237 +
 src/s3select/rapidjson/doc/stream.md | 429 ++
 src/s3select/rapidjson/doc/stream.zh-cn.md | 429 ++
 src/s3select/rapidjson/doc/tutorial.md | 536 ++
 src/s3select/rapidjson/doc/tutorial.zh-cn.md | 535 ++
 src/s3select/rapidjson/docker/debian/Dockerfile | 8 +
 src/s3select/rapidjson/example/CMakeLists.txt | 46 +
 .../rapidjson/example/archiver/archiver.cpp | 292 +
 src/s3select/rapidjson/example/archiver/archiver.h | 145 +
 .../rapidjson/example/archiver/archivertest.cpp | 287 +
 .../rapidjson/example/capitalize/capitalize.cpp | 67 +
 .../rapidjson/example/condense/condense.cpp | 32 +
 .../rapidjson/example/filterkey/filterkey.cpp | 135 +
 .../example/filterkeydom/filterkeydom.cpp | 170 +
 src/s3select/rapidjson/example/jsonx/jsonx.cpp | 207 +
 .../example/lookaheadparser/lookaheadparser.cpp | 350 +
 .../example/messagereader/messagereader.cpp | 105 +
 .../example/parsebyparts/parsebyparts.cpp | 176 +
 src/s3select/rapidjson/example/pretty/pretty.cpp | 30 +
 .../rapidjson/example/prettyauto/prettyauto.cpp | 56 +
 .../example/schemavalidator/schemavalidator.cpp | 198 +
 .../rapidjson/example/serialize/serialize.cpp | 173 +
 .../rapidjson/example/simpledom/simpledom.cpp | 29 +
 .../example/simplepullreader/simplepullreader.cpp | 53 +
 .../example/simplereader/simplereader.cpp | 42 +
 .../example/simplewriter/simplewriter.cpp | 36 +
 .../rapidjson/example/sortkeys/sortkeys.cpp | 62 +
 .../rapidjson/example/traverseaspointer.cpp | 39 +
 .../rapidjson/example/tutorial/tutorial.cpp | 151 +
 .../rapidjson/include/rapidjson/allocators.h | 692 ++
 .../include/rapidjson/cursorstreamwrapper.h | 78 +
 .../rapidjson/include/rapidjson/document.h | 3028 ++++++++
 .../rapidjson/include/rapidjson/encodedstream.h | 299 +
 .../rapidjson/include/rapidjson/encodings.h | 716 ++
 .../rapidjson/include/rapidjson/error/en.h | 122 +
 .../rapidjson/include/rapidjson/error/error.h | 216 +
 .../rapidjson/include/rapidjson/filereadstream.h | 99 +
 .../rapidjson/include/rapidjson/filewritestream.h | 104 +
 src/s3select/rapidjson/include/rapidjson/fwd.h | 151 +
 .../include/rapidjson/internal/biginteger.h | 297 +
 .../rapidjson/include/rapidjson/internal/clzll.h | 71 +
 .../rapidjson/include/rapidjson/internal/diyfp.h | 261 +
 .../rapidjson/include/rapidjson/internal/dtoa.h | 249 +
 .../rapidjson/include/rapidjson/internal/ieee754.h | 78 +
 .../rapidjson/include/rapidjson/internal/itoa.h | 308 +
 .../rapidjson/include/rapidjson/internal/meta.h | 186 +
 .../rapidjson/include/rapidjson/internal/pow10.h | 55 +
 .../rapidjson/include/rapidjson/internal/regex.h | 739 ++
 .../rapidjson/include/rapidjson/internal/stack.h | 232 +
 .../rapidjson/include/rapidjson/internal/strfunc.h | 83 +
 .../rapidjson/include/rapidjson/internal/strtod.h | 293 +
 .../rapidjson/include/rapidjson/internal/swap.h | 46 +
 .../rapidjson/include/rapidjson/istreamwrapper.h | 128 +
 .../rapidjson/include/rapidjson/memorybuffer.h | 70 +
 .../rapidjson/include/rapidjson/memorystream.h | 71 +
 .../include/rapidjson/msinttypes/inttypes.h | 316 +
 .../include/rapidjson/msinttypes/stdint.h | 300 +
 .../rapidjson/include/rapidjson/ostreamwrapper.h | 81 +
 src/s3select/rapidjson/include/rapidjson/pointer.h | 1482 ++++
 .../rapidjson/include/rapidjson/prettywriter.h | 277 +
 .../rapidjson/include/rapidjson/rapidjson.h | 741 ++
 src/s3select/rapidjson/include/rapidjson/reader.h | 2246 ++++++
 src/s3select/rapidjson/include/rapidjson/schema.h | 2808 +++++++
 src/s3select/rapidjson/include/rapidjson/stream.h | 223 +
 .../rapidjson/include/rapidjson/stringbuffer.h | 121 +
 src/s3select/rapidjson/include/rapidjson/uri.h | 481 ++
 src/s3select/rapidjson/include/rapidjson/writer.h | 710 ++
 src/s3select/rapidjson/include_dirs.js | 2 +
 src/s3select/rapidjson/library.json | 15 +
 src/s3select/rapidjson/license.txt | 57 +
 src/s3select/rapidjson/package.json | 24 +
 src/s3select/rapidjson/rapidjson.autopkg | 77 +
 src/s3select/rapidjson/readme.md | 210 +
 src/s3select/rapidjson/readme.zh-cn.md | 152 +
 src/s3select/rapidjson/test/CMakeLists.txt | 20 +
 .../rapidjson/test/perftest/CMakeLists.txt | 28 +
 src/s3select/rapidjson/test/perftest/misctest.cpp | 974 +++
 src/s3select/rapidjson/test/perftest/perftest.cpp | 24 +
 src/s3select/rapidjson/test/perftest/perftest.h | 186 +
 .../rapidjson/test/perftest/platformtest.cpp | 166 +
 .../rapidjson/test/perftest/rapidjsontest.cpp | 564 ++
 .../rapidjson/test/perftest/schematest.cpp | 223 +
 .../rapidjson/test/unittest/CMakeLists.txt | 95 +
 .../rapidjson/test/unittest/allocatorstest.cpp | 292 +
 .../rapidjson/test/unittest/bigintegertest.cpp | 138 +
 src/s3select/rapidjson/test/unittest/clzlltest.cpp | 34 +
 .../test/unittest/cursorstreamwrappertest.cpp | 115 +
 .../rapidjson/test/unittest/documenttest.cpp | 674 ++
 src/s3select/rapidjson/test/unittest/dtoatest.cpp | 99 +
 .../rapidjson/test/unittest/encodedstreamtest.cpp | 313 +
 .../rapidjson/test/unittest/encodingstest.cpp | 451 ++
 .../rapidjson/test/unittest/filestreamtest.cpp | 155 +
 src/s3select/rapidjson/test/unittest/fwdtest.cpp | 230 +
 .../rapidjson/test/unittest/istreamwrappertest.cpp | 181 +
 src/s3select/rapidjson/test/unittest/itoatest.cpp | 160 +
 .../rapidjson/test/unittest/jsoncheckertest.cpp | 143 +
 .../rapidjson/test/unittest/namespacetest.cpp | 70 +
 .../rapidjson/test/unittest/ostreamwrappertest.cpp | 92 +
 .../rapidjson/test/unittest/platformtest.cpp | 40 +
 .../rapidjson/test/unittest/pointertest.cpp | 1730 +++++
 .../rapidjson/test/unittest/prettywritertest.cpp | 373 +
 .../rapidjson/test/unittest/readertest.cpp | 2370 ++++++
 src/s3select/rapidjson/test/unittest/regextest.cpp | 639 ++
 .../rapidjson/test/unittest/schematest.cpp | 2952 ++++++++
 src/s3select/rapidjson/test/unittest/simdtest.cpp | 219 +
 .../rapidjson/test/unittest/strfunctest.cpp | 30 +
 .../rapidjson/test/unittest/stringbuffertest.cpp | 192 +
 .../rapidjson/test/unittest/strtodtest.cpp | 132 +
 src/s3select/rapidjson/test/unittest/unittest.cpp | 51 +
 src/s3select/rapidjson/test/unittest/unittest.h | 143 +
 src/s3select/rapidjson/test/unittest/uritest.cpp | 718 ++
 src/s3select/rapidjson/test/unittest/valuetest.cpp | 1861 +++++
 .../rapidjson/test/unittest/writertest.cpp | 598 ++
 src/s3select/rapidjson/test/valgrind.supp | 17 +
 src/s3select/rapidjson/thirdparty/gtest/.gitignore | 40 +
 .../rapidjson/thirdparty/gtest/.travis.yml | 81 +
 .../rapidjson/thirdparty/gtest/BUILD.bazel | 175 +
 .../rapidjson/thirdparty/gtest/CMakeLists.txt | 33 +
 .../rapidjson/thirdparty/gtest/CONTRIBUTING.md | 160 +
 src/s3select/rapidjson/thirdparty/gtest/LICENSE | 28 +
 .../rapidjson/thirdparty/gtest/Makefile.am | 14 +
 src/s3select/rapidjson/thirdparty/gtest/README.md | 122 +
 src/s3select/rapidjson/thirdparty/gtest/WORKSPACE | 8 +
 .../rapidjson/thirdparty/gtest/appveyor.yml | 104 +
 .../thirdparty/gtest/ci/build-linux-autotools.sh | 44 +
 .../thirdparty/gtest/ci/build-linux-bazel.sh | 36 +
 .../rapidjson/thirdparty/gtest/ci/env-linux.sh | 41 +
 .../rapidjson/thirdparty/gtest/ci/env-osx.sh | 40 +
 .../thirdparty/gtest/ci/get-nprocessors.sh | 48 +
 .../rapidjson/thirdparty/gtest/ci/install-linux.sh | 49 +
 .../rapidjson/thirdparty/gtest/ci/install-osx.sh | 39 +
 .../rapidjson/thirdparty/gtest/ci/log-config.sh | 51 +
 .../rapidjson/thirdparty/gtest/ci/travis.sh | 44 +
 .../rapidjson/thirdparty/gtest/configure.ac | 16 +
 .../rapidjson/thirdparty/gtest/googlemock/CHANGES | 126 +
 .../thirdparty/gtest/googlemock/CMakeLists.txt | 242 +
 .../thirdparty/gtest/googlemock/CONTRIBUTORS | 40 +
 .../rapidjson/thirdparty/gtest/googlemock/LICENSE | 28 +
 .../thirdparty/gtest/googlemock/Makefile.am | 224 +
 .../thirdparty/gtest/googlemock/README.md | 344 +
 .../thirdparty/gtest/googlemock/build-aux/.keep | 0
 .../thirdparty/gtest/googlemock/cmake/gmock.pc.in | 9 +
 .../gtest/googlemock/cmake/gmock_main.pc.in | 9 +
 .../thirdparty/gtest/googlemock/configure.ac | 146 +
 .../thirdparty/gtest/googlemock/docs/CheatSheet.md | 564 ++
 .../thirdparty/gtest/googlemock/docs/CookBook.md | 3660 +++++++++
 .../thirdparty/gtest/googlemock/docs/DesignDoc.md | 280 +
 .../gtest/googlemock/docs/Documentation.md | 15 +
 .../thirdparty/gtest/googlemock/docs/ForDummies.md | 447 ++
 .../googlemock/docs/FrequentlyAskedQuestions.md | 627 ++
 .../gtest/googlemock/docs/KnownIssues.md | 19 +
 .../gtest/googlemock/include/gmock/gmock-actions.h | 1262 ++++
 .../googlemock/include/gmock/gmock-cardinalities.h | 147 +
 .../include/gmock/gmock-generated-actions.h | 2571 +++++++
 .../include/gmock/gmock-generated-actions.h.pump | 833 +++
 .../gmock/gmock-generated-function-mockers.h | 1379 ++++
 .../gmock/gmock-generated-function-mockers.h.pump | 347 +
 .../include/gmock/gmock-generated-matchers.h | 2258 ++++++
 .../include/gmock/gmock-generated-matchers.h.pump | 675 ++
 .../include/gmock/gmock-generated-nice-strict.h | 458 ++
 .../gmock/gmock-generated-nice-strict.h.pump | 178 +
 .../googlemock/include/gmock/gmock-matchers.h | 5255 +++++++++++++
 .../googlemock/include/gmock/gmock-more-actions.h | 246 +
 .../googlemock/include/gmock/gmock-more-matchers.h | 91 +
 .../googlemock/include/gmock/gmock-spec-builders.h | 1918 +++++
 .../gtest/googlemock/include/gmock/gmock.h | 95 +
 .../internal/custom/gmock-generated-actions.h | 8 +
 .../internal/custom/gmock-generated-actions.h.pump | 10 +
 .../include/gmock/internal/custom/gmock-matchers.h | 38 +
 .../include/gmock/internal/custom/gmock-port.h | 46 +
 .../internal/gmock-generated-internal-utils.h | 286 +
 .../internal/gmock-generated-internal-utils.h.pump | 136 +
 .../include/gmock/internal/gmock-internal-utils.h | 574 ++
 .../googlemock/include/gmock/internal/gmock-port.h | 87 +
 .../thirdparty/gtest/googlemock/make/Makefile | 101 +
 .../gtest/googlemock/msvc/2005/gmock.sln | 32 +
 .../gtest/googlemock/msvc/2005/gmock.vcproj | 191 +
 .../googlemock/msvc/2005/gmock_config.vsprops | 15 +
 .../gtest/googlemock/msvc/2005/gmock_main.vcproj | 187 +
 .../gtest/googlemock/msvc/2005/gmock_test.vcproj | 201 +
 .../gtest/googlemock/msvc/2010/gmock.sln | 46 +
 .../gtest/googlemock/msvc/2010/gmock.vcxproj | 145 +
 .../gtest/googlemock/msvc/2010/gmock_config.props | 19 +
 .../gtest/googlemock/msvc/2010/gmock_main.vcxproj | 151 +
 .../gtest/googlemock/msvc/2010/gmock_test.vcxproj | 176 +
 .../gtest/googlemock/msvc/2015/gmock.sln | 46 +
 .../gtest/googlemock/msvc/2015/gmock.vcxproj | 145 +
 .../gtest/googlemock/msvc/2015/gmock_config.props | 19 +
 .../gtest/googlemock/msvc/2015/gmock_main.vcxproj | 151 +
 .../gtest/googlemock/msvc/2015/gmock_test.vcxproj | 176 +
 .../gtest/googlemock/scripts/fuse_gmock_files.py | 240 +
 .../gtest/googlemock/scripts/generator/LICENSE | 203 +
 .../gtest/googlemock/scripts/generator/README | 34 +
 .../googlemock/scripts/generator/README.cppclean | 115 +
 .../googlemock/scripts/generator/cpp/__init__.py | 0
 .../gtest/googlemock/scripts/generator/cpp/ast.py | 1733 +++++
 .../scripts/generator/cpp/gmock_class.py | 227 +
 .../scripts/generator/cpp/gmock_class_test.py | 448 ++
 .../googlemock/scripts/generator/cpp/keywords.py | 59 +
 .../googlemock/scripts/generator/cpp/tokenize.py | 287 +
 .../googlemock/scripts/generator/cpp/utils.py | 41 +
 .../googlemock/scripts/generator/gmock_gen.py | 31 +
 .../gtest/googlemock/scripts/gmock-config.in | 303 +
 .../gtest/googlemock/scripts/gmock_doctor.py | 640 ++
 .../thirdparty/gtest/googlemock/scripts/upload.py | 1387 ++++
 .../gtest/googlemock/scripts/upload_gmock.py | 78 +
 .../thirdparty/gtest/googlemock/src/gmock-all.cc | 47 +
 .../gtest/googlemock/src/gmock-cardinalities.cc | 156 +
 .../gtest/googlemock/src/gmock-internal-utils.cc | 204 +
 .../gtest/googlemock/src/gmock-matchers.cc | 573 +
 .../gtest/googlemock/src/gmock-spec-builders.cc | 883 +++
 .../thirdparty/gtest/googlemock/src/gmock.cc | 205 +
 .../thirdparty/gtest/googlemock/src/gmock_main.cc | 54 +
 .../thirdparty/gtest/googlemock/test/BUILD.bazel | 123 +
 .../gtest/googlemock/test/gmock-actions_test.cc | 1575 ++++
 .../googlemock/test/gmock-cardinalities_test.cc | 428 ++
 .../test/gmock-generated-actions_test.cc | 1230 +++
 .../test/gmock-generated-function-mockers_test.cc | 647 ++
 .../test/gmock-generated-internal-utils_test.cc | 129 +
 .../test/gmock-generated-matchers_test.cc | 1341 ++++
 .../googlemock/test/gmock-internal-utils_test.cc | 718 ++
 .../gtest/googlemock/test/gmock-matchers_test.cc | 6767 +++++++++++++++++
 .../googlemock/test/gmock-more-actions_test.cc | 710 ++
 .../googlemock/test/gmock-nice-strict_test.cc | 511 ++
 .../gtest/googlemock/test/gmock-port_test.cc | 43 +
 .../googlemock/test/gmock-spec-builders_test.cc | 2771 +++++++
 .../gtest/googlemock/test/gmock_all_test.cc | 51 +
 .../gtest/googlemock/test/gmock_ex_test.cc | 81 +
 .../gtest/googlemock/test/gmock_leak_test.py | 108 +
 .../gtest/googlemock/test/gmock_leak_test_.cc | 100 +
 .../gtest/googlemock/test/gmock_link2_test.cc | 40 +
 .../gtest/googlemock/test/gmock_link_test.cc | 40 +
 .../gtest/googlemock/test/gmock_link_test.h | 691 ++
 .../gtest/googlemock/test/gmock_output_test.py | 183 +
 .../gtest/googlemock/test/gmock_output_test_.cc | 310 +
 .../googlemock/test/gmock_output_test_golden.txt | 317 +
 .../gtest/googlemock/test/gmock_stress_test.cc | 323 +
 .../thirdparty/gtest/googlemock/test/gmock_test.cc | 262 +
 .../gtest/googlemock/test/gmock_test_utils.py | 110 +
 .../rapidjson/thirdparty/gtest/googletest/CHANGES | 157 +
 .../thirdparty/gtest/googletest/CMakeLists.txt | 312 +
 .../thirdparty/gtest/googletest/CONTRIBUTORS | 37 +
 .../rapidjson/thirdparty/gtest/googletest/LICENSE | 28 +
 .../thirdparty/gtest/googletest/Makefile.am | 339 +
 .../thirdparty/gtest/googletest/README.md | 341 +
 .../thirdparty/gtest/googletest/cmake/gtest.pc.in | 9 +
 .../gtest/googletest/cmake/gtest_main.pc.in | 10 +
 .../gtest/googletest/cmake/internal_utils.cmake | 280 +
 .../gtest/googletest/codegear/gtest.cbproj | 138 +
 .../gtest/googletest/codegear/gtest.groupproj | 54 +
 .../gtest/googletest/codegear/gtest_all.cc | 38 +
 .../gtest/googletest/codegear/gtest_link.cc | 40 +
 .../gtest/googletest/codegear/gtest_main.cbproj | 82 +
 .../googletest/codegear/gtest_unittest.cbproj | 88 +
 .../thirdparty/gtest/googletest/configure.ac | 68 +
 .../thirdparty/gtest/googletest/docs/Pkgconfig.md | 146 +
 .../thirdparty/gtest/googletest/docs/PumpManual.md | 177 +
 .../thirdparty/gtest/googletest/docs/XcodeGuide.md | 93 +
 .../thirdparty/gtest/googletest/docs/advanced.md | 2416 ++++++
 .../thirdparty/gtest/googletest/docs/faq.md | 1092 +++
 .../thirdparty/gtest/googletest/docs/primer.md | 536 ++
 .../thirdparty/gtest/googletest/docs/samples.md | 14 +
 .../googletest/include/gtest/gtest-death-test.h | 342 +
 .../gtest/googletest/include/gtest/gtest-message.h | 249 +
 .../googletest/include/gtest/gtest-param-test.h | 1438 ++++
 .../include/gtest/gtest-param-test.h.pump | 501 ++
 .../googletest/include/gtest/gtest-printers.h | 1082 +++
 .../gtest/googletest/include/gtest/gtest-spi.h | 231 +
 .../googletest/include/gtest/gtest-test-part.h | 179 +
 .../googletest/include/gtest/gtest-typed-test.h | 264 +
 .../gtest/googletest/include/gtest/gtest.h | 2332 ++++++
 .../googletest/include/gtest/gtest_pred_impl.h | 357 +
 .../gtest/googletest/include/gtest/gtest_prod.h | 61 +
 .../include/gtest/internal/custom/gtest-port.h | 70 +
 .../include/gtest/internal/custom/gtest-printers.h | 42 +
 .../include/gtest/internal/custom/gtest.h | 45 +
 .../gtest/internal/gtest-death-test-internal.h | 275 +
 .../include/gtest/internal/gtest-filepath.h | 205 +
 .../include/gtest/internal/gtest-internal.h | 1277 ++++
 .../include/gtest/internal/gtest-linked_ptr.h | 243 +
 .../gtest/internal/gtest-param-util-generated.h | 5139 +++++++++++++
 .../internal/gtest-param-util-generated.h.pump | 279 +
 .../include/gtest/internal/gtest-param-util.h | 723 ++
 .../include/gtest/internal/gtest-port-arch.h | 100 +
 .../googletest/include/gtest/internal/gtest-port.h | 2687 +++++++
 .../include/gtest/internal/gtest-string.h | 167 +
 .../include/gtest/internal/gtest-tuple.h | 1020 +++
 .../include/gtest/internal/gtest-tuple.h.pump | 347 +
 .../include/gtest/internal/gtest-type-util.h | 3347 +++++++++
 .../include/gtest/internal/gtest-type-util.h.pump | 313 +
 .../thirdparty/gtest/googletest/m4/acx_pthread.m4 | 363 +
 .../thirdparty/gtest/googletest/m4/gtest.m4 | 74 +
 .../thirdparty/gtest/googletest/make/Makefile | 82 +
 .../gtest/googletest/msvc/2010/gtest-md.sln | 55 +
 .../gtest/googletest/msvc/2010/gtest-md.vcxproj | 149 +
 .../googletest/msvc/2010/gtest-md.vcxproj.filters | 18 +
 .../gtest/googletest/msvc/2010/gtest.sln | 55 +
 .../gtest/googletest/msvc/2010/gtest.vcxproj | 149 +
 .../googletest/msvc/2010/gtest.vcxproj.filters | 18 +
 .../googletest/msvc/2010/gtest_main-md.vcxproj | 154 +
 .../msvc/2010/gtest_main-md.vcxproj.filters | 18 +
 .../gtest/googletest/msvc/2010/gtest_main.vcxproj | 162 +
 .../msvc/2010/gtest_main.vcxproj.filters | 18 +
 .../msvc/2010/gtest_prod_test-md.vcxproj | 199 +
 .../msvc/2010/gtest_prod_test-md.vcxproj.filters | 26 +
 .../googletest/msvc/2010/gtest_prod_test.vcxproj | 191 +
 .../msvc/2010/gtest_prod_test.vcxproj.filters | 26 +
 .../googletest/msvc/2010/gtest_unittest-md.vcxproj | 188 +
 .../msvc/2010/gtest_unittest-md.vcxproj.filters | 18 +
 .../googletest/msvc/2010/gtest_unittest.vcxproj | 180 +
 .../msvc/2010/gtest_unittest.vcxproj.filters | 18 +
 .../gtest/googletest/samples/prime_tables.h | 127 +
 .../thirdparty/gtest/googletest/samples/sample1.cc | 68 +
 .../thirdparty/gtest/googletest/samples/sample1.h | 43 +
 .../gtest/googletest/samples/sample10_unittest.cc | 140 +
 .../gtest/googletest/samples/sample1_unittest.cc | 154 +
 .../thirdparty/gtest/googletest/samples/sample2.cc | 56 +
 .../thirdparty/gtest/googletest/samples/sample2.h | 85 +
 .../gtest/googletest/samples/sample2_unittest.cc | 110 +
 .../gtest/googletest/samples/sample3-inl.h | 172 +
 .../gtest/googletest/samples/sample3_unittest.cc | 152 +
 .../thirdparty/gtest/googletest/samples/sample4.cc | 46 +
 .../thirdparty/gtest/googletest/samples/sample4.h | 53 +
 .../gtest/googletest/samples/sample4_unittest.cc | 49 +
 .../gtest/googletest/samples/sample5_unittest.cc | 199 +
 .../gtest/googletest/samples/sample6_unittest.cc | 225 +
 .../gtest/googletest/samples/sample7_unittest.cc | 118 +
 .../gtest/googletest/samples/sample8_unittest.cc | 174 +
 .../gtest/googletest/samples/sample9_unittest.cc | 157 +
 .../thirdparty/gtest/googletest/scripts/common.py | 83 +
 .../gtest/googletest/scripts/fuse_gtest_files.py | 253 +
 .../googletest/scripts/gen_gtest_pred_impl.py | 730 ++
 .../gtest/googletest/scripts/gtest-config.in | 274 +
 .../thirdparty/gtest/googletest/scripts/pump.py | 855 +++
 .../gtest/googletest/scripts/release_docs.py | 158 +
 .../gtest/googletest/scripts/test/Makefile | 59 +
 .../thirdparty/gtest/googletest/scripts/upload.py | 1387 ++++
 .../gtest/googletest/scripts/upload_gtest.py | 78 +
 .../thirdparty/gtest/googletest/src/gtest-all.cc | 48 +
 .../gtest/googletest/src/gtest-death-test.cc | 1536 ++++
 .../gtest/googletest/src/gtest-filepath.cc | 385 +
 .../gtest/googletest/src/gtest-internal-inl.h | 1175 +++
 .../thirdparty/gtest/googletest/src/gtest-port.cc | 1277 ++++
 .../gtest/googletest/src/gtest-printers.cc | 458 ++
 .../gtest/googletest/src/gtest-test-part.cc | 102 +
 .../gtest/googletest/src/gtest-typed-test.cc | 119 +
 .../thirdparty/gtest/googletest/src/gtest.cc | 5846 +++++++++++++++
 .../thirdparty/gtest/googletest/src/gtest_main.cc | 38 +
 .../thirdparty/gtest/googletest/test/BUILD.bazel | 396 +
 .../googletest/test/gtest-death-test_ex_test.cc | 93 +
 .../gtest/googletest/test/gtest-death-test_test.cc | 1424 ++++
 .../gtest/googletest/test/gtest-filepath_test.cc | 652 ++
 .../gtest/googletest/test/gtest-linked_ptr_test.cc | 154 +
 .../gtest/googletest/test/gtest-listener_test.cc | 311 +
 .../gtest/googletest/test/gtest-message_test.cc | 159 +
 .../gtest/googletest/test/gtest-options_test.cc | 213 +
 .../googletest/test/gtest-param-test2_test.cc | 61 +
 .../gtest/googletest/test/gtest-param-test_test.cc | 1110 +++
 .../gtest/googletest/test/gtest-param-test_test.h | 53 +
 .../gtest/googletest/test/gtest-port_test.cc | 1303 ++++
 .../gtest/googletest/test/gtest-printers_test.cc | 1737 +++++
 .../gtest/googletest/test/gtest-test-part_test.cc | 208 +
 .../gtest/googletest/test/gtest-tuple_test.cc | 320 +
 .../googletest/test/gtest-typed-test2_test.cc | 45 +
 .../gtest/googletest/test/gtest-typed-test_test.cc | 380 +
 .../gtest/googletest/test/gtest-typed-test_test.h | 66 +
 .../googletest/test/gtest-unittest-api_test.cc | 341 +
 .../gtest/googletest/test/gtest_all_test.cc | 47 +
 .../test/gtest_assert_by_exception_test.cc | 119 +
 .../test/gtest_break_on_failure_unittest.py | 210 +
 .../test/gtest_break_on_failure_unittest_.cc | 87 +
 .../googletest/test/gtest_catch_exceptions_test.py | 235 +
 .../test/gtest_catch_exceptions_test_.cc | 311 +
 .../gtest/googletest/test/gtest_color_test.py | 129 +
 .../gtest/googletest/test/gtest_color_test_.cc | 63 +
 .../gtest/googletest/test/gtest_env_var_test.py | 119 +
 .../gtest/googletest/test/gtest_env_var_test_.cc | 124 +
 .../googletest/test/gtest_environment_test.cc | 189 +
 .../gtest/googletest/test/gtest_filter_unittest.py | 638 ++
 .../googletest/test/gtest_filter_unittest_.cc | 138 +
 .../gtest/googletest/test/gtest_help_test.py | 172 +
 .../gtest/googletest/test/gtest_help_test_.cc | 46 +
 .../googletest/test/gtest_json_outfiles_test.py | 162 +
 .../googletest/test/gtest_json_output_unittest.py | 611 ++
 .../gtest/googletest/test/gtest_json_test_utils.py | 60 +
 .../googletest/test/gtest_list_tests_unittest.py | 207 +
 .../googletest/test/gtest_list_tests_unittest_.cc | 157 +
 .../gtest/googletest/test/gtest_main_unittest.cc | 45 +
 .../googletest/test/gtest_no_test_unittest.cc | 56 +
 .../gtest/googletest/test/gtest_output_test.py | 344 +
 .../gtest/googletest/test/gtest_output_test_.cc | 1067 +++
 .../test/gtest_output_test_golden_lin.txt | 781 ++
 .../googletest/test/gtest_pred_impl_unittest.cc | 2427 ++++++
 .../googletest/test/gtest_premature_exit_test.cc | 127 +
 .../gtest/googletest/test/gtest_prod_test.cc | 57 +
 .../gtest/googletest/test/gtest_repeat_test.cc | 236 +
 .../gtest/googletest/test/gtest_shuffle_test.py | 325 +
 .../gtest/googletest/test/gtest_shuffle_test_.cc | 103 +
 .../googletest/test/gtest_sole_header_test.cc | 57 +
 .../gtest/googletest/test/gtest_stress_test.cc | 250 +
 .../gtest/googletest/test/gtest_test_utils.py | 318 +
 .../gtest/googletest/test/gtest_testbridge_test.py | 65 +
 .../googletest/test/gtest_testbridge_test_.cc | 44 +
 .../test/gtest_throw_on_failure_ex_test.cc | 92 +
 .../googletest/test/gtest_throw_on_failure_test.py | 171 +
 .../test/gtest_throw_on_failure_test_.cc | 72 +
 .../googletest/test/gtest_uninitialized_test.py | 69 +
 .../googletest/test/gtest_uninitialized_test_.cc | 43 +
 .../gtest/googletest/test/gtest_unittest.cc | 7797 ++++++++++++++++++++
 .../googletest/test/gtest_xml_outfile1_test_.cc | 48 +
 .../googletest/test/gtest_xml_outfile2_test_.cc | 48 +
 .../googletest/test/gtest_xml_outfiles_test.py | 140 +
 .../googletest/test/gtest_xml_output_unittest.py | 378 +
 .../googletest/test/gtest_xml_output_unittest_.cc | 179 +
 .../gtest/googletest/test/gtest_xml_test_utils.py | 196 +
 .../thirdparty/gtest/googletest/test/production.cc | 36 +
 .../thirdparty/gtest/googletest/test/production.h | 55 +
 .../googletest/xcode/Config/DebugProject.xcconfig | 30 +
 .../xcode/Config/FrameworkTarget.xcconfig | 17 +
 .../gtest/googletest/xcode/Config/General.xcconfig | 41 +
 .../xcode/Config/ReleaseProject.xcconfig | 32 +
 .../xcode/Config/StaticLibraryTarget.xcconfig | 18 +
 .../googletest/xcode/Config/TestTarget.xcconfig | 8 +
 .../gtest/googletest/xcode/Resources/Info.plist | 30 +
 .../xcode/Samples/FrameworkSample/Info.plist | 28 +
 .../WidgetFramework.xcodeproj/project.pbxproj | 457 ++
 .../xcode/Samples/FrameworkSample/runtests.sh | 62 +
 .../xcode/Samples/FrameworkSample/widget.cc | 63 +
 .../xcode/Samples/FrameworkSample/widget.h | 59 +
 .../xcode/Samples/FrameworkSample/widget_test.cc | 68 +
 .../gtest/googletest/xcode/Scripts/runtests.sh | 65 +
 .../googletest/xcode/Scripts/versiongenerate.py | 100 +
 .../xcode/gtest.xcodeproj/project.pbxproj | 1182 +++
 src/s3select/rapidjson/travis-doxygen.sh | 128 +
 src/s3select/s3select-parse-s.png | Bin 0 -> 193829 bytes
 src/s3select/s3select.rst | 259 +
 src/s3select/test/CMakeLists.txt | 26 +
 src/s3select/test/s3select_perf_test.cpp | 488 ++
 src/s3select/test/s3select_test.cpp | 3446 +++++++++
 src/s3select/test/s3select_test.h | 709 +
 src/s3select/test/s3select_test_json_parser.cpp | 1826 +++++
 775 files changed, 228306 insertions(+)
 create mode 100644 src/s3select/.github/workflows/clang-tidy.yml
 create mode 100644 src/s3select/.github/workflows/cmake.yml
 create mode 100644 src/s3select/.gitignore
 create mode 100644 src/s3select/.gitmodules
 create mode 100644 src/s3select/CMakeLists.txt
 create mode 100644 src/s3select/Dockerfile
 create mode 100644 src/s3select/LICENSE
 create mode 100644 src/s3select/README.md
 create mode 100644 src/s3select/TPCDS/ddl/create_tpcds_tables.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/README.md
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query1.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query10.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query11.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query12.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query13.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query14.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query15.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query16.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query17.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query18.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query19.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query2.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query20.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query21.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query22.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query23.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query24.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query25.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query26.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query27.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query28.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query29.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query3.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query30.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query31.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query32.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query33.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query34.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query35.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query36.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query37.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query38.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query39.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query4.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query40.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query41.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query42.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query43.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query44.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query45.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query46.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query47.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query48.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query49.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query5.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query50.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query51.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query52.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query53.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query54.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query55.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query56.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query57.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query58.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query59.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query6.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query60.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query61.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query62.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query63.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query64.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query65.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query66.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query67.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query68.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query69.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query7.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query70.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query71.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query72.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query73.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query74.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query75.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query76.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query77.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query78.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query79.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query8.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query80.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query81.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query82.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query83.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query84.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query85.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query86.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query87.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query88.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query89.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query9.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query90.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query91.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query92.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query93.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query94.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query95.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query96.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query97.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query98.sql
 create mode 100644 src/s3select/TPCDS/sample-queries-tpcds/query99.sql
 create mode 100644 src/s3select/TPCDS/tpcds_functions.bash
 create mode 100644 src/s3select/container/trino/hms_trino.yaml
 create mode 100644 src/s3select/container/trino/run_trino_on_ceph.bash
 create mode 100644 src/s3select/container/trino/trino/catalog/hive.properties
 create mode 100644 src/s3select/container/trino/trino/config.properties
 create mode 100644 src/s3select/container/trino/trino/jvm.config
 create mode 100644 src/s3select/container/trino/trino/log.properties
 create mode 100644 src/s3select/container/trino/trino/node.properties
 create mode 100644 src/s3select/example/CMakeLists.txt
 create mode 100644 src/s3select/example/csv_to_parquet.cpp
 create mode 100755 src/s3select/example/expr_genrator.py
 create mode 100644 src/s3select/example/generate_rand_csv.c
 create mode 100755 src/s3select/example/parse_csv.py
 create mode 100755 src/s3select/example/run_test.bash
 create mode 100644 src/s3select/example/s3select_example.cpp
 create mode 100644 src/s3select/include/csvparser/LICENSE
 create mode 100644 src/s3select/include/csvparser/README.md
 create mode 100644 src/s3select/include/csvparser/csv.h
 create mode 100644 src/s3select/include/encryption_internal.h
 create mode 100644 src/s3select/include/internal_file_decryptor.h
 create mode 100644 src/s3select/include/s3select.h
 create mode 100644 src/s3select/include/s3select_csv_parser.h
 create mode 100644 src/s3select/include/s3select_functions.h
 create mode 100644 src/s3select/include/s3select_json_parser.h
 create mode 100644 src/s3select/include/s3select_oper.h
 create mode 100644 src/s3select/include/s3select_parquet_intrf.h
 create mode 100644 src/s3select/parquet_mix_types.parquet
 create mode 100644 src/s3select/rapidjson/.gitattributes
 create mode 100644 src/s3select/rapidjson/.gitignore
 create mode 100644 src/s3select/rapidjson/.gitmodules
 create mode 100644 src/s3select/rapidjson/.travis.yml
 create mode 100644 src/s3select/rapidjson/CHANGELOG.md
 create mode 100644 src/s3select/rapidjson/CMakeLists.txt
 create mode 100644 src/s3select/rapidjson/CMakeModules/FindGTestSrc.cmake
 create mode 100644 src/s3select/rapidjson/RapidJSON.pc.in
 create mode 100644 src/s3select/rapidjson/RapidJSONConfig.cmake.in
 create mode 100644 src/s3select/rapidjson/RapidJSONConfigVersion.cmake.in
 create mode 100644 src/s3select/rapidjson/appveyor.yml
 create mode 100644 src/s3select/rapidjson/bin/data/abcde.txt
 create mode 100644 src/s3select/rapidjson/bin/data/glossary.json
 create mode 100644 src/s3select/rapidjson/bin/data/menu.json
 create mode 100644 src/s3select/rapidjson/bin/data/readme.txt
 create mode 100644 src/s3select/rapidjson/bin/data/sample.json
 create mode 100644 src/s3select/rapidjson/bin/data/webapp.json
 create mode 100644 src/s3select/rapidjson/bin/data/widget.json
 create mode 100644 src/s3select/rapidjson/bin/draft-04/schema
 create mode 100644 src/s3select/rapidjson/bin/encodings/utf16be.json
 create mode 100644 src/s3select/rapidjson/bin/encodings/utf16bebom.json
 create mode 100644 src/s3select/rapidjson/bin/encodings/utf16le.json
 create mode 100644 src/s3select/rapidjson/bin/encodings/utf16lebom.json
 create mode 100644 src/s3select/rapidjson/bin/encodings/utf32be.json
 create mode 100644 src/s3select/rapidjson/bin/encodings/utf32bebom.json
 create mode 100644 src/s3select/rapidjson/bin/encodings/utf32le.json
 create mode 100644 src/s3select/rapidjson/bin/encodings/utf32lebom.json
 create mode 100644 src/s3select/rapidjson/bin/encodings/utf8.json
 create mode 100644 src/s3select/rapidjson/bin/encodings/utf8bom.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail1.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail10.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail11.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail12.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail13.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail14.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail15.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail16.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail17.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail18.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail19.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail2.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail20.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail21.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail22.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail23.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail24.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail25.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail26.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail27.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail28.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail29.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail3.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail30.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail31.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail32.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail33.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail4.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail5.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail6.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail7.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail8.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/fail9.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/pass1.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/pass2.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/pass3.json
 create mode 100644 src/s3select/rapidjson/bin/jsonchecker/readme.txt
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/.gitignore
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/.travis.yml
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/LICENSE
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/README.md
 create mode 100755 src/s3select/rapidjson/bin/jsonschema/bin/jsonschema_suite
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/remotes/folder/folderInteger.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/remotes/integer.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/remotes/subSchemas.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/additionalItems.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/additionalProperties.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/default.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/dependencies.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/disallow.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/divisibleBy.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/enum.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/extends.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/items.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/maxItems.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/maxLength.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/maximum.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/minItems.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/minLength.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/minimum.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/bignum.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/format.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/jsregex.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/zeroTerminatedFloats.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/pattern.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/patternProperties.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/properties.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/ref.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/refRemote.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/required.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/type.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft3/uniqueItems.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/additionalItems.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/additionalProperties.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/allOf.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/anyOf.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/default.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/definitions.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/dependencies.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/enum.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/items.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxItems.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxLength.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxProperties.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/maximum.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/minItems.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/minLength.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/minProperties.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/minimum.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/multipleOf.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/not.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/oneOf.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/bignum.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/format.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/zeroTerminatedFloats.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/pattern.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/patternProperties.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/properties.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/ref.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/refRemote.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/required.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/type.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tests/draft4/uniqueItems.json
 create mode 100644 src/s3select/rapidjson/bin/jsonschema/tox.ini
 create mode 100644 src/s3select/rapidjson/bin/types/alotofkeys.json
 create mode 100644 src/s3select/rapidjson/bin/types/booleans.json
 create mode 100644 src/s3select/rapidjson/bin/types/floats.json
 create mode 100644 src/s3select/rapidjson/bin/types/guids.json
 create mode 100644 src/s3select/rapidjson/bin/types/integers.json
 create mode 100644 src/s3select/rapidjson/bin/types/mixed.json
 create mode 100644 src/s3select/rapidjson/bin/types/nulls.json
 create mode 100644 src/s3select/rapidjson/bin/types/paragraphs.json
 create mode 100644 src/s3select/rapidjson/bin/types/readme.txt
 create mode 100644 src/s3select/rapidjson/bin/unittestschema/address.json
 create mode 100644 src/s3select/rapidjson/bin/unittestschema/allOf_address.json
 create mode 100644 src/s3select/rapidjson/bin/unittestschema/anyOf_address.json
 create mode 100644 src/s3select/rapidjson/bin/unittestschema/idandref.json
 create mode 100644 src/s3select/rapidjson/bin/unittestschema/oneOf_address.json
 create mode 100644 src/s3select/rapidjson/contrib/natvis/LICENSE
 create mode 100644 src/s3select/rapidjson/contrib/natvis/README.md
 create mode 100644 src/s3select/rapidjson/contrib/natvis/rapidjson.natvis
 create mode 100644 src/s3select/rapidjson/doc/CMakeLists.txt
 create mode 100644 src/s3select/rapidjson/doc/Doxyfile.in
 create mode 100644 src/s3select/rapidjson/doc/Doxyfile.zh-cn.in
 create mode 100644 src/s3select/rapidjson/doc/diagram/architecture.dot
 create mode 100644 src/s3select/rapidjson/doc/diagram/architecture.png
 create mode 100644 src/s3select/rapidjson/doc/diagram/insituparsing.dot
 create mode 100644 src/s3select/rapidjson/doc/diagram/insituparsing.png
 create mode 100644 src/s3select/rapidjson/doc/diagram/iterative-parser-states-diagram.dot
 create mode 100644 src/s3select/rapidjson/doc/diagram/iterative-parser-states-diagram.png
 create mode 100644 src/s3select/rapidjson/doc/diagram/makefile
 create mode 100644 src/s3select/rapidjson/doc/diagram/move1.dot
 create mode 100644 src/s3select/rapidjson/doc/diagram/move1.png
 create mode 100644 src/s3select/rapidjson/doc/diagram/move2.dot
 create mode 100644 src/s3select/rapidjson/doc/diagram/move2.png
 create mode 100644 src/s3select/rapidjson/doc/diagram/move3.dot
 create mode 100644 src/s3select/rapidjson/doc/diagram/move3.png
 create mode 100644 src/s3select/rapidjson/doc/diagram/normalparsing.dot
 create mode 100644 src/s3select/rapidjson/doc/diagram/normalparsing.png
 create mode 100644 src/s3select/rapidjson/doc/diagram/simpledom.dot
 create mode 100644 src/s3select/rapidjson/doc/diagram/simpledom.png
 create mode 100644 src/s3select/rapidjson/doc/diagram/tutorial.dot
 create mode 100644 src/s3select/rapidjson/doc/diagram/tutorial.png
 create mode 100644 src/s3select/rapidjson/doc/diagram/utilityclass.dot
 create mode 100644 src/s3select/rapidjson/doc/diagram/utilityclass.png
 create mode 100644 src/s3select/rapidjson/doc/dom.md
 create mode 100644 src/s3select/rapidjson/doc/dom.zh-cn.md
 create mode 100644 src/s3select/rapidjson/doc/encoding.md
 create mode 100644 src/s3select/rapidjson/doc/encoding.zh-cn.md
 create mode 100644 src/s3select/rapidjson/doc/faq.md
 create mode 100644 src/s3select/rapidjson/doc/faq.zh-cn.md
 create mode 100644 src/s3select/rapidjson/doc/features.md
 create mode 100644 src/s3select/rapidjson/doc/features.zh-cn.md
 create mode 100644 src/s3select/rapidjson/doc/internals.md
 create mode 100644 src/s3select/rapidjson/doc/internals.zh-cn.md
 create mode 100644 src/s3select/rapidjson/doc/logo/rapidjson.png
 create mode 100644 src/s3select/rapidjson/doc/logo/rapidjson.svg
 create mode 100644 src/s3select/rapidjson/doc/misc/DoxygenLayout.xml
 create mode 100644 src/s3select/rapidjson/doc/misc/doxygenextra.css
 create mode 100644 src/s3select/rapidjson/doc/misc/footer.html
 create mode 100644 src/s3select/rapidjson/doc/misc/header.html
 create mode 100644 src/s3select/rapidjson/doc/npm.md
 create mode 100644 src/s3select/rapidjson/doc/performance.md
 create mode 100644 src/s3select/rapidjson/doc/performance.zh-cn.md
 create mode 100644 src/s3select/rapidjson/doc/pointer.md
 create mode 100644 src/s3select/rapidjson/doc/pointer.zh-cn.md
 create mode 100644 src/s3select/rapidjson/doc/sax.md
 create mode 100644 src/s3select/rapidjson/doc/sax.zh-cn.md
 create mode 100644 src/s3select/rapidjson/doc/schema.md
 create mode 100644 src/s3select/rapidjson/doc/schema.zh-cn.md
 create mode 100644 src/s3select/rapidjson/doc/stream.md
 create mode 100644 src/s3select/rapidjson/doc/stream.zh-cn.md
 create mode 100644 src/s3select/rapidjson/doc/tutorial.md
 create mode 100644 src/s3select/rapidjson/doc/tutorial.zh-cn.md
 create mode 100644 src/s3select/rapidjson/docker/debian/Dockerfile
 create mode 100644 src/s3select/rapidjson/example/CMakeLists.txt
 create mode 100644 src/s3select/rapidjson/example/archiver/archiver.cpp
 create mode 100644 src/s3select/rapidjson/example/archiver/archiver.h
 create mode 100644 src/s3select/rapidjson/example/archiver/archivertest.cpp
 create mode 100644 src/s3select/rapidjson/example/capitalize/capitalize.cpp
 create mode 100644 src/s3select/rapidjson/example/condense/condense.cpp
 create mode 100644 src/s3select/rapidjson/example/filterkey/filterkey.cpp
 create mode 100644 src/s3select/rapidjson/example/filterkeydom/filterkeydom.cpp
 create mode 100644 src/s3select/rapidjson/example/jsonx/jsonx.cpp
 create mode 100644 src/s3select/rapidjson/example/lookaheadparser/lookaheadparser.cpp
 create mode 100644 src/s3select/rapidjson/example/messagereader/messagereader.cpp
 create mode 100644 src/s3select/rapidjson/example/parsebyparts/parsebyparts.cpp
 create mode 100644 src/s3select/rapidjson/example/pretty/pretty.cpp
 create mode 100644 src/s3select/rapidjson/example/prettyauto/prettyauto.cpp
 create mode 100644
src/s3select/rapidjson/example/schemavalidator/schemavalidator.cpp create mode 100644 src/s3select/rapidjson/example/serialize/serialize.cpp create mode 100644 src/s3select/rapidjson/example/simpledom/simpledom.cpp create mode 100644 src/s3select/rapidjson/example/simplepullreader/simplepullreader.cpp create mode 100644 src/s3select/rapidjson/example/simplereader/simplereader.cpp create mode 100644 src/s3select/rapidjson/example/simplewriter/simplewriter.cpp create mode 100644 src/s3select/rapidjson/example/sortkeys/sortkeys.cpp create mode 100644 src/s3select/rapidjson/example/traverseaspointer.cpp create mode 100644 src/s3select/rapidjson/example/tutorial/tutorial.cpp create mode 100644 src/s3select/rapidjson/include/rapidjson/allocators.h create mode 100644 src/s3select/rapidjson/include/rapidjson/cursorstreamwrapper.h create mode 100644 src/s3select/rapidjson/include/rapidjson/document.h create mode 100644 src/s3select/rapidjson/include/rapidjson/encodedstream.h create mode 100644 src/s3select/rapidjson/include/rapidjson/encodings.h create mode 100644 src/s3select/rapidjson/include/rapidjson/error/en.h create mode 100644 src/s3select/rapidjson/include/rapidjson/error/error.h create mode 100644 src/s3select/rapidjson/include/rapidjson/filereadstream.h create mode 100644 src/s3select/rapidjson/include/rapidjson/filewritestream.h create mode 100644 src/s3select/rapidjson/include/rapidjson/fwd.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/biginteger.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/clzll.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/diyfp.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/dtoa.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/ieee754.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/itoa.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/meta.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/pow10.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/regex.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/stack.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/strfunc.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/strtod.h create mode 100644 src/s3select/rapidjson/include/rapidjson/internal/swap.h create mode 100644 src/s3select/rapidjson/include/rapidjson/istreamwrapper.h create mode 100644 src/s3select/rapidjson/include/rapidjson/memorybuffer.h create mode 100644 src/s3select/rapidjson/include/rapidjson/memorystream.h create mode 100644 src/s3select/rapidjson/include/rapidjson/msinttypes/inttypes.h create mode 100644 src/s3select/rapidjson/include/rapidjson/msinttypes/stdint.h create mode 100644 src/s3select/rapidjson/include/rapidjson/ostreamwrapper.h create mode 100644 src/s3select/rapidjson/include/rapidjson/pointer.h create mode 100644 src/s3select/rapidjson/include/rapidjson/prettywriter.h create mode 100644 src/s3select/rapidjson/include/rapidjson/rapidjson.h create mode 100644 src/s3select/rapidjson/include/rapidjson/reader.h create mode 100644 src/s3select/rapidjson/include/rapidjson/schema.h create mode 100644 src/s3select/rapidjson/include/rapidjson/stream.h create mode 100644 src/s3select/rapidjson/include/rapidjson/stringbuffer.h create mode 100644 src/s3select/rapidjson/include/rapidjson/uri.h create mode 100644 src/s3select/rapidjson/include/rapidjson/writer.h create mode 100644 
src/s3select/rapidjson/include_dirs.js create mode 100644 src/s3select/rapidjson/library.json create mode 100644 src/s3select/rapidjson/license.txt create mode 100644 src/s3select/rapidjson/package.json create mode 100644 src/s3select/rapidjson/rapidjson.autopkg create mode 100644 src/s3select/rapidjson/readme.md create mode 100644 src/s3select/rapidjson/readme.zh-cn.md create mode 100644 src/s3select/rapidjson/test/CMakeLists.txt create mode 100644 src/s3select/rapidjson/test/perftest/CMakeLists.txt create mode 100644 src/s3select/rapidjson/test/perftest/misctest.cpp create mode 100644 src/s3select/rapidjson/test/perftest/perftest.cpp create mode 100644 src/s3select/rapidjson/test/perftest/perftest.h create mode 100644 src/s3select/rapidjson/test/perftest/platformtest.cpp create mode 100644 src/s3select/rapidjson/test/perftest/rapidjsontest.cpp create mode 100644 src/s3select/rapidjson/test/perftest/schematest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/CMakeLists.txt create mode 100644 src/s3select/rapidjson/test/unittest/allocatorstest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/bigintegertest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/clzlltest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/cursorstreamwrappertest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/documenttest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/dtoatest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/encodedstreamtest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/encodingstest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/filestreamtest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/fwdtest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/istreamwrappertest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/itoatest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/jsoncheckertest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/namespacetest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/ostreamwrappertest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/platformtest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/pointertest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/prettywritertest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/readertest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/regextest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/schematest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/simdtest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/strfunctest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/stringbuffertest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/strtodtest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/unittest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/unittest.h create mode 100644 src/s3select/rapidjson/test/unittest/uritest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/valuetest.cpp create mode 100644 src/s3select/rapidjson/test/unittest/writertest.cpp create mode 100644 src/s3select/rapidjson/test/valgrind.supp create mode 100644 src/s3select/rapidjson/thirdparty/gtest/.gitignore create mode 100644 src/s3select/rapidjson/thirdparty/gtest/.travis.yml create mode 100644 src/s3select/rapidjson/thirdparty/gtest/BUILD.bazel create mode 100644 src/s3select/rapidjson/thirdparty/gtest/CMakeLists.txt create mode 100644 
src/s3select/rapidjson/thirdparty/gtest/CONTRIBUTING.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/LICENSE create mode 100644 src/s3select/rapidjson/thirdparty/gtest/Makefile.am create mode 100644 src/s3select/rapidjson/thirdparty/gtest/README.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/WORKSPACE create mode 100644 src/s3select/rapidjson/thirdparty/gtest/appveyor.yml create mode 100755 src/s3select/rapidjson/thirdparty/gtest/ci/build-linux-autotools.sh create mode 100755 src/s3select/rapidjson/thirdparty/gtest/ci/build-linux-bazel.sh create mode 100755 src/s3select/rapidjson/thirdparty/gtest/ci/env-linux.sh create mode 100755 src/s3select/rapidjson/thirdparty/gtest/ci/env-osx.sh create mode 100755 src/s3select/rapidjson/thirdparty/gtest/ci/get-nprocessors.sh create mode 100755 src/s3select/rapidjson/thirdparty/gtest/ci/install-linux.sh create mode 100755 src/s3select/rapidjson/thirdparty/gtest/ci/install-osx.sh create mode 100755 src/s3select/rapidjson/thirdparty/gtest/ci/log-config.sh create mode 100755 src/s3select/rapidjson/thirdparty/gtest/ci/travis.sh create mode 100644 src/s3select/rapidjson/thirdparty/gtest/configure.ac create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/CHANGES create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/CMakeLists.txt create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/CONTRIBUTORS create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/LICENSE create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/Makefile.am create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/README.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/build-aux/.keep create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/cmake/gmock.pc.in create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/cmake/gmock_main.pc.in create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/configure.ac create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/docs/CheatSheet.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/docs/CookBook.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/docs/DesignDoc.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/docs/Documentation.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/docs/ForDummies.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/docs/FrequentlyAskedQuestions.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/docs/KnownIssues.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-actions.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-cardinalities.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-generated-actions.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-generated-actions.h.pump create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-generated-function-mockers.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-generated-function-mockers.h.pump create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-generated-matchers.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-generated-matchers.h.pump 
create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-generated-nice-strict.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-generated-nice-strict.h.pump create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-matchers.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-more-actions.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-more-matchers.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock-spec-builders.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/gmock.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/internal/custom/gmock-generated-actions.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/internal/custom/gmock-generated-actions.h.pump create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/internal/custom/gmock-matchers.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/internal/custom/gmock-port.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/internal/gmock-generated-internal-utils.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/internal/gmock-generated-internal-utils.h.pump create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/internal/gmock-internal-utils.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/include/gmock/internal/gmock-port.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/make/Makefile create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2005/gmock.sln create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2005/gmock.vcproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2005/gmock_config.vsprops create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2005/gmock_main.vcproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2005/gmock_test.vcproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2010/gmock.sln create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2010/gmock.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2010/gmock_config.props create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2010/gmock_main.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2010/gmock_test.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2015/gmock.sln create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2015/gmock.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2015/gmock_config.props create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2015/gmock_main.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/msvc/2015/gmock_test.vcxproj create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/fuse_gmock_files.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/LICENSE create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/README create mode 100644 
src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/README.cppclean create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/cpp/__init__.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/cpp/ast.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/cpp/gmock_class.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/cpp/gmock_class_test.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/cpp/keywords.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/cpp/tokenize.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/cpp/utils.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/generator/gmock_gen.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/gmock-config.in create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/gmock_doctor.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/upload.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/scripts/upload_gmock.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/src/gmock-all.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/src/gmock-cardinalities.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/src/gmock-internal-utils.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/src/gmock-matchers.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/src/gmock-spec-builders.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/src/gmock.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/src/gmock_main.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/BUILD.bazel create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-actions_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-cardinalities_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-generated-actions_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-generated-function-mockers_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-generated-internal-utils_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-generated-matchers_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-internal-utils_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-matchers_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-more-actions_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-nice-strict_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-port_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock-spec-builders_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_all_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_ex_test.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_leak_test.py create mode 
100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_leak_test_.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_link2_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_link_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_link_test.h create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_output_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_output_test_.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_output_test_golden.txt create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_stress_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_test.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googlemock/test/gmock_test_utils.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/CHANGES create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/CMakeLists.txt create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/CONTRIBUTORS create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/LICENSE create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/Makefile.am create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/README.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/cmake/gtest.pc.in create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/cmake/gtest_main.pc.in create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/cmake/internal_utils.cmake create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/codegear/gtest.cbproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/codegear/gtest.groupproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/codegear/gtest_all.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/codegear/gtest_link.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/codegear/gtest_main.cbproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/codegear/gtest_unittest.cbproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/configure.ac create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/docs/Pkgconfig.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/docs/PumpManual.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/docs/XcodeGuide.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/docs/advanced.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/docs/faq.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/docs/primer.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/docs/samples.md create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest-death-test.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest-message.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest-param-test.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest-param-test.h.pump create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest-printers.h create mode 100644 
src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest-spi.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest-test-part.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest-typed-test.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest_pred_impl.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/gtest_prod.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/custom/gtest-port.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/custom/gtest-printers.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/custom/gtest.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-death-test-internal.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-filepath.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-internal.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-linked_ptr.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-param-util-generated.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-param-util-generated.h.pump create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-param-util.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-port-arch.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-port.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-string.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-tuple.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-tuple.h.pump create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-type-util.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/include/gtest/internal/gtest-type-util.h.pump create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/m4/acx_pthread.m4 create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/m4/gtest.m4 create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/make/Makefile create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest-md.sln create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest-md.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest-md.vcxproj.filters create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest.sln create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest.vcxproj.filters create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_main-md.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_main-md.vcxproj.filters create mode 
100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_main.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_main.vcxproj.filters create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_prod_test-md.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_prod_test-md.vcxproj.filters create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_prod_test.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_prod_test.vcxproj.filters create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_unittest-md.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_unittest-md.vcxproj.filters create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_unittest.vcxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/msvc/2010/gtest_unittest.vcxproj.filters create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/prime_tables.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample1.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample1.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample10_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample1_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample2.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample2.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample2_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample3-inl.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample3_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample4.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample4.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample4_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample5_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample6_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample7_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample8_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/samples/sample9_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/scripts/common.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/scripts/fuse_gtest_files.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/scripts/gen_gtest_pred_impl.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/scripts/gtest-config.in create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/scripts/pump.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/scripts/release_docs.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/scripts/test/Makefile create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/scripts/upload.py create mode 100755 
src/s3select/rapidjson/thirdparty/gtest/googletest/scripts/upload_gtest.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/src/gtest-all.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/src/gtest-death-test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/src/gtest-filepath.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/src/gtest-internal-inl.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/src/gtest-port.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/src/gtest-printers.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/src/gtest-test-part.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/src/gtest-typed-test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/src/gtest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/src/gtest_main.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/BUILD.bazel create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-death-test_ex_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-death-test_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-filepath_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-linked_ptr_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-listener_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-message_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-options_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-param-test2_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-param-test_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-param-test_test.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-port_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-printers_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-test-part_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-tuple_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-typed-test2_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-typed-test_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-typed-test_test.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest-unittest-api_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_all_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_assert_by_exception_test.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_break_on_failure_unittest.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_break_on_failure_unittest_.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_catch_exceptions_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_catch_exceptions_test_.cc create mode 100755 
src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_color_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_color_test_.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_env_var_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_env_var_test_.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_environment_test.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_filter_unittest.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_filter_unittest_.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_help_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_help_test_.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_json_outfiles_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_json_output_unittest.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_json_test_utils.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_list_tests_unittest.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_list_tests_unittest_.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_main_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_no_test_unittest.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_output_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_output_test_.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_output_test_golden_lin.txt create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_pred_impl_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_premature_exit_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_prod_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_repeat_test.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_shuffle_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_shuffle_test_.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_sole_header_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_stress_test.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_test_utils.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_testbridge_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_testbridge_test_.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_throw_on_failure_ex_test.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_throw_on_failure_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_throw_on_failure_test_.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_uninitialized_test.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_uninitialized_test_.cc create mode 100644 
src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_unittest.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_xml_outfile1_test_.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_xml_outfile2_test_.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_xml_outfiles_test.py create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_xml_output_unittest.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_xml_output_unittest_.cc create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/test/gtest_xml_test_utils.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/production.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/test/production.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Config/DebugProject.xcconfig create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Config/FrameworkTarget.xcconfig create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Config/General.xcconfig create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Config/ReleaseProject.xcconfig create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Config/StaticLibraryTarget.xcconfig create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Config/TestTarget.xcconfig create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Resources/Info.plist create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Samples/FrameworkSample/Info.plist create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Samples/FrameworkSample/WidgetFramework.xcodeproj/project.pbxproj create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Samples/FrameworkSample/runtests.sh create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Samples/FrameworkSample/widget.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Samples/FrameworkSample/widget.h create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Samples/FrameworkSample/widget_test.cc create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Scripts/runtests.sh create mode 100755 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/Scripts/versiongenerate.py create mode 100644 src/s3select/rapidjson/thirdparty/gtest/googletest/xcode/gtest.xcodeproj/project.pbxproj create mode 100755 src/s3select/rapidjson/travis-doxygen.sh create mode 100644 src/s3select/s3select-parse-s.png create mode 100644 src/s3select/s3select.rst create mode 100644 src/s3select/test/CMakeLists.txt create mode 100644 src/s3select/test/s3select_perf_test.cpp create mode 100644 src/s3select/test/s3select_test.cpp create mode 100644 src/s3select/test/s3select_test.h create mode 100644 src/s3select/test/s3select_test_json_parser.cpp (limited to 'src/s3select')

diff --git a/src/s3select/.github/workflows/clang-tidy.yml b/src/s3select/.github/workflows/clang-tidy.yml
new file mode 100644
index 000000000..c3c95a3e3
--- /dev/null
+++ b/src/s3select/.github/workflows/clang-tidy.yml
@@ -0,0 +1,65 @@
+name: clang-tidy
+
+on: [push]
+
+env:
+  # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.)
+  BUILD_TYPE: Release
+
+jobs:
+  clang-tidy:
+    runs-on: ubuntu-20.04
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Checkout submodules
+      run: git submodule update --init --recursive
+
+    - name: install-clang-tidy
+      run: sudo apt-get install -y clang-tidy
+
+    - name: install-boost
+      run: |
+        sudo apt-get update
+        sudo apt-get install -y libboost-all-dev
+
+    - name: fetch-gtest
+      run: sudo apt-get install -y libgtest-dev
+
+    - name: install arrow
+      run: |
+        sudo apt-get update
+        sudo apt-get install -y -V ca-certificates lsb-release wget
+        sudo curl -L https://dist.apache.org/repos/dist/dev/arrow/KEYS | sudo apt-key add -
+        sudo add-apt-repository "deb [arch=amd64] https://apache.jfrog.io/artifactory/arrow/ubuntu focal main"
+        sudo apt-get update
+        sudo apt-get install -y -V libarrow-dev
+
+    - name: install parquet
+      run: sudo apt-get install -y -V libparquet-dev
+
+
+    - name: install-gtest
+      run: cd /usr/src/gtest && sudo cmake . && sudo make && sudo cp lib/*.a /usr/lib || sudo cp *.a /usr/lib
+
+    - name: install-gperftools
+      run: sudo apt-get install -y libgoogle-perftools-dev
+
+    - name: Create Build Environment
+      # Some projects don't allow in-source building, so create a separate build directory
+      # We'll use this as our working directory for all subsequent commands
+      run: cmake -E make_directory ${{github.workspace}}/build
+
+    - name: Configure CMake
+      # Use a bash shell so we can use the same syntax for environment variable
+      # access regardless of the host operating system
+      shell: bash
+      working-directory: ${{github.workspace}}/build
+      # Note the current convention is to use the -S and -B options here to specify source
+      # and build directories, but this is only available with CMake 3.13 and higher.
+      # The CMake binaries on the GitHub Actions machines are (as of this writing) 3.12
+      run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=$BUILD_TYPE
+
+    - name: run-clang-tidy
+      run: clang-tidy -p ${{github.workspace}}/build -header-filter=.* -checks=-*,clang-analyzer-*,concurrency-*,cert-*,-cert-err58-cpp,google-explicit-constructor,misc-redundant-expression,readability-braces-around-statements.ShortStatementLines=1,readability-delete-null-pointer,readability-make-member-function-const,cppcoreguidelines-special-member-functions example/s3select_example.cpp
+
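The run-clang-tidy step above can be reproduced locally for faster iteration. A minimal sketch, assuming the repository root as the working directory and an abbreviated check list; clang-tidy finds compile_commands.json through -p because the CMakeLists.txt added later in this patch sets CMAKE_EXPORT_COMPILE_COMMANDS ON:

    # configure once so the build directory contains compile_commands.json
    mkdir -p build && (cd build && cmake .. -DCMAKE_BUILD_TYPE=Release)
    # run a subset of the workflow's checks against the example driver
    clang-tidy -p build -header-filter='.*' \
      -checks='-*,clang-analyzer-*,cert-*,-cert-err58-cpp' \
      example/s3select_example.cpp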
diff --git a/src/s3select/.github/workflows/cmake.yml b/src/s3select/.github/workflows/cmake.yml
new file mode 100644
index 000000000..577eb19cc
--- /dev/null
+++ b/src/s3select/.github/workflows/cmake.yml
@@ -0,0 +1,76 @@
+name: CMake
+
+on: [push]
+
+env:
+  # Customize the CMake build type here (Release, Debug, RelWithDebInfo, etc.)
+  BUILD_TYPE: Release
+
+jobs:
+  build:
+    # The CMake configure and build commands are platform agnostic and should work equally
+    # well on Windows or Mac. You can convert this to a matrix build if you need
+    # cross-platform coverage.
+    # See: https://docs.github.com/en/free-pro-team@latest/actions/learn-github-actions/managing-complex-workflows#using-a-build-matrix
+    runs-on: ubuntu-20.04
+
+    steps:
+    - uses: actions/checkout@v2
+    - name: Checkout submodules
+      run: git submodule update --init --recursive
+
+    - name: install-boost
+      run: |
+        sudo apt-get update
+        sudo apt-get install -y libboost-all-dev
+
+    - name: fetch-gtest
+      run: sudo apt-get install -y libgtest-dev
+
+    - name: install arrow
+      run: |
+        sudo apt-get update
+        sudo apt-get install -y -V ca-certificates lsb-release wget
+        sudo curl -L https://dist.apache.org/repos/dist/dev/arrow/KEYS | sudo apt-key add -
+        sudo add-apt-repository "deb [arch=amd64] https://apache.jfrog.io/artifactory/arrow/ubuntu focal main"
+        sudo apt-get update
+        sudo apt-get install -y -V libarrow-dev
+
+    - name: install parquet
+      run: sudo apt-get install -y -V libparquet-dev
+
+
+    - name: install-gtest
+      run: cd /usr/src/gtest && sudo cmake . && sudo make && sudo cp lib/*.a /usr/lib || sudo cp *.a /usr/lib
+
+    - name: install-gperftools
+      run: sudo apt-get install -y libgoogle-perftools-dev
+
+    - name: Create Build Environment
+      # Some projects don't allow in-source building, so create a separate build directory
+      # We'll use this as our working directory for all subsequent commands
+      run: cmake -E make_directory ${{github.workspace}}/build
+
+    - name: Configure CMake
+      # Use a bash shell so we can use the same syntax for environment variable
+      # access regardless of the host operating system
+      shell: bash
+      working-directory: ${{github.workspace}}/build
+      # Note the current convention is to use the -S and -B options here to specify source
+      # and build directories, but this is only available with CMake 3.13 and higher.
+      # The CMake binaries on the GitHub Actions machines are (as of this writing) 3.12
+      run: cmake $GITHUB_WORKSPACE -DCMAKE_BUILD_TYPE=$BUILD_TYPE
+
+    - name: Build
+      working-directory: ${{github.workspace}}/build
+      shell: bash
+      # Execute the build. You can specify a specific target with "--target <target>"
+      run: cmake --build . --config $BUILD_TYPE
+
+    - name: Test
+      working-directory: ${{github.workspace}}/build
+      shell: bash
+      # Execute tests defined by the CMake configuration.
+      # See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail
+      run: ctest --rerun-failed --output-on-failure -C $BUILD_TYPE
+
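As the configure-step comment notes, the -S/-B convention requires CMake 3.13 or newer, which is why the workflow configures from inside the build directory instead. With a newer CMake installed locally, the equivalent out-of-source configure, build, and test sequence would be roughly:

    cmake -S . -B build -DCMAKE_BUILD_TYPE=Release
    cmake --build build --config Release
    (cd build && ctest --output-on-failure -C Release)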
diff --git a/src/s3select/.gitignore b/src/s3select/.gitignore
new file mode 100644
index 000000000..d6536badc
--- /dev/null
+++ b/src/s3select/.gitignore
@@ -0,0 +1,2 @@
+build
+compile_commands.json
diff --git a/src/s3select/.gitmodules b/src/s3select/.gitmodules
new file mode 100644
index 000000000..a33a6d70c
--- /dev/null
+++ b/src/s3select/.gitmodules
@@ -0,0 +1,6 @@
+[submodule "rapidjson"]
+	path = rapidjson
+	url = https://github.com/Tencent/rapidjson.git
+[submodule "include/csvparser"]
+	path = include/csvparser
+	url = https://github.com/ben-strasser/fast-cpp-csv-parser.git
diff --git a/src/s3select/CMakeLists.txt b/src/s3select/CMakeLists.txt
new file mode 100644
index 000000000..25e644c65
--- /dev/null
+++ b/src/s3select/CMakeLists.txt
@@ -0,0 +1,59 @@
+cmake_minimum_required(VERSION 3.0)
+
+project(s3select)
+
+find_package(Arrow QUIET)
+
+if(Arrow_FOUND)
+  message( "arrow is installed")
+  add_definitions(-D_ARROW_EXIST)
+endif()
+
+if(DEFINED ENV{DEBUG})
+  set(CMAKE_CXX_FLAGS "-std=gnu++17 -ggdb -Wnon-virtual-dtor -Wreorder -Wunused-variable -Wtype-limits -Wsign-compare -Wmaybe-uninitialized")
+else()
+  set(CMAKE_CXX_FLAGS "-std=gnu++17 -ggdb -Wnon-virtual-dtor -Wreorder -Wunused-variable -Wtype-limits -Wsign-compare -Wmaybe-uninitialized -O3")
+endif()
+
+set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
+set(CMAKE_CXX_STANDARD_REQUIRED ON)
+
+find_package(Boost REQUIRED)
+find_package(GTest REQUIRED)
+
+if(DEFINED ENV{NANO_SEC})
+  add_definitions(-DBOOST_DATE_TIME_POSIX_TIME_STD_CONFIG)
+endif()
+
+
+set(REGEX_EN)
+if(REGEX_EN MATCHES "HS")
+  find_path(HYPERSCAN_INCLUDE_DIR NAMES hs/hs.h)
+  find_library(HYPERSCAN_LIB1 NAMES hs)
+  find_library(HYPERSCAN_LIB2 NAMES hs_runtime)
+  set(HYPERSCAN_LIB ${HYPERSCAN_LIB1} ${HYPERSCAN_LIB2})
+  if(HYPERSCAN_INCLUDE_DIR AND HYPERSCAN_LIB)
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -lhs")
+    add_definitions(-DREGEX_HS)
+    message(STATUS "Hyperscan is set as regex engine")
+  else()
+    message(WARNING "-- Hyperscan not found --")
+  endif()
+elseif(REGEX_EN MATCHES "RE2")
+  find_path(RE2_INCLUDE_DIR NAMES re2/re2.h)
+  find_library(RE2_LIB NAMES re2)
+  if(RE2_INCLUDE_DIR AND RE2_LIB)
+    set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -lre2")
+    add_definitions(-DREGEX_RE2)
+    message(STATUS "re2 is set as regex engine")
+  else()
+    message(WARNING "-- re2 not found --")
+  endif()
+endif()
+
+enable_testing()
+
+add_subdirectory(example)
+add_subdirectory(test)
+
+
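Note that the CMakeLists.txt above initializes the engine selector with a bare set(REGEX_EN), so neither the Hyperscan nor the RE2 branch is taken by default; REGEX_EN has to hold "HS" or "RE2" before those checks run. One way this could be wired up, assuming that line is turned into a cache option (a hypothetical change, not part of this patch):

    # hypothetical edit in CMakeLists.txt:
    #   set(REGEX_EN "" CACHE STRING "regex engine to use: HS or RE2")
    cmake -DREGEX_EN=RE2 -DCMAKE_BUILD_TYPE=Release .
    make -j4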
diff --git a/src/s3select/Dockerfile b/src/s3select/Dockerfile
new file mode 100644
index 000000000..451035207
--- /dev/null
+++ b/src/s3select/Dockerfile
@@ -0,0 +1,23 @@
+# Each instruction in this file generates a new layer that gets pushed to your local image cache
+# Lines preceded by # are regarded as comments and ignored
+#
+#
+FROM ubuntu:latest
+#
+LABEL maintainer="gal.salomon@gmail.com"
+#
+# Update the image to the latest packages; the image will contain arrow-parquet, boost, and s3select (source).
+# This version enables running s3select queries on a parquet file located on the local file system.
+
+RUN apt-get update
+RUN apt-get install -y -V ca-certificates lsb-release wget
+RUN wget https://apache.bintray.com/arrow/$(lsb_release --id --short | tr 'A-Z' 'a-z')/apache-arrow-archive-keyring-latest-$(lsb_release --codename --short).deb
+RUN apt-get install -y -V ./apache-arrow-archive-keyring-latest-$(lsb_release --codename --short).deb
+RUN apt-get update
+RUN apt-get install -y -V libarrow-dev
+RUN apt-get install -y -V libparquet-dev
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends libboost-all-dev libgtest-dev
+RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends git cmake g++ make
+RUN git clone -b parquet_local_fs_first_phase https://github.com/ceph/s3select
+RUN cd /s3select/ && cmake . && make -j4
+
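A typical way to exercise the Dockerfile above is to build the image and open a shell in the resulting container; the tag s3select-dev is an arbitrary choice for this sketch:

    docker build -t s3select-dev .
    docker run --rm -it s3select-dev /bin/bash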
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/src/s3select/README.md b/src/s3select/README.md new file mode 100644 index 000000000..6beb98329 --- /dev/null +++ b/src/s3select/README.md @@ -0,0 +1,53 @@ +# s3select + +
s3select is an additional S3 request that enables the client to push an SQL statement (according to the [spec](https://docs.ceph.com/en/latest/radosgw/s3select/#features-support)) down into Ceph storage.
+s3select is an implementation of the push-down paradigm.
+The push-down paradigm moves (“pushes”) the operation close to the data,
+contrary to the common practice of moving the data to the “place” of the operation.
+In a big-data ecosystem this makes a significant difference, as the sketch below illustrates. +
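For illustration only (this sketch is not part of the repo): any S3-compatible SDK can issue such a push-down request. Below is a minimal boto3 sketch; the endpoint, credentials, bucket, key, and positional columns are hypothetical placeholders.
+
+```python
+# Minimal sketch: push an SQL statement down to an S3-compatible endpoint
+# (e.g. Ceph RGW with s3select enabled); only the result crosses the network.
+# Endpoint, credentials, bucket, key, and column positions are placeholders.
+import boto3
+
+s3 = boto3.client(
+    's3',
+    endpoint_url='http://rgw.example.com:8000',
+    aws_access_key_id='ACCESS_KEY',
+    aws_secret_access_key='SECRET_KEY',
+)
+
+resp = s3.select_object_content(
+    Bucket='mybucket',
+    Key='data.csv',
+    ExpressionType='SQL',
+    # Mirrors the "sum(x + y)" example below; _1 and _2 are positional CSV columns.
+    Expression='select sum(int(_1) + int(_2)) from s3object;',
+    InputSerialization={'CSV': {}},
+    OutputSerialization={'CSV': {}},
+)
+
+# The response payload is an event stream; 'Records' events carry the result.
+for event in resp['Payload']:
+    if 'Records' in event:
+        print(event['Records']['Payload'].decode())
+```
+
+The object is scanned next to the data; only the aggregated value travels back to the client. +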
In order to execute __“select sum(x + y) from s3object where a + b > c”__
+without push-down, the client must fetch the entire object and only then execute the operation in an analytic application.
+With push-down (s3select), the entire operation is executed on the server side, and only the result is returned to the client.
+
+
+## Analyzing huge amounts of cold/warm data without moving or converting it +
The S3 storage is reliable, efficient, and cheap, and it already contains a huge number of objects: many CSV, JSON, and Parquet objects holding a huge amount of data to analyze.
+An ETL process may convert these objects into Parquet and then run queries on the converted objects.
+But that comes at a high price: downloading all of these objects to a location close to the analytic application.
+
+The s3select engine, which resides on the S3 storage itself, can do this work for many use cases, saving time and resources.
+
+
+## The s3select engine stands by itself +
The engine resides in a dedicated GitHub repo, and it is also capable of executing SQL statements on standard input or on files residing on a local file system. +
Users may clone and build this repo, and execute various SQL statements via the CLI.
+
+## A Docker image containing a development environment
+An immediate way to get a quick start is to use the following container.
+That container already contains the cloned repo, enabling code review and modification.
+
+### Running the s3select container image
+`sudo docker run -w /s3select -it galsl/ubunto_arrow_parquet_s3select:dev`
+
+### Running the Google Test suite (it contains hundreds of queries)
+`./test/s3select_test`
+
+### Running SQL statements using the CLI on standard input
+`./example/s3select_example` is a small demo app; it lets you run queries on a local file or on standard input.
+For example, the following runs the engine on standard input.
+`seq 1 1000 | ./example/s3select_example -q 'select count(0) from stdin;'`
+
+#### SQL statement on the ps command (standard input)
+>`ps -ef | tr -s ' ' | CSV_COLUMN_DELIMETER=' ' CSV_HEADER_INFO= ./example/s3select_example -q 'select PID,CMD from stdin where PPID="1";'`
+
+#### SQL statement processed by the container; the input data is piped into the container.
+> `seq 1 1000000 | sudo docker run -w /s3select -i galsl/ubunto_arrow_parquet_s3select:dev bash -c "./example/s3select_example -q 'select count(0) from stdin;'"`
+
+### Running SQL statements using the CLI on a local file
+It is possible to run a query on a local file, as follows.
+
+`./example/s3select_example -q 'select count(0) from /full/path/file_name;'`
+
+#### SQL statement processed by the container; the input data is mapped into the container FS.
+>`sudo docker run -w /s3select -v /home/gsalomon/work:/work -it galsl/ubunto_arrow_parquet_s3select:dev bash -c "./example/s3select_example -q 'select count(*) from /work/datatime.csv;'"`
+
+
diff --git a/src/s3select/TPCDS/ddl/create_tpcds_tables.sql b/src/s3select/TPCDS/ddl/create_tpcds_tables.sql
new file mode 100644
index 000000000..692539b9d
--- /dev/null
+++ b/src/s3select/TPCDS/ddl/create_tpcds_tables.sql
@@ -0,0 +1,651 @@
+-- this DDL can be run from the Trino client ( trino --schema XXXX --catalog HHHH -f )
+-- the external_location should be modified according to the generated data-set
+
+
+-- Table
+
+drop table if exists store_sales;
+create table store_sales(
+  ss_sold_date_sk bigint
+, ss_sold_time_sk bigint
+, ss_item_sk bigint
+, ss_customer_sk bigint
+, ss_cdemo_sk bigint
+, ss_hdemo_sk bigint
+, ss_addr_sk bigint
+, ss_store_sk bigint
+, ss_promo_sk bigint
+, ss_ticket_number bigint
+, ss_quantity int
+, ss_wholesale_cost decimal(7,2)
+, ss_list_price decimal(7,2)
+, ss_sales_price decimal(7,2)
+, ss_ext_discount_amt decimal(7,2)
+, ss_ext_sales_price decimal(7,2)
+, ss_ext_wholesale_cost decimal(7,2)
+, ss_ext_list_price decimal(7,2)
+, ss_ext_tax decimal(7,2)
+, ss_coupon_amt decimal(7,2)
+, ss_net_paid decimal(7,2)
+, ss_net_paid_inc_tax decimal(7,2)
+, ss_net_profit decimal(7,2)
+)
+with( format = 'TEXTFILE',
+textfile_field_separator = '|',
+external_location = 's3a://tpcds2/4/store_sales');
+;
+
+-- Table
+
+drop table if exists store_returns;
+create table store_returns(
+  sr_returned_date_sk bigint
+, sr_return_time_sk bigint
+, sr_item_sk bigint
+, sr_customer_sk bigint
+, sr_cdemo_sk bigint
+, sr_hdemo_sk bigint
+, sr_addr_sk bigint
+, sr_store_sk bigint
+, sr_reason_sk bigint
+, sr_ticket_number bigint
+, sr_return_quantity int
+, sr_return_amt decimal(7,2)
+, sr_return_tax decimal(7,2)
+, sr_return_amt_inc_tax decimal(7,2)
+, sr_fee decimal(7,2)
+, sr_return_ship_cost decimal(7,2)
+, sr_refunded_cash decimal(7,2)
+, sr_reversed_charge
decimal(7,2) +, sr_store_credit decimal(7,2) +, sr_net_loss decimal(7,2) +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/store_returns'); +; + +-- Table + +drop table if exists catalog_sales; +create table catalog_sales( + cs_sold_date_sk bigint +, cs_sold_time_sk bigint +, cs_ship_date_sk bigint +, cs_bill_customer_sk bigint +, cs_bill_cdemo_sk bigint +, cs_bill_hdemo_sk bigint +, cs_bill_addr_sk bigint +, cs_ship_customer_sk bigint +, cs_ship_cdemo_sk bigint +, cs_ship_hdemo_sk bigint +, cs_ship_addr_sk bigint +, cs_call_center_sk bigint +, cs_catalog_page_sk bigint +, cs_ship_mode_sk bigint +, cs_warehouse_sk bigint +, cs_item_sk bigint +, cs_promo_sk bigint +, cs_order_number bigint +, cs_quantity int +, cs_wholesale_cost decimal(7,2) +, cs_list_price decimal(7,2) +, cs_sales_price decimal(7,2) +, cs_ext_discount_amt decimal(7,2) +, cs_ext_sales_price decimal(7,2) +, cs_ext_wholesale_cost decimal(7,2) +, cs_ext_list_price decimal(7,2) +, cs_ext_tax decimal(7,2) +, cs_coupon_amt decimal(7,2) +, cs_ext_ship_cost decimal(7,2) +, cs_net_paid decimal(7,2) +, cs_net_paid_inc_tax decimal(7,2) +, cs_net_paid_inc_ship decimal(7,2) +, cs_net_paid_inc_ship_tax decimal(7,2) +, cs_net_profit decimal(7,2) +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/catalog_sales'); +; + +-- Table + +drop table if exists catalog_returns; +create table catalog_returns( + cr_returned_date_sk bigint +, cr_returned_time_sk bigint +, cr_item_sk bigint +, cr_refunded_customer_sk bigint +, cr_refunded_cdemo_sk bigint +, cr_refunded_hdemo_sk bigint +, cr_refunded_addr_sk bigint +, cr_returning_customer_sk bigint +, cr_returning_cdemo_sk bigint +, cr_returning_hdemo_sk bigint +, cr_returning_addr_sk bigint +, cr_call_center_sk bigint +, cr_catalog_page_sk bigint +, cr_ship_mode_sk bigint +, cr_warehouse_sk bigint +, cr_reason_sk bigint +, cr_order_number bigint +, cr_return_quantity int +, cr_return_amount decimal(7,2) +, cr_return_tax decimal(7,2) +, cr_return_amt_inc_tax decimal(7,2) +, cr_fee decimal(7,2) +, cr_return_ship_cost decimal(7,2) +, cr_refunded_cash decimal(7,2) +, cr_reversed_charge decimal(7,2) +, cr_store_credit decimal(7,2) +, cr_net_loss decimal(7,2) +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/catalog_returns'); +; + +-- Table + +drop table if exists web_sales; +create table web_sales( + ws_sold_date_sk bigint +, ws_sold_time_sk bigint +, ws_ship_date_sk bigint +, ws_item_sk bigint +, ws_bill_customer_sk bigint +, ws_bill_cdemo_sk bigint +, ws_bill_hdemo_sk bigint +, ws_bill_addr_sk bigint +, ws_ship_customer_sk bigint +, ws_ship_cdemo_sk bigint +, ws_ship_hdemo_sk bigint +, ws_ship_addr_sk bigint +, ws_web_page_sk bigint +, ws_web_site_sk bigint +, ws_ship_mode_sk bigint +, ws_warehouse_sk bigint +, ws_promo_sk bigint +, ws_order_number bigint +, ws_quantity int +, ws_wholesale_cost decimal(7,2) +, ws_list_price decimal(7,2) +, ws_sales_price decimal(7,2) +, ws_ext_discount_amt decimal(7,2) +, ws_ext_sales_price decimal(7,2) +, ws_ext_wholesale_cost decimal(7,2) +, ws_ext_list_price decimal(7,2) +, ws_ext_tax decimal(7,2) +, ws_coupon_amt decimal(7,2) +, ws_ext_ship_cost decimal(7,2) +, ws_net_paid decimal(7,2) +, ws_net_paid_inc_tax decimal(7,2) +, ws_net_paid_inc_ship decimal(7,2) +, ws_net_paid_inc_ship_tax decimal(7,2) +, ws_net_profit decimal(7,2) +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 
's3a://tpcds2/4/web_sales'); +; + +-- Table + +drop table if exists web_returns; +create table web_returns( + wr_returned_date_sk bigint +, wr_returned_time_sk bigint +, wr_item_sk bigint +, wr_refunded_customer_sk bigint +, wr_refunded_cdemo_sk bigint +, wr_refunded_hdemo_sk bigint +, wr_refunded_addr_sk bigint +, wr_returning_customer_sk bigint +, wr_returning_cdemo_sk bigint +, wr_returning_hdemo_sk bigint +, wr_returning_addr_sk bigint +, wr_web_page_sk bigint +, wr_reason_sk bigint +, wr_order_number bigint +, wr_return_quantity int +, wr_return_amt decimal(7,2) +, wr_return_tax decimal(7,2) +, wr_return_amt_inc_tax decimal(7,2) +, wr_fee decimal(7,2) +, wr_return_ship_cost decimal(7,2) +, wr_refunded_cash decimal(7,2) +, wr_reversed_charge decimal(7,2) +, wr_account_credit decimal(7,2) +, wr_net_loss decimal(7,2) +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/web_returns'); +; + +-- Table + +drop table if exists inventory; +create table inventory( + inv_date_sk bigint +, inv_item_sk bigint +, inv_warehouse_sk bigint +, inv_quantity_on_hand int +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/inventory'); + +-- Table + +drop table if exists store; +create table store( + s_store_sk bigint +, s_store_id varchar +, s_rec_start_date date +, s_rec_end_date date +, s_closed_date_sk bigint +, s_store_name varchar +, s_number_employees int +, s_floor_space int +, s_hours varchar +, S_manager varchar +, S_market_id int +, S_geography_class varchar +, S_market_desc varchar +, s_market_manager varchar +, s_division_id int +, s_division_name varchar +, s_company_id int +, s_company_name varchar +, s_street_number varchar +, s_street_name varchar +, s_street_type varchar +, s_suite_number varchar +, s_city varchar +, s_county varchar +, s_state varchar +, s_zip varchar +, s_country varchar +, s_gmt_offset decimal(5,2) +, s_tax_percentage decimal(5,2) +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/store'); + +-- Table + +drop table if exists call_center; +create table call_center( + cc_call_center_sk bigint +, cc_call_center_id varchar +, cc_rec_start_date date +, cc_rec_end_date date +, cc_closed_date_sk bigint +, cc_open_date_sk bigint +, cc_name varchar +, cc_class varchar +, cc_employees int +, cc_sq_ft int +, cc_hours varchar +, cc_manager varchar +, cc_mkt_id int +, cc_mkt_class varchar +, cc_mkt_desc varchar +, cc_market_manager varchar +, cc_division int +, cc_division_name varchar +, cc_company int +, cc_company_name varchar +, cc_street_number varchar +, cc_street_name varchar +, cc_street_type varchar +, cc_suite_number varchar +, cc_city varchar +, cc_county varchar +, cc_state varchar +, cc_zip varchar +, cc_country varchar +, cc_gmt_offset decimal(5,2) +, cc_tax_percentage decimal(5,2) +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/call_center'); + +-- Table + +drop table if exists catalog_page; +create table catalog_page( + cp_catalog_page_sk bigint +, cp_catalog_page_id varchar +, cp_start_date_sk bigint +, cp_end_date_sk bigint +, cp_department varchar +, cp_catalog_number int +, cp_catalog_page_number int +, cp_description varchar +, cp_type varchar +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/catalog_page'); + +-- Table + +drop table if exists web_site; +create table web_site( + web_site_sk bigint +, web_site_id 
varchar +, web_rec_start_date date +, web_rec_end_date date +, web_name varchar +, web_open_date_sk bigint +, web_close_date_sk bigint +, web_class varchar +, web_manager varchar +, web_mkt_id int +, web_mkt_class varchar +, web_mkt_desc varchar +, web_market_manager varchar +, web_company_id int +, web_company_name varchar +, web_street_number varchar +, web_street_name varchar +, web_street_type varchar +, web_suite_number varchar +, web_city varchar +, web_county varchar +, web_state varchar +, web_zip varchar +, web_country varchar +, web_gmt_offset decimal(5,2) +, web_tax_percentage decimal(5,2) +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/web_site'); + +-- Table + +drop table if exists web_page; +create table web_page( + wp_web_page_sk bigint +, wp_web_page_id varchar +, wp_rec_start_date date +, wp_rec_end_date date +, wp_creation_date_sk bigint +, wp_access_date_sk bigint +, wp_autogen_flag varchar +, wp_customer_sk bigint +, wp_url varchar +, wp_type varchar +, wp_char_count int +, wp_link_count int +, wp_image_count int +, wp_max_ad_count int +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/web_page'); + +-- Table + +drop table if exists warehouse; +create table warehouse( + w_warehouse_sk bigint +, w_warehouse_id varchar +, w_warehouse_name varchar +, w_warehouse_sq_ft int +, w_street_number varchar +, w_street_name varchar +, w_street_type varchar +, w_suite_number varchar +, w_city varchar +, w_county varchar +, w_state varchar +, w_zip varchar +, w_country varchar +, w_gmt_offset decimal(5,2) +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/warehouse'); + +-- Table + +drop table if exists customer; +create table customer( + c_customer_sk bigint +, c_customer_id varchar +, c_current_cdemo_sk bigint +, c_current_hdemo_sk bigint +, c_current_addr_sk bigint +, c_first_shipto_date_sk bigint +, c_first_sales_date_sk bigint +, c_salutation varchar +, c_first_name varchar +, c_last_name varchar +, c_preferred_cust_flag varchar +, c_birth_day int +, c_birth_month int +, c_birth_year int +, c_birth_country varchar +, c_login varchar +, c_email_address varchar +, c_last_review_date_sk bigint +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/customer'); + +-- Table + +drop table if exists customer_address; +create table customer_address( + ca_address_sk bigint +, ca_address_id varchar +, ca_street_number varchar +, ca_street_name varchar +, ca_street_type varchar +, ca_suite_number varchar +, ca_city varchar +, ca_county varchar +, ca_state varchar +, ca_zip varchar +, ca_country varchar +, ca_gmt_offset decimal(5,2) +, ca_location_type varchar +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/customer_address'); + +-- Table + +drop table if exists customer_demographics; +create table customer_demographics( + cd_demo_sk bigint +, cd_gender varchar +, cd_marital_status varchar +, cd_education_status varchar +, cd_purchase_estimate int +, cd_credit_rating varchar +, cd_dep_count int +, cd_dep_employed_count int +, cd_dep_college_count int +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/customer_demographics'); + +-- Table + +drop table if exists date_dim; +create table date_dim( + d_date_sk bigint +, d_date_id varchar +, d_date date +, d_month_seq int +, d_week_seq int +, 
d_quarter_seq int +, d_year int +, d_dow int +, d_moy int +, d_dom int +, d_qoy int +, d_fy_year int +, d_fy_quarter_seq int +, d_fy_week_seq int +, d_day_name varchar +, d_quarter_name varchar +, d_holiday varchar +, d_weekend varchar +, d_following_holiday varchar +, d_first_dom int +, d_last_dom int +, d_same_day_ly int +, d_same_day_lq int +, d_current_day varchar +, d_current_week varchar +, d_current_month varchar +, d_current_quarter varchar +, d_current_year varchar +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/date_dim'); + +-- Table + +drop table if exists household_demographics; +create table household_demographics( + hd_demo_sk bigint +, hd_income_band_sk bigint +, hd_buy_potential varchar +, hd_dep_count int +, hd_vehicle_count int +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/household_demographics'); + +-- Table + +drop table if exists item; +create table item( + i_item_sk bigint +, i_item_id varchar +, i_rec_start_date date +, i_rec_end_date date +, i_item_desc varchar +, i_current_price decimal(7,2) +, i_wholesale_cost decimal(7,2) +, i_brand_id int +, i_brand varchar +, i_class_id int +, i_class varchar +, i_category_id int +, i_category varchar +, i_manufact_id int +, i_manufact varchar +, i_size varchar +, i_formulation varchar +, i_color varchar +, i_units varchar +, i_container varchar +, i_manager_id int +, i_product_name varchar +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/item'); + +-- Table + +drop table if exists income_band; +create table income_band( + ib_income_band_sk bigint +, ib_lower_bound int +, ib_upper_bound int +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/income_band'); + +-- Table + +drop table if exists promotion; +create table promotion( + p_promo_sk bigint +, p_promo_id varchar +, p_start_date_sk bigint +, p_end_date_sk bigint +, p_item_sk bigint +, p_cost decimal(15,2) +, p_response_target int +, p_promo_name varchar +, p_channel_dmail varchar +, p_channel_email varchar +, p_channel_catalog varchar +, p_channel_tv varchar +, p_channel_radio varchar +, p_channel_press varchar +, p_channel_event varchar +, p_channel_demo varchar +, p_channel_details varchar +, p_purpose varchar +, p_discount_active varchar +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/promotion'); + +-- Table + +drop table if exists reason; +create table reason( + r_reason_sk bigint +, r_reason_id varchar +, r_reason_desc varchar +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/reason'); + +-- Table + +drop table if exists ship_mode; +create table ship_mode( + sm_ship_mode_sk bigint +, sm_ship_mode_id varchar +, sm_type varchar +, sm_code varchar +, sm_carrier varchar +, sm_contract varchar +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/ship_mode'); + +-- Table + +drop table if exists time_dim; +create table time_dim( + t_time_sk bigint +, t_time_id varchar +, t_time int +, t_hour int +, t_minute int +, t_second int +, t_am_pm varchar +, t_shift varchar +, t_sub_shift varchar +, t_meal_time varchar +) +with( format = 'TEXTFILE', +textfile_field_separator = '|', +external_location = 's3a://tpcds2/4/time_dim'); + diff --git a/src/s3select/TPCDS/sample-queries-tpcds/README.md 
b/src/s3select/TPCDS/sample-queries-tpcds/README.md new file mode 100644 index 000000000..3fc71c50e --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/README.md @@ -0,0 +1,4 @@ +Sample TPC-DS Queries +===================== + +This directory contains sample TPC-DS queries you can run once you have generated your data. Queries are compatible with HDP 2.6 and up. diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query1.sql b/src/s3select/TPCDS/sample-queries-tpcds/query1.sql new file mode 100644 index 000000000..c201f7334 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query1.sql @@ -0,0 +1,25 @@ +-- start query 1 in stream 0 using template query1.tpl and seed 2031708268 +with customer_total_return as +(select sr_customer_sk as ctr_customer_sk +,sr_store_sk as ctr_store_sk +,sum(SR_FEE) as ctr_total_return +from store_returns +,date_dim +where sr_returned_date_sk = d_date_sk +and d_year =2000 +group by sr_customer_sk +,sr_store_sk) + select c_customer_id +from customer_total_return ctr1 +,store +,customer +where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 +from customer_total_return ctr2 +where ctr1.ctr_store_sk = ctr2.ctr_store_sk) +and s_store_sk = ctr1.ctr_store_sk +and s_state = 'NM' +and ctr1.ctr_customer_sk = c_customer_sk +order by c_customer_id +limit 100; + +-- end query 1 in stream 0 using template query1.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query10.sql b/src/s3select/TPCDS/sample-queries-tpcds/query10.sql new file mode 100644 index 000000000..c5b4ac247 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query10.sql @@ -0,0 +1,59 @@ +-- start query 1 in stream 0 using template query10.tpl and seed 797269820 +select + cd_gender, + cd_marital_status, + cd_education_status, + count(*) cnt1, + cd_purchase_estimate, + count(*) cnt2, + cd_credit_rating, + count(*) cnt3, + cd_dep_count, + count(*) cnt4, + cd_dep_employed_count, + count(*) cnt5, + cd_dep_college_count, + count(*) cnt6 + from + customer c,customer_address ca,customer_demographics + where + c.c_current_addr_sk = ca.ca_address_sk and + ca_county in ('Fillmore County','McPherson County','Bonneville County','Boone County','Brown County') and + cd_demo_sk = c.c_current_cdemo_sk and + exists (select * + from store_sales,date_dim + where c.c_customer_sk = ss_customer_sk and + ss_sold_date_sk = d_date_sk and + d_year = 2000 and + d_moy between 3 and 3+3) and + (exists (select * + from web_sales,date_dim + where c.c_customer_sk = ws_bill_customer_sk and + ws_sold_date_sk = d_date_sk and + d_year = 2000 and + d_moy between 3 ANd 3+3) or + exists (select * + from catalog_sales,date_dim + where c.c_customer_sk = cs_ship_customer_sk and + cs_sold_date_sk = d_date_sk and + d_year = 2000 and + d_moy between 3 and 3+3)) + group by cd_gender, + cd_marital_status, + cd_education_status, + cd_purchase_estimate, + cd_credit_rating, + cd_dep_count, + cd_dep_employed_count, + cd_dep_college_count + order by cd_gender, + cd_marital_status, + cd_education_status, + cd_purchase_estimate, + cd_credit_rating, + cd_dep_count, + cd_dep_employed_count, + cd_dep_college_count +limit 100; + +-- end query 1 in stream 0 using template query10.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query11.sql b/src/s3select/TPCDS/sample-queries-tpcds/query11.sql new file mode 100644 index 000000000..156d9da5e --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query11.sql @@ -0,0 +1,81 @@ +-- start query 1 in stream 0 using template query11.tpl and seed 1819994127 +with year_total as 
( + select c_customer_id customer_id + ,c_first_name customer_first_name + ,c_last_name customer_last_name + ,c_preferred_cust_flag customer_preferred_cust_flag + ,c_birth_country customer_birth_country + ,c_login customer_login + ,c_email_address customer_email_address + ,d_year dyear + ,sum(ss_ext_list_price-ss_ext_discount_amt) year_total + ,'s' sale_type + from customer + ,store_sales + ,date_dim + where c_customer_sk = ss_customer_sk + and ss_sold_date_sk = d_date_sk + group by c_customer_id + ,c_first_name + ,c_last_name + ,c_preferred_cust_flag + ,c_birth_country + ,c_login + ,c_email_address + ,d_year + union all + select c_customer_id customer_id + ,c_first_name customer_first_name + ,c_last_name customer_last_name + ,c_preferred_cust_flag customer_preferred_cust_flag + ,c_birth_country customer_birth_country + ,c_login customer_login + ,c_email_address customer_email_address + ,d_year dyear + ,sum(ws_ext_list_price-ws_ext_discount_amt) year_total + ,'w' sale_type + from customer + ,web_sales + ,date_dim + where c_customer_sk = ws_bill_customer_sk + and ws_sold_date_sk = d_date_sk + group by c_customer_id + ,c_first_name + ,c_last_name + ,c_preferred_cust_flag + ,c_birth_country + ,c_login + ,c_email_address + ,d_year + ) + select + t_s_secyear.customer_id + ,t_s_secyear.customer_first_name + ,t_s_secyear.customer_last_name + ,t_s_secyear.customer_birth_country + from year_total t_s_firstyear + ,year_total t_s_secyear + ,year_total t_w_firstyear + ,year_total t_w_secyear + where t_s_secyear.customer_id = t_s_firstyear.customer_id + and t_s_firstyear.customer_id = t_w_secyear.customer_id + and t_s_firstyear.customer_id = t_w_firstyear.customer_id + and t_s_firstyear.sale_type = 's' + and t_w_firstyear.sale_type = 'w' + and t_s_secyear.sale_type = 's' + and t_w_secyear.sale_type = 'w' + and t_s_firstyear.dyear = 1999 + and t_s_secyear.dyear = 1999+1 + and t_w_firstyear.dyear = 1999 + and t_w_secyear.dyear = 1999+1 + and t_s_firstyear.year_total > 0 + and t_w_firstyear.year_total > 0 + and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else 0.0 end + > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else 0.0 end + order by t_s_secyear.customer_id + ,t_s_secyear.customer_first_name + ,t_s_secyear.customer_last_name + ,t_s_secyear.customer_birth_country +limit 100; + +-- end query 1 in stream 0 using template query11.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query12.sql b/src/s3select/TPCDS/sample-queries-tpcds/query12.sql new file mode 100644 index 000000000..077223c32 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query12.sql @@ -0,0 +1,34 @@ +-- start query 1 in stream 0 using template query12.tpl and seed 345591136 +select i_item_id + ,i_item_desc + ,i_category + ,i_class + ,i_current_price + ,sum(ws_ext_sales_price) as itemrevenue + ,sum(ws_ext_sales_price)*100/sum(sum(ws_ext_sales_price)) over + (partition by i_class) as revenueratio +from + web_sales + ,item + ,date_dim +where + ws_item_sk = i_item_sk + and i_category in ('Electronics', 'Books', 'Women') + and ws_sold_date_sk = d_date_sk + and d_date between cast('1998-01-06' as date) + and (cast('1998-01-06' as date) + interval '30' day) +group by + i_item_id + ,i_item_desc + ,i_category + ,i_class + ,i_current_price +order by + i_category + ,i_class + ,i_item_id + ,i_item_desc + ,revenueratio +limit 100; + +-- end query 1 in stream 0 using template query12.tpl diff --git 
a/src/s3select/TPCDS/sample-queries-tpcds/query13.sql b/src/s3select/TPCDS/sample-queries-tpcds/query13.sql new file mode 100644 index 000000000..cdef84660 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query13.sql @@ -0,0 +1,52 @@ +-- start query 1 in stream 0 using template query13.tpl and seed 622697896 +select avg(ss_quantity) + ,avg(ss_ext_sales_price) + ,avg(ss_ext_wholesale_cost) + ,sum(ss_ext_wholesale_cost) + from store_sales + ,store + ,customer_demographics + ,household_demographics + ,customer_address + ,date_dim + where s_store_sk = ss_store_sk + and ss_sold_date_sk = d_date_sk and d_year = 2001 + and((ss_hdemo_sk=hd_demo_sk + and cd_demo_sk = ss_cdemo_sk + and cd_marital_status = 'U' + and cd_education_status = 'Secondary' + and ss_sales_price between 100.00 and 150.00 + and hd_dep_count = 3 + )or + (ss_hdemo_sk=hd_demo_sk + and cd_demo_sk = ss_cdemo_sk + and cd_marital_status = 'W' + and cd_education_status = 'College' + and ss_sales_price between 50.00 and 100.00 + and hd_dep_count = 1 + ) or + (ss_hdemo_sk=hd_demo_sk + and cd_demo_sk = ss_cdemo_sk + and cd_marital_status = 'D' + and cd_education_status = 'Primary' + and ss_sales_price between 150.00 and 200.00 + and hd_dep_count = 1 + )) + and((ss_addr_sk = ca_address_sk + and ca_country = 'United States' + and ca_state in ('TX', 'OK', 'MI') + and ss_net_profit between 100 and 200 + ) or + (ss_addr_sk = ca_address_sk + and ca_country = 'United States' + and ca_state in ('WA', 'NC', 'OH') + and ss_net_profit between 150 and 300 + ) or + (ss_addr_sk = ca_address_sk + and ca_country = 'United States' + and ca_state in ('MT', 'FL', 'GA') + and ss_net_profit between 50 and 250 + )) +; + +-- end query 1 in stream 0 using template query13.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query14.sql b/src/s3select/TPCDS/sample-queries-tpcds/query14.sql new file mode 100644 index 000000000..923814404 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query14.sql @@ -0,0 +1,210 @@ +-- start query 1 in stream 0 using template query14.tpl and seed 1819994127 +with cross_items as + (select i_item_sk ss_item_sk + from item, + (select iss.i_brand_id brand_id + ,iss.i_class_id class_id + ,iss.i_category_id category_id + from store_sales + ,item iss + ,date_dim d1 + where ss_item_sk = iss.i_item_sk + and ss_sold_date_sk = d1.d_date_sk + and d1.d_year between 2000 AND 2000 + 2 + intersect + select ics.i_brand_id + ,ics.i_class_id + ,ics.i_category_id + from catalog_sales + ,item ics + ,date_dim d2 + where cs_item_sk = ics.i_item_sk + and cs_sold_date_sk = d2.d_date_sk + and d2.d_year between 2000 AND 2000 + 2 + intersect + select iws.i_brand_id + ,iws.i_class_id + ,iws.i_category_id + from web_sales + ,item iws + ,date_dim d3 + where ws_item_sk = iws.i_item_sk + and ws_sold_date_sk = d3.d_date_sk + and d3.d_year between 2000 AND 2000 + 2) x + where i_brand_id = brand_id + and i_class_id = class_id + and i_category_id = category_id +), + avg_sales as + (select avg(quantity*list_price) average_sales + from (select ss_quantity quantity + ,ss_list_price list_price + from store_sales + ,date_dim + where ss_sold_date_sk = d_date_sk + and d_year between 2000 and 2000 + 2 + union all + select cs_quantity quantity + ,cs_list_price list_price + from catalog_sales + ,date_dim + where cs_sold_date_sk = d_date_sk + and d_year between 2000 and 2000 + 2 + union all + select ws_quantity quantity + ,ws_list_price list_price + from web_sales + ,date_dim + where ws_sold_date_sk = d_date_sk + and d_year between 2000 and 2000 + 2) 
x) + select channel, i_brand_id,i_class_id,i_category_id,sum(sales), sum(number_sales) + from( + select 'store' channel, i_brand_id,i_class_id + ,i_category_id,sum(ss_quantity*ss_list_price) sales + , count(*) number_sales + from store_sales + ,item + ,date_dim + where ss_item_sk in (select ss_item_sk from cross_items) + and ss_item_sk = i_item_sk + and ss_sold_date_sk = d_date_sk + and d_year = 2000+2 + and d_moy = 11 + group by i_brand_id,i_class_id,i_category_id + having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales) + union all + select 'catalog' channel, i_brand_id,i_class_id,i_category_id, sum(cs_quantity*cs_list_price) sales, count(*) number_sales + from catalog_sales + ,item + ,date_dim + where cs_item_sk in (select ss_item_sk from cross_items) + and cs_item_sk = i_item_sk + and cs_sold_date_sk = d_date_sk + and d_year = 2000+2 + and d_moy = 11 + group by i_brand_id,i_class_id,i_category_id + having sum(cs_quantity*cs_list_price) > (select average_sales from avg_sales) + union all + select 'web' channel, i_brand_id,i_class_id,i_category_id, sum(ws_quantity*ws_list_price) sales , count(*) number_sales + from web_sales + ,item + ,date_dim + where ws_item_sk in (select ss_item_sk from cross_items) + and ws_item_sk = i_item_sk + and ws_sold_date_sk = d_date_sk + and d_year = 2000+2 + and d_moy = 11 + group by i_brand_id,i_class_id,i_category_id + having sum(ws_quantity*ws_list_price) > (select average_sales from avg_sales) + ) y + group by rollup (channel, i_brand_id,i_class_id,i_category_id) + order by channel,i_brand_id,i_class_id,i_category_id + limit 100; +with cross_items as + (select i_item_sk ss_item_sk + from item, + (select iss.i_brand_id brand_id + ,iss.i_class_id class_id + ,iss.i_category_id category_id + from store_sales + ,item iss + ,date_dim d1 + where ss_item_sk = iss.i_item_sk + and ss_sold_date_sk = d1.d_date_sk + and d1.d_year between 2000 AND 2000 + 2 + intersect + select ics.i_brand_id + ,ics.i_class_id + ,ics.i_category_id + from catalog_sales + ,item ics + ,date_dim d2 + where cs_item_sk = ics.i_item_sk + and cs_sold_date_sk = d2.d_date_sk + and d2.d_year between 2000 AND 2000 + 2 + intersect + select iws.i_brand_id + ,iws.i_class_id + ,iws.i_category_id + from web_sales + ,item iws + ,date_dim d3 + where ws_item_sk = iws.i_item_sk + and ws_sold_date_sk = d3.d_date_sk + and d3.d_year between 2000 AND 2000 + 2) x + where i_brand_id = brand_id + and i_class_id = class_id + and i_category_id = category_id +), + avg_sales as +(select avg(quantity*list_price) average_sales + from (select ss_quantity quantity + ,ss_list_price list_price + from store_sales + ,date_dim + where ss_sold_date_sk = d_date_sk + and d_year between 2000 and 2000 + 2 + union all + select cs_quantity quantity + ,cs_list_price list_price + from catalog_sales + ,date_dim + where cs_sold_date_sk = d_date_sk + and d_year between 2000 and 2000 + 2 + union all + select ws_quantity quantity + ,ws_list_price list_price + from web_sales + ,date_dim + where ws_sold_date_sk = d_date_sk + and d_year between 2000 and 2000 + 2) x) + select this_year.channel ty_channel + ,this_year.i_brand_id ty_brand + ,this_year.i_class_id ty_class + ,this_year.i_category_id ty_category + ,this_year.sales ty_sales + ,this_year.number_sales ty_number_sales + ,last_year.channel ly_channel + ,last_year.i_brand_id ly_brand + ,last_year.i_class_id ly_class + ,last_year.i_category_id ly_category + ,last_year.sales ly_sales + ,last_year.number_sales ly_number_sales + from + (select 'store' channel, 
i_brand_id,i_class_id,i_category_id + ,sum(ss_quantity*ss_list_price) sales, count(*) number_sales + from store_sales + ,item + ,date_dim + where ss_item_sk in (select ss_item_sk from cross_items) + and ss_item_sk = i_item_sk + and ss_sold_date_sk = d_date_sk + and d_week_seq = (select d_week_seq + from date_dim + where d_year = 2000 + 1 + and d_moy = 12 + and d_dom = 15) + group by i_brand_id,i_class_id,i_category_id + having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales)) this_year, + (select 'store' channel, i_brand_id,i_class_id + ,i_category_id, sum(ss_quantity*ss_list_price) sales, count(*) number_sales + from store_sales + ,item + ,date_dim + where ss_item_sk in (select ss_item_sk from cross_items) + and ss_item_sk = i_item_sk + and ss_sold_date_sk = d_date_sk + and d_week_seq = (select d_week_seq + from date_dim + where d_year = 2000 + and d_moy = 12 + and d_dom = 15) + group by i_brand_id,i_class_id,i_category_id + having sum(ss_quantity*ss_list_price) > (select average_sales from avg_sales)) last_year + where this_year.i_brand_id= last_year.i_brand_id + and this_year.i_class_id = last_year.i_class_id + and this_year.i_category_id = last_year.i_category_id + order by this_year.channel, this_year.i_brand_id, this_year.i_class_id, this_year.i_category_id + limit 100; + +-- end query 1 in stream 0 using template query14.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query15.sql b/src/s3select/TPCDS/sample-queries-tpcds/query15.sql new file mode 100644 index 000000000..cb489cf33 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query15.sql @@ -0,0 +1,20 @@ +-- start query 1 in stream 0 using template query15.tpl and seed 1819994127 +select ca_zip + ,sum(cs_sales_price) + from catalog_sales + ,customer + ,customer_address + ,date_dim + where cs_bill_customer_sk = c_customer_sk + and c_current_addr_sk = ca_address_sk + and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', + '85392', '85460', '80348', '81792') + or ca_state in ('CA','WA','GA') + or cs_sales_price > 500) + and cs_sold_date_sk = d_date_sk + and d_qoy = 2 and d_year = 1998 + group by ca_zip + order by ca_zip + limit 100; + +-- end query 1 in stream 0 using template query15.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query16.sql b/src/s3select/TPCDS/sample-queries-tpcds/query16.sql new file mode 100644 index 000000000..f942c731f --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query16.sql @@ -0,0 +1,31 @@ +-- start query 1 in stream 0 using template query16.tpl and seed 171719422 +select + count(distinct cs_order_number) as "order count" + ,sum(cs_ext_ship_cost) as "total shipping cost" + ,sum(cs_net_profit) as "total net profit" +from + catalog_sales cs1 + ,date_dim + ,customer_address + ,call_center +where + d_date between cast('1999-4-01' as date) and + (cast('1999-4-01' as date) + interval '60' day) +and cs1.cs_ship_date_sk = d_date_sk +and cs1.cs_ship_addr_sk = ca_address_sk +and ca_state = 'IL' +and cs1.cs_call_center_sk = cc_call_center_sk +and cc_county in ('Richland County','Bronx County','Maverick County','Mesa County', + 'Raleigh County' +) +and exists (select * + from catalog_sales cs2 + where cs1.cs_order_number = cs2.cs_order_number + and cs1.cs_warehouse_sk <> cs2.cs_warehouse_sk) +and not exists(select * + from catalog_returns cr1 + where cs1.cs_order_number = cr1.cr_order_number) +order by count(distinct cs_order_number) +limit 100; + +-- end query 1 in stream 0 using template query16.tpl diff --git 
a/src/s3select/TPCDS/sample-queries-tpcds/query17.sql b/src/s3select/TPCDS/sample-queries-tpcds/query17.sql new file mode 100644 index 000000000..b369def17 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query17.sql @@ -0,0 +1,45 @@ +-- start query 1 in stream 0 using template query17.tpl and seed 1819994127 +select i_item_id + ,i_item_desc + ,s_state + ,count(ss_quantity) as store_sales_quantitycount + ,avg(ss_quantity) as store_sales_quantityave + ,stddev_samp(ss_quantity) as store_sales_quantitystdev + ,stddev_samp(ss_quantity)/avg(ss_quantity) as store_sales_quantitycov + ,count(sr_return_quantity) as store_returns_quantitycount + ,avg(sr_return_quantity) as store_returns_quantityave + ,stddev_samp(sr_return_quantity) as store_returns_quantitystdev + ,stddev_samp(sr_return_quantity)/avg(sr_return_quantity) as store_returns_quantitycov + ,count(cs_quantity) as catalog_sales_quantitycount ,avg(cs_quantity) as catalog_sales_quantityave + ,stddev_samp(cs_quantity) as catalog_sales_quantitystdev + ,stddev_samp(cs_quantity)/avg(cs_quantity) as catalog_sales_quantitycov + from store_sales + ,store_returns + ,catalog_sales + ,date_dim d1 + ,date_dim d2 + ,date_dim d3 + ,store + ,item + where d1.d_quarter_name = '2000Q1' + and d1.d_date_sk = ss_sold_date_sk + and i_item_sk = ss_item_sk + and s_store_sk = ss_store_sk + and ss_customer_sk = sr_customer_sk + and ss_item_sk = sr_item_sk + and ss_ticket_number = sr_ticket_number + and sr_returned_date_sk = d2.d_date_sk + and d2.d_quarter_name in ('2000Q1','2000Q2','2000Q3') + and sr_customer_sk = cs_bill_customer_sk + and sr_item_sk = cs_item_sk + and cs_sold_date_sk = d3.d_date_sk + and d3.d_quarter_name in ('2000Q1','2000Q2','2000Q3') + group by i_item_id + ,i_item_desc + ,s_state + order by i_item_id + ,i_item_desc + ,s_state +limit 100; + +-- end query 1 in stream 0 using template query17.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query18.sql b/src/s3select/TPCDS/sample-queries-tpcds/query18.sql new file mode 100644 index 000000000..ee3b5b643 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query18.sql @@ -0,0 +1,34 @@ +-- start query 1 in stream 0 using template query18.tpl and seed 1978355063 +select i_item_id, + ca_country, + ca_state, + ca_county, + avg( cast(cs_quantity as decimal(12,2))) agg1, + avg( cast(cs_list_price as decimal(12,2))) agg2, + avg( cast(cs_coupon_amt as decimal(12,2))) agg3, + avg( cast(cs_sales_price as decimal(12,2))) agg4, + avg( cast(cs_net_profit as decimal(12,2))) agg5, + avg( cast(c_birth_year as decimal(12,2))) agg6, + avg( cast(cd1.cd_dep_count as decimal(12,2))) agg7 + from catalog_sales, customer_demographics cd1, + customer_demographics cd2, customer, customer_address, date_dim, item + where cs_sold_date_sk = d_date_sk and + cs_item_sk = i_item_sk and + cs_bill_cdemo_sk = cd1.cd_demo_sk and + cs_bill_customer_sk = c_customer_sk and + cd1.cd_gender = 'M' and + cd1.cd_education_status = 'Unknown' and + c_current_cdemo_sk = cd2.cd_demo_sk and + c_current_addr_sk = ca_address_sk and + c_birth_month in (5,1,4,7,8,9) and + d_year = 2002 and + ca_state in ('AR','TX','NC' + ,'GA','MS','WV','AL') + group by rollup (i_item_id, ca_country, ca_state, ca_county) + order by ca_country, + ca_state, + ca_county, + i_item_id + limit 100; + +-- end query 1 in stream 0 using template query18.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query19.sql b/src/s3select/TPCDS/sample-queries-tpcds/query19.sql new file mode 100644 index 000000000..e4c65411b --- /dev/null +++ 
b/src/s3select/TPCDS/sample-queries-tpcds/query19.sql @@ -0,0 +1,25 @@ +-- start query 1 in stream 0 using template query19.tpl and seed 1930872976 +select i_brand_id brand_id, i_brand brand, i_manufact_id, i_manufact, + sum(ss_ext_sales_price) ext_price + from date_dim, store_sales, item,customer,customer_address,store + where d_date_sk = ss_sold_date_sk + and ss_item_sk = i_item_sk + and i_manager_id=16 + and d_moy=12 + and d_year=1998 + and ss_customer_sk = c_customer_sk + and c_current_addr_sk = ca_address_sk + and substr(ca_zip,1,5) <> substr(s_zip,1,5) + and ss_store_sk = s_store_sk + group by i_brand + ,i_brand_id + ,i_manufact_id + ,i_manufact + order by ext_price desc + ,i_brand + ,i_brand_id + ,i_manufact_id + ,i_manufact +limit 100 ; + +-- end query 1 in stream 0 using template query19.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query2.sql b/src/s3select/TPCDS/sample-queries-tpcds/query2.sql new file mode 100644 index 000000000..cb6f026d2 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query2.sql @@ -0,0 +1,60 @@ +-- start query 1 in stream 0 using template query2.tpl and seed 1819994127 +with wscs as + (select sold_date_sk + ,sales_price + from (select ws_sold_date_sk sold_date_sk + ,ws_ext_sales_price sales_price + from web_sales) x + union all + (select cs_sold_date_sk sold_date_sk + ,cs_ext_sales_price sales_price + from catalog_sales)), + wswscs as + (select d_week_seq, + sum(case when (d_day_name='Sunday') then sales_price else null end) sun_sales, + sum(case when (d_day_name='Monday') then sales_price else null end) mon_sales, + sum(case when (d_day_name='Tuesday') then sales_price else null end) tue_sales, + sum(case when (d_day_name='Wednesday') then sales_price else null end) wed_sales, + sum(case when (d_day_name='Thursday') then sales_price else null end) thu_sales, + sum(case when (d_day_name='Friday') then sales_price else null end) fri_sales, + sum(case when (d_day_name='Saturday') then sales_price else null end) sat_sales + from wscs + ,date_dim + where d_date_sk = sold_date_sk + group by d_week_seq) + select d_week_seq1 + ,round(sun_sales1/sun_sales2,2) + ,round(mon_sales1/mon_sales2,2) + ,round(tue_sales1/tue_sales2,2) + ,round(wed_sales1/wed_sales2,2) + ,round(thu_sales1/thu_sales2,2) + ,round(fri_sales1/fri_sales2,2) + ,round(sat_sales1/sat_sales2,2) + from + (select wswscs.d_week_seq d_week_seq1 + ,sun_sales sun_sales1 + ,mon_sales mon_sales1 + ,tue_sales tue_sales1 + ,wed_sales wed_sales1 + ,thu_sales thu_sales1 + ,fri_sales fri_sales1 + ,sat_sales sat_sales1 + from wswscs,date_dim + where date_dim.d_week_seq = wswscs.d_week_seq and + d_year = 1998) y, + (select wswscs.d_week_seq d_week_seq2 + ,sun_sales sun_sales2 + ,mon_sales mon_sales2 + ,tue_sales tue_sales2 + ,wed_sales wed_sales2 + ,thu_sales thu_sales2 + ,fri_sales fri_sales2 + ,sat_sales sat_sales2 + from wswscs + ,date_dim + where date_dim.d_week_seq = wswscs.d_week_seq and + d_year = 1998+1) z + where d_week_seq1=d_week_seq2-53 + order by d_week_seq1; + +-- end query 1 in stream 0 using template query2.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query20.sql b/src/s3select/TPCDS/sample-queries-tpcds/query20.sql new file mode 100644 index 000000000..abe7e08a3 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query20.sql @@ -0,0 +1,30 @@ +-- start query 1 in stream 0 using template query20.tpl and seed 345591136 +select i_item_id + ,i_item_desc + ,i_category + ,i_class + ,i_current_price + ,sum(cs_ext_sales_price) as itemrevenue + 
,sum(cs_ext_sales_price)*100/sum(sum(cs_ext_sales_price)) over + (partition by i_class) as revenueratio + from catalog_sales + ,item + ,date_dim + where cs_item_sk = i_item_sk + and i_category in ('Shoes', 'Electronics', 'Children') + and cs_sold_date_sk = d_date_sk + and d_date between cast('2001-03-14' as date) + and (cast('2001-03-14' as date) + interval '30' day) + group by i_item_id + ,i_item_desc + ,i_category + ,i_class + ,i_current_price + order by i_category + ,i_class + ,i_item_id + ,i_item_desc + ,revenueratio +limit 100; + +-- end query 1 in stream 0 using template query20.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query21.sql b/src/s3select/TPCDS/sample-queries-tpcds/query21.sql new file mode 100644 index 000000000..2d7a0f900 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query21.sql @@ -0,0 +1,30 @@ +-- start query 1 in stream 0 using template query21.tpl and seed 1819994127 +select * + from(select w_warehouse_name + ,i_item_id + ,sum(case when (cast(d_date as date) < cast ('1999-03-20' as date)) + then inv_quantity_on_hand + else 0 end) as inv_before + ,sum(case when (cast(d_date as date) >= cast ('1999-03-20' as date)) + then inv_quantity_on_hand + else 0 end) as inv_after + from inventory + ,warehouse + ,item + ,date_dim + where i_current_price between 0.99 and 1.49 + and i_item_sk = inv_item_sk + and inv_warehouse_sk = w_warehouse_sk + and inv_date_sk = d_date_sk + and d_date between (cast ('1999-03-20' as date) - interval '30' day) + and (cast ('1999-03-20' as date) + interval '30' day) + group by w_warehouse_name, i_item_id) x + where (case when inv_before > 0 + then inv_after / inv_before + else null + end) between 2.0/3.0 and 3.0/2.0 + order by w_warehouse_name + ,i_item_id + limit 100; + +-- end query 1 in stream 0 using template query21.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query22.sql b/src/s3select/TPCDS/sample-queries-tpcds/query22.sql new file mode 100644 index 000000000..07c4869a4 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query22.sql @@ -0,0 +1,20 @@ +-- start query 1 in stream 0 using template query22.tpl and seed 1819994127 +select i_product_name + ,i_brand + ,i_class + ,i_category + ,avg(inv_quantity_on_hand) qoh + from inventory + ,date_dim + ,item + where inv_date_sk=d_date_sk + and inv_item_sk=i_item_sk + and d_month_seq between 1186 and 1186 + 11 + group by rollup(i_product_name + ,i_brand + ,i_class + ,i_category) +order by qoh, i_product_name, i_brand, i_class, i_category +limit 100; + +-- end query 1 in stream 0 using template query22.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query23.sql b/src/s3select/TPCDS/sample-queries-tpcds/query23.sql new file mode 100644 index 000000000..80526b130 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query23.sql @@ -0,0 +1,107 @@ +-- start query 1 in stream 0 using template query23.tpl and seed 2031708268 +with frequent_ss_items as + (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt + from store_sales + ,date_dim + ,item + where ss_sold_date_sk = d_date_sk + and ss_item_sk = i_item_sk + and d_year in (2000,2000+1,2000+2,2000+3) + group by substr(i_item_desc,1,30),i_item_sk,d_date + having count(*) >4), + max_store_sales as + (select max(csales) tpcds_cmax + from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales + from store_sales + ,customer + ,date_dim + where ss_customer_sk = c_customer_sk + and ss_sold_date_sk = d_date_sk + and d_year in (2000,2000+1,2000+2,2000+3) + group by 
c_customer_sk) x), + best_ss_customer as + (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales + from store_sales + ,customer + where ss_customer_sk = c_customer_sk + group by c_customer_sk + having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select + * +from + max_store_sales)) + select sum(sales) + from (select cs_quantity*cs_list_price sales + from catalog_sales + ,date_dim + where d_year = 2000 + and d_moy = 3 + and cs_sold_date_sk = d_date_sk + and cs_item_sk in (select item_sk from frequent_ss_items) + and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) + union all + select ws_quantity*ws_list_price sales + from web_sales + ,date_dim + where d_year = 2000 + and d_moy = 3 + and ws_sold_date_sk = d_date_sk + and ws_item_sk in (select item_sk from frequent_ss_items) + and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer)) y + limit 100; +with frequent_ss_items as + (select substr(i_item_desc,1,30) itemdesc,i_item_sk item_sk,d_date solddate,count(*) cnt + from store_sales + ,date_dim + ,item + where ss_sold_date_sk = d_date_sk + and ss_item_sk = i_item_sk + and d_year in (2000,2000 + 1,2000 + 2,2000 + 3) + group by substr(i_item_desc,1,30),i_item_sk,d_date + having count(*) >4), + max_store_sales as + (select max(csales) tpcds_cmax + from (select c_customer_sk,sum(ss_quantity*ss_sales_price) csales + from store_sales + ,customer + ,date_dim + where ss_customer_sk = c_customer_sk + and ss_sold_date_sk = d_date_sk + and d_year in (2000,2000+1,2000+2,2000+3) + group by c_customer_sk) x), + best_ss_customer as + (select c_customer_sk,sum(ss_quantity*ss_sales_price) ssales + from store_sales + ,customer + where ss_customer_sk = c_customer_sk + group by c_customer_sk + having sum(ss_quantity*ss_sales_price) > (95/100.0) * (select + * + from max_store_sales)) + select c_last_name,c_first_name,sales + from (select c_last_name,c_first_name,sum(cs_quantity*cs_list_price) sales + from catalog_sales + ,customer + ,date_dim + where d_year = 2000 + and d_moy = 3 + and cs_sold_date_sk = d_date_sk + and cs_item_sk in (select item_sk from frequent_ss_items) + and cs_bill_customer_sk in (select c_customer_sk from best_ss_customer) + and cs_bill_customer_sk = c_customer_sk + group by c_last_name,c_first_name + union all + select c_last_name,c_first_name,sum(ws_quantity*ws_list_price) sales + from web_sales + ,customer + ,date_dim + where d_year = 2000 + and d_moy = 3 + and ws_sold_date_sk = d_date_sk + and ws_item_sk in (select item_sk from frequent_ss_items) + and ws_bill_customer_sk in (select c_customer_sk from best_ss_customer) + and ws_bill_customer_sk = c_customer_sk + group by c_last_name,c_first_name + order by c_last_name,c_first_name,sales) y + limit 100; + +-- end query 1 in stream 0 using template query23.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query24.sql b/src/s3select/TPCDS/sample-queries-tpcds/query24.sql new file mode 100644 index 000000000..034b9cf17 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query24.sql @@ -0,0 +1,107 @@ +-- start query 1 in stream 0 using template query24.tpl and seed 1220860970 +with ssales as +(select c_last_name + ,c_first_name + ,s_store_name + ,ca_state + ,s_state + ,i_color + ,i_current_price + ,i_manager_id + ,i_units + ,i_size + ,sum(ss_sales_price) netpaid +from store_sales + ,store_returns + ,store + ,item + ,customer + ,customer_address +where ss_ticket_number = sr_ticket_number + and ss_item_sk = sr_item_sk + and ss_customer_sk = c_customer_sk + and ss_item_sk = i_item_sk + 
and ss_store_sk = s_store_sk + and c_current_addr_sk = ca_address_sk + and c_birth_country <> upper(ca_country) + and s_zip = ca_zip +and s_market_id=10 +group by c_last_name + ,c_first_name + ,s_store_name + ,ca_state + ,s_state + ,i_color + ,i_current_price + ,i_manager_id + ,i_units + ,i_size) +select c_last_name + ,c_first_name + ,s_store_name + ,sum(netpaid) paid +from ssales +where i_color = 'snow' +group by c_last_name + ,c_first_name + ,s_store_name +having sum(netpaid) > (select 0.05*avg(netpaid) + from ssales) +order by c_last_name + ,c_first_name + ,s_store_name +; +with ssales as +(select c_last_name + ,c_first_name + ,s_store_name + ,ca_state + ,s_state + ,i_color + ,i_current_price + ,i_manager_id + ,i_units + ,i_size + ,sum(ss_sales_price) netpaid +from store_sales + ,store_returns + ,store + ,item + ,customer + ,customer_address +where ss_ticket_number = sr_ticket_number + and ss_item_sk = sr_item_sk + and ss_customer_sk = c_customer_sk + and ss_item_sk = i_item_sk + and ss_store_sk = s_store_sk + and c_current_addr_sk = ca_address_sk + and c_birth_country <> upper(ca_country) + and s_zip = ca_zip + and s_market_id = 10 +group by c_last_name + ,c_first_name + ,s_store_name + ,ca_state + ,s_state + ,i_color + ,i_current_price + ,i_manager_id + ,i_units + ,i_size) +select c_last_name + ,c_first_name + ,s_store_name + ,sum(netpaid) paid +from ssales +where i_color = 'chiffon' +group by c_last_name + ,c_first_name + ,s_store_name +having sum(netpaid) > (select 0.05*avg(netpaid) + from ssales) +order by c_last_name + ,c_first_name + ,s_store_name +; + +-- end query 1 in stream 0 using template query24.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query25.sql b/src/s3select/TPCDS/sample-queries-tpcds/query25.sql new file mode 100644 index 000000000..3e2624c02 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query25.sql @@ -0,0 +1,48 @@ +-- start query 1 in stream 0 using template query25.tpl and seed 1819994127 +select + i_item_id + ,i_item_desc + ,s_store_id + ,s_store_name + ,sum(ss_net_profit) as store_sales_profit + ,sum(sr_net_loss) as store_returns_loss + ,sum(cs_net_profit) as catalog_sales_profit + from + store_sales + ,store_returns + ,catalog_sales + ,date_dim d1 + ,date_dim d2 + ,date_dim d3 + ,store + ,item + where + d1.d_moy = 4 + and d1.d_year = 2000 + and d1.d_date_sk = ss_sold_date_sk + and i_item_sk = ss_item_sk + and s_store_sk = ss_store_sk + and ss_customer_sk = sr_customer_sk + and ss_item_sk = sr_item_sk + and ss_ticket_number = sr_ticket_number + and sr_returned_date_sk = d2.d_date_sk + and d2.d_moy between 4 and 10 + and d2.d_year = 2000 + and sr_customer_sk = cs_bill_customer_sk + and sr_item_sk = cs_item_sk + and cs_sold_date_sk = d3.d_date_sk + and d3.d_moy between 4 and 10 + and d3.d_year = 2000 + group by + i_item_id + ,i_item_desc + ,s_store_id + ,s_store_name + order by + i_item_id + ,i_item_desc + ,s_store_id + ,s_store_name + limit 100; + +-- end query 1 in stream 0 using template query25.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query26.sql b/src/s3select/TPCDS/sample-queries-tpcds/query26.sql new file mode 100644 index 000000000..7298126cf --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query26.sql @@ -0,0 +1,21 @@ +-- start query 1 in stream 0 using template query26.tpl and seed 1930872976 +select i_item_id, + avg(cs_quantity) agg1, + avg(cs_list_price) agg2, + avg(cs_coupon_amt) agg3, + avg(cs_sales_price) agg4 + from catalog_sales, customer_demographics, date_dim, item, promotion + where 
cs_sold_date_sk = d_date_sk and + cs_item_sk = i_item_sk and + cs_bill_cdemo_sk = cd_demo_sk and + cs_promo_sk = p_promo_sk and + cd_gender = 'F' and + cd_marital_status = 'S' and + cd_education_status = 'College' and + (p_channel_email = 'N' or p_channel_event = 'N') and + d_year = 1998 + group by i_item_id + order by i_item_id + limit 100; + +-- end query 1 in stream 0 using template query26.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query27.sql b/src/s3select/TPCDS/sample-queries-tpcds/query27.sql new file mode 100644 index 000000000..9a906c4a0 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query27.sql @@ -0,0 +1,23 @@ +-- start query 1 in stream 0 using template query27.tpl and seed 2017787633 +select i_item_id, + s_state, grouping(s_state) g_state, + avg(ss_quantity) agg1, + avg(ss_list_price) agg2, + avg(ss_coupon_amt) agg3, + avg(ss_sales_price) agg4 + from store_sales, customer_demographics, date_dim, store, item + where ss_sold_date_sk = d_date_sk and + ss_item_sk = i_item_sk and + ss_store_sk = s_store_sk and + ss_cdemo_sk = cd_demo_sk and + cd_gender = 'F' and + cd_marital_status = 'U' and + cd_education_status = '2 yr Degree' and + d_year = 2000 and + s_state in ('AL','IN', 'SC', 'NY', 'OH', 'FL') + group by rollup (i_item_id, s_state) + order by i_item_id + ,s_state + limit 100; + +-- end query 1 in stream 0 using template query27.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query28.sql b/src/s3select/TPCDS/sample-queries-tpcds/query28.sql new file mode 100644 index 000000000..17133f91f --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query28.sql @@ -0,0 +1,53 @@ +-- start query 1 in stream 0 using template query28.tpl and seed 444293455 +select * +from (select avg(ss_list_price) B1_LP + ,count(ss_list_price) B1_CNT + ,count(distinct ss_list_price) B1_CNTD + from store_sales + where ss_quantity between 0 and 5 + and (ss_list_price between 73 and 73+10 + or ss_coupon_amt between 7826 and 7826+1000 + or ss_wholesale_cost between 70 and 70+20)) B1, + (select avg(ss_list_price) B2_LP + ,count(ss_list_price) B2_CNT + ,count(distinct ss_list_price) B2_CNTD + from store_sales + where ss_quantity between 6 and 10 + and (ss_list_price between 152 and 152+10 + or ss_coupon_amt between 2196 and 2196+1000 + or ss_wholesale_cost between 56 and 56+20)) B2, + (select avg(ss_list_price) B3_LP + ,count(ss_list_price) B3_CNT + ,count(distinct ss_list_price) B3_CNTD + from store_sales + where ss_quantity between 11 and 15 + and (ss_list_price between 53 and 53+10 + or ss_coupon_amt between 3430 and 3430+1000 + or ss_wholesale_cost between 13 and 13+20)) B3, + (select avg(ss_list_price) B4_LP + ,count(ss_list_price) B4_CNT + ,count(distinct ss_list_price) B4_CNTD + from store_sales + where ss_quantity between 16 and 20 + and (ss_list_price between 182 and 182+10 + or ss_coupon_amt between 3262 and 3262+1000 + or ss_wholesale_cost between 20 and 20+20)) B4, + (select avg(ss_list_price) B5_LP + ,count(ss_list_price) B5_CNT + ,count(distinct ss_list_price) B5_CNTD + from store_sales + where ss_quantity between 21 and 25 + and (ss_list_price between 85 and 85+10 + or ss_coupon_amt between 3310 and 3310+1000 + or ss_wholesale_cost between 37 and 37+20)) B5, + (select avg(ss_list_price) B6_LP + ,count(ss_list_price) B6_CNT + ,count(distinct ss_list_price) B6_CNTD + from store_sales + where ss_quantity between 26 and 30 + and (ss_list_price between 180 and 180+10 + or ss_coupon_amt between 12592 and 12592+1000 + or ss_wholesale_cost between 22 and 
22+20)) B6 +limit 100; + +-- end query 1 in stream 0 using template query28.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query29.sql b/src/s3select/TPCDS/sample-queries-tpcds/query29.sql new file mode 100644 index 000000000..4491e5393 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query29.sql @@ -0,0 +1,47 @@ +-- start query 1 in stream 0 using template query29.tpl and seed 2031708268 +select + i_item_id + ,i_item_desc + ,s_store_id + ,s_store_name + ,stddev_samp(ss_quantity) as store_sales_quantity + ,stddev_samp(sr_return_quantity) as store_returns_quantity + ,stddev_samp(cs_quantity) as catalog_sales_quantity + from + store_sales + ,store_returns + ,catalog_sales + ,date_dim d1 + ,date_dim d2 + ,date_dim d3 + ,store + ,item + where + d1.d_moy = 4 + and d1.d_year = 1998 + and d1.d_date_sk = ss_sold_date_sk + and i_item_sk = ss_item_sk + and s_store_sk = ss_store_sk + and ss_customer_sk = sr_customer_sk + and ss_item_sk = sr_item_sk + and ss_ticket_number = sr_ticket_number + and sr_returned_date_sk = d2.d_date_sk + and d2.d_moy between 4 and 4 + 3 + and d2.d_year = 1998 + and sr_customer_sk = cs_bill_customer_sk + and sr_item_sk = cs_item_sk + and cs_sold_date_sk = d3.d_date_sk + and d3.d_year in (1998,1998+1,1998+2) + group by + i_item_id + ,i_item_desc + ,s_store_id + ,s_store_name + order by + i_item_id + ,i_item_desc + ,s_store_id + ,s_store_name + limit 100; + +-- end query 1 in stream 0 using template query29.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query3.sql b/src/s3select/TPCDS/sample-queries-tpcds/query3.sql new file mode 100644 index 000000000..1944edb87 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query3.sql @@ -0,0 +1,21 @@ +-- start query 1 in stream 0 using template query3.tpl and seed 2031708268 +select dt.d_year + ,item.i_brand_id brand_id + ,item.i_brand brand + ,sum(ss_sales_price) sum_agg + from date_dim dt + ,store_sales + ,item + where dt.d_date_sk = store_sales.ss_sold_date_sk + and store_sales.ss_item_sk = item.i_item_sk + and item.i_manufact_id = 816 + and dt.d_moy=11 + group by dt.d_year + ,item.i_brand + ,item.i_brand_id + order by dt.d_year + ,sum_agg desc + ,brand_id + limit 100; + +-- end query 1 in stream 0 using template query3.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query30.sql b/src/s3select/TPCDS/sample-queries-tpcds/query30.sql new file mode 100644 index 000000000..a1702d12f --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query30.sql @@ -0,0 +1,31 @@ +-- start query 1 in stream 0 using template query30.tpl and seed 1819994127 +with customer_total_return as + (select wr_returning_customer_sk as ctr_customer_sk + ,ca_state as ctr_state, + sum(wr_return_amt) as ctr_total_return + from web_returns + ,date_dim + ,customer_address + where wr_returned_date_sk = d_date_sk + and d_year =2000 + and wr_returning_addr_sk = ca_address_sk + group by wr_returning_customer_sk + ,ca_state) + select c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag + ,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address + ,c_last_review_date_sk,ctr_total_return + from customer_total_return ctr1 + ,customer_address + ,customer + where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 + from customer_total_return ctr2 + where ctr1.ctr_state = ctr2.ctr_state) + and ca_address_sk = c_current_addr_sk + and ca_state = 'GA' + and ctr1.ctr_customer_sk = c_customer_sk + order by c_customer_id,c_salutation,c_first_name,c_last_name,c_preferred_cust_flag + 
,c_birth_day,c_birth_month,c_birth_year,c_birth_country,c_login,c_email_address + ,c_last_review_date_sk,ctr_total_return +limit 100; + +-- end query 1 in stream 0 using template query30.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query31.sql b/src/s3select/TPCDS/sample-queries-tpcds/query31.sql new file mode 100644 index 000000000..1c89b6551 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query31.sql @@ -0,0 +1,52 @@ +-- start query 1 in stream 0 using template query31.tpl and seed 1819994127 +with ss as + (select ca_county,d_qoy, d_year,sum(ss_ext_sales_price) as store_sales + from store_sales,date_dim,customer_address + where ss_sold_date_sk = d_date_sk + and ss_addr_sk=ca_address_sk + group by ca_county,d_qoy, d_year), + ws as + (select ca_county,d_qoy, d_year,sum(ws_ext_sales_price) as web_sales + from web_sales,date_dim,customer_address + where ws_sold_date_sk = d_date_sk + and ws_bill_addr_sk=ca_address_sk + group by ca_county,d_qoy, d_year) + select + ss1.ca_county + ,ss1.d_year + ,ws2.web_sales/ws1.web_sales web_q1_q2_increase + ,ss2.store_sales/ss1.store_sales store_q1_q2_increase + ,ws3.web_sales/ws2.web_sales web_q2_q3_increase + ,ss3.store_sales/ss2.store_sales store_q2_q3_increase + from + ss ss1 + ,ss ss2 + ,ss ss3 + ,ws ws1 + ,ws ws2 + ,ws ws3 + where + ss1.d_qoy = 1 + and ss1.d_year = 1999 + and ss1.ca_county = ss2.ca_county + and ss2.d_qoy = 2 + and ss2.d_year = 1999 + and ss2.ca_county = ss3.ca_county + and ss3.d_qoy = 3 + and ss3.d_year = 1999 + and ss1.ca_county = ws1.ca_county + and ws1.d_qoy = 1 + and ws1.d_year = 1999 + and ws1.ca_county = ws2.ca_county + and ws2.d_qoy = 2 + and ws2.d_year = 1999 + and ws1.ca_county = ws3.ca_county + and ws3.d_qoy = 3 + and ws3.d_year =1999 + and case when ws1.web_sales > 0 then ws2.web_sales/ws1.web_sales else null end + > case when ss1.store_sales > 0 then ss2.store_sales/ss1.store_sales else null end + and case when ws2.web_sales > 0 then ws3.web_sales/ws2.web_sales else null end + > case when ss2.store_sales > 0 then ss3.store_sales/ss2.store_sales else null end + order by ss1.d_year; + +-- end query 1 in stream 0 using template query31.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query32.sql b/src/s3select/TPCDS/sample-queries-tpcds/query32.sql new file mode 100644 index 000000000..604557a07 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query32.sql @@ -0,0 +1,28 @@ +-- start query 1 in stream 0 using template query32.tpl and seed 2031708268 +select sum(cs_ext_discount_amt) as "excess discount amount" +from + catalog_sales + ,item + ,date_dim +where +i_manufact_id = 66 +and i_item_sk = cs_item_sk +and d_date between cast('2002-03-29' as date) and + (cast('2002-03-29' as date) + interval '90' day) +and d_date_sk = cs_sold_date_sk +and cs_ext_discount_amt + > ( + select + 1.3 * avg(cs_ext_discount_amt) + from + catalog_sales + ,date_dim + where + cs_item_sk = i_item_sk + and d_date between cast('2002-03-29' as date) and + (cast('2002-03-29' as date) + interval '90' day) + and d_date_sk = cs_sold_date_sk + ) +limit 100; + +-- end query 1 in stream 0 using template query32.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query33.sql b/src/s3select/TPCDS/sample-queries-tpcds/query33.sql new file mode 100644 index 000000000..d075de468 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query33.sql @@ -0,0 +1,75 @@ +-- start query 1 in stream 0 using template query33.tpl and seed 1930872976 +with ss as ( + select + i_manufact_id,sum(ss_ext_sales_price) total_sales + 
from + store_sales, + date_dim, + customer_address, + item + where + i_manufact_id in (select + i_manufact_id +from + item +where i_category in ('Home')) + and ss_item_sk = i_item_sk + and ss_sold_date_sk = d_date_sk + and d_year = 1998 + and d_moy = 5 + and ss_addr_sk = ca_address_sk + and ca_gmt_offset = -6 + group by i_manufact_id), + cs as ( + select + i_manufact_id,sum(cs_ext_sales_price) total_sales + from + catalog_sales, + date_dim, + customer_address, + item + where + i_manufact_id in (select + i_manufact_id +from + item +where i_category in ('Home')) + and cs_item_sk = i_item_sk + and cs_sold_date_sk = d_date_sk + and d_year = 1998 + and d_moy = 5 + and cs_bill_addr_sk = ca_address_sk + and ca_gmt_offset = -6 + group by i_manufact_id), + ws as ( + select + i_manufact_id,sum(ws_ext_sales_price) total_sales + from + web_sales, + date_dim, + customer_address, + item + where + i_manufact_id in (select + i_manufact_id +from + item +where i_category in ('Home')) + and ws_item_sk = i_item_sk + and ws_sold_date_sk = d_date_sk + and d_year = 1998 + and d_moy = 5 + and ws_bill_addr_sk = ca_address_sk + and ca_gmt_offset = -6 + group by i_manufact_id) + select i_manufact_id ,sum(total_sales) total_sales + from (select * from ss + union all + select * from cs + union all + select * from ws) tmp1 + group by i_manufact_id + order by total_sales +limit 100; + +-- end query 1 in stream 0 using template query33.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query34.sql b/src/s3select/TPCDS/sample-queries-tpcds/query34.sql new file mode 100644 index 000000000..e5ddc4c1d --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query34.sql @@ -0,0 +1,31 @@ +-- start query 1 in stream 0 using template query34.tpl and seed 1971067816 +select c_last_name + ,c_first_name + ,c_salutation + ,c_preferred_cust_flag + ,ss_ticket_number + ,cnt from + (select ss_ticket_number + ,ss_customer_sk + ,count(*) cnt + from store_sales,date_dim,store,household_demographics + where store_sales.ss_sold_date_sk = date_dim.d_date_sk + and store_sales.ss_store_sk = store.s_store_sk + and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk + and (date_dim.d_dom between 1 and 3 or date_dim.d_dom between 25 and 28) + and (household_demographics.hd_buy_potential = '>10000' or + household_demographics.hd_buy_potential = 'Unknown') + and household_demographics.hd_vehicle_count > 0 + and (case when household_demographics.hd_vehicle_count > 0 + then household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count + else null + end) > 1.2 + and date_dim.d_year in (2000,2000+1,2000+2) + and store.s_county in ('Salem County','Terrell County','Arthur County','Oglethorpe County', + 'Lunenburg County','Perry County','Halifax County','Sumner County') + group by ss_ticket_number,ss_customer_sk) dn,customer + where ss_customer_sk = c_customer_sk + and cnt between 15 and 20 + order by c_last_name,c_first_name,c_salutation,c_preferred_cust_flag desc, ss_ticket_number; + +-- end query 1 in stream 0 using template query34.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query35.sql b/src/s3select/TPCDS/sample-queries-tpcds/query35.sql new file mode 100644 index 000000000..ef7cdf0bb --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query35.sql @@ -0,0 +1,58 @@ +-- start query 1 in stream 0 using template query35.tpl and seed 1930872976 +select + ca_state, + cd_gender, + cd_marital_status, + cd_dep_count, + count(*) cnt1, + avg(cd_dep_count), + min(cd_dep_count), + stddev_samp(cd_dep_count), + 
cd_dep_employed_count, + count(*) cnt2, + avg(cd_dep_employed_count), + min(cd_dep_employed_count), + stddev_samp(cd_dep_employed_count), + cd_dep_college_count, + count(*) cnt3, + avg(cd_dep_college_count), + min(cd_dep_college_count), + stddev_samp(cd_dep_college_count) + from + customer c,customer_address ca,customer_demographics + where + c.c_current_addr_sk = ca.ca_address_sk and + cd_demo_sk = c.c_current_cdemo_sk and + exists (select * + from store_sales,date_dim + where c.c_customer_sk = ss_customer_sk and + ss_sold_date_sk = d_date_sk and + d_year = 2001 and + d_qoy < 4) and + (exists (select * + from web_sales,date_dim + where c.c_customer_sk = ws_bill_customer_sk and + ws_sold_date_sk = d_date_sk and + d_year = 2001 and + d_qoy < 4) or + exists (select * + from catalog_sales,date_dim + where c.c_customer_sk = cs_ship_customer_sk and + cs_sold_date_sk = d_date_sk and + d_year = 2001 and + d_qoy < 4)) + group by ca_state, + cd_gender, + cd_marital_status, + cd_dep_count, + cd_dep_employed_count, + cd_dep_college_count + order by ca_state, + cd_gender, + cd_marital_status, + cd_dep_count, + cd_dep_employed_count, + cd_dep_college_count + limit 100; + +-- end query 1 in stream 0 using template query35.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query36.sql b/src/s3select/TPCDS/sample-queries-tpcds/query36.sql new file mode 100644 index 000000000..8456fc8c4 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query36.sql @@ -0,0 +1,30 @@ +-- start query 1 in stream 0 using template query36.tpl and seed 1544728811 +select + sum(ss_net_profit)/sum(ss_ext_sales_price) as gross_margin + ,i_category + ,i_class + ,grouping(i_category)+grouping(i_class) as lochierarchy + ,rank() over ( + partition by grouping(i_category)+grouping(i_class), + case when grouping(i_class) = 0 then i_category end + order by sum(ss_net_profit)/sum(ss_ext_sales_price) asc) as rank_within_parent + from + store_sales + ,date_dim d1 + ,item + ,store + where + d1.d_year = 1999 + and d1.d_date_sk = ss_sold_date_sk + and i_item_sk = ss_item_sk + and s_store_sk = ss_store_sk + and s_state in ('IN','AL','MI','MN', + 'TN','LA','FL','NM') + group by rollup(i_category,i_class) + order by + lochierarchy desc + ,case when lochierarchy = 0 then i_category end + ,rank_within_parent + limit 100; + +-- end query 1 in stream 0 using template query36.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query37.sql b/src/s3select/TPCDS/sample-queries-tpcds/query37.sql new file mode 100644 index 000000000..dc799dcf5 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query37.sql @@ -0,0 +1,17 @@ +-- start query 1 in stream 0 using template query37.tpl and seed 301843662 +select i_item_id + ,i_item_desc + ,i_current_price + from item, inventory, date_dim, catalog_sales + where i_current_price between 39 and 39 + 30 + and inv_item_sk = i_item_sk + and d_date_sk=inv_date_sk + and d_date between cast('2001-01-16' as date) and (cast('2001-01-16' as date) + interval '60' day) + and i_manufact_id in (765,886,889,728) + and inv_quantity_on_hand between 100 and 500 + and cs_item_sk = i_item_sk + group by i_item_id,i_item_desc,i_current_price + order by i_item_id + limit 100; + +-- end query 1 in stream 0 using template query37.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query38.sql b/src/s3select/TPCDS/sample-queries-tpcds/query38.sql new file mode 100644 index 000000000..5570ae202 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query38.sql @@ -0,0 +1,23 @@ +-- start query 1 in stream 0 
using template query38.tpl and seed 1819994127 +select count(*) from ( + select distinct c_last_name, c_first_name, d_date + from store_sales, date_dim, customer + where store_sales.ss_sold_date_sk = date_dim.d_date_sk + and store_sales.ss_customer_sk = customer.c_customer_sk + and d_month_seq between 1186 and 1186 + 11 + intersect + select distinct c_last_name, c_first_name, d_date + from catalog_sales, date_dim, customer + where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk + and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk + and d_month_seq between 1186 and 1186 + 11 + intersect + select distinct c_last_name, c_first_name, d_date + from web_sales, date_dim, customer + where web_sales.ws_sold_date_sk = date_dim.d_date_sk + and web_sales.ws_bill_customer_sk = customer.c_customer_sk + and d_month_seq between 1186 and 1186 + 11 +) hot_cust +limit 100; + +-- end query 1 in stream 0 using template query38.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query39.sql b/src/s3select/TPCDS/sample-queries-tpcds/query39.sql new file mode 100644 index 000000000..9c714fca0 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query39.sql @@ -0,0 +1,54 @@ +-- start query 1 in stream 0 using template query39.tpl and seed 1327317894 +with inv as +(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy + ,stdev,mean, case mean when 0 then null else stdev/mean end cov + from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy + ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean + from inventory + ,item + ,warehouse + ,date_dim + where inv_item_sk = i_item_sk + and inv_warehouse_sk = w_warehouse_sk + and inv_date_sk = d_date_sk + and d_year =2000 + group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo + where case mean when 0 then 0 else stdev/mean end > 1) +select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov + ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov +from inv inv1,inv inv2 +where inv1.i_item_sk = inv2.i_item_sk + and inv1.w_warehouse_sk = inv2.w_warehouse_sk + and inv1.d_moy=2 + and inv2.d_moy=2+1 +order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov + ,inv2.d_moy,inv2.mean, inv2.cov +; +with inv as +(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy + ,stdev,mean, case mean when 0 then null else stdev/mean end cov + from(select w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy + ,stddev_samp(inv_quantity_on_hand) stdev,avg(inv_quantity_on_hand) mean + from inventory + ,item + ,warehouse + ,date_dim + where inv_item_sk = i_item_sk + and inv_warehouse_sk = w_warehouse_sk + and inv_date_sk = d_date_sk + and d_year =2000 + group by w_warehouse_name,w_warehouse_sk,i_item_sk,d_moy) foo + where case mean when 0 then 0 else stdev/mean end > 1) +select inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean, inv1.cov + ,inv2.w_warehouse_sk,inv2.i_item_sk,inv2.d_moy,inv2.mean, inv2.cov +from inv inv1,inv inv2 +where inv1.i_item_sk = inv2.i_item_sk + and inv1.w_warehouse_sk = inv2.w_warehouse_sk + and inv1.d_moy=2 + and inv2.d_moy=2+1 + and inv1.cov > 1.5 +order by inv1.w_warehouse_sk,inv1.i_item_sk,inv1.d_moy,inv1.mean,inv1.cov + ,inv2.d_moy,inv2.mean, inv2.cov +; + +-- end query 1 in stream 0 using template query39.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query4.sql b/src/s3select/TPCDS/sample-queries-tpcds/query4.sql new file mode 100644 index 000000000..b4fd65bea --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query4.sql @@ -0,0 +1,116 @@ +-- start 
query 1 in stream 0 using template query4.tpl and seed 1819994127 +with year_total as ( + select c_customer_id customer_id + ,c_first_name customer_first_name + ,c_last_name customer_last_name + ,c_preferred_cust_flag customer_preferred_cust_flag + ,c_birth_country customer_birth_country + ,c_login customer_login + ,c_email_address customer_email_address + ,d_year dyear + ,sum(((ss_ext_list_price-ss_ext_wholesale_cost-ss_ext_discount_amt)+ss_ext_sales_price)/2) year_total + ,'s' sale_type + from customer + ,store_sales + ,date_dim + where c_customer_sk = ss_customer_sk + and ss_sold_date_sk = d_date_sk + group by c_customer_id + ,c_first_name + ,c_last_name + ,c_preferred_cust_flag + ,c_birth_country + ,c_login + ,c_email_address + ,d_year + union all + select c_customer_id customer_id + ,c_first_name customer_first_name + ,c_last_name customer_last_name + ,c_preferred_cust_flag customer_preferred_cust_flag + ,c_birth_country customer_birth_country + ,c_login customer_login + ,c_email_address customer_email_address + ,d_year dyear + ,sum((((cs_ext_list_price-cs_ext_wholesale_cost-cs_ext_discount_amt)+cs_ext_sales_price)/2) ) year_total + ,'c' sale_type + from customer + ,catalog_sales + ,date_dim + where c_customer_sk = cs_bill_customer_sk + and cs_sold_date_sk = d_date_sk + group by c_customer_id + ,c_first_name + ,c_last_name + ,c_preferred_cust_flag + ,c_birth_country + ,c_login + ,c_email_address + ,d_year +union all + select c_customer_id customer_id + ,c_first_name customer_first_name + ,c_last_name customer_last_name + ,c_preferred_cust_flag customer_preferred_cust_flag + ,c_birth_country customer_birth_country + ,c_login customer_login + ,c_email_address customer_email_address + ,d_year dyear + ,sum((((ws_ext_list_price-ws_ext_wholesale_cost-ws_ext_discount_amt)+ws_ext_sales_price)/2) ) year_total + ,'w' sale_type + from customer + ,web_sales + ,date_dim + where c_customer_sk = ws_bill_customer_sk + and ws_sold_date_sk = d_date_sk + group by c_customer_id + ,c_first_name + ,c_last_name + ,c_preferred_cust_flag + ,c_birth_country + ,c_login + ,c_email_address + ,d_year + ) + select + t_s_secyear.customer_id + ,t_s_secyear.customer_first_name + ,t_s_secyear.customer_last_name + ,t_s_secyear.customer_birth_country + from year_total t_s_firstyear + ,year_total t_s_secyear + ,year_total t_c_firstyear + ,year_total t_c_secyear + ,year_total t_w_firstyear + ,year_total t_w_secyear + where t_s_secyear.customer_id = t_s_firstyear.customer_id + and t_s_firstyear.customer_id = t_c_secyear.customer_id + and t_s_firstyear.customer_id = t_c_firstyear.customer_id + and t_s_firstyear.customer_id = t_w_firstyear.customer_id + and t_s_firstyear.customer_id = t_w_secyear.customer_id + and t_s_firstyear.sale_type = 's' + and t_c_firstyear.sale_type = 'c' + and t_w_firstyear.sale_type = 'w' + and t_s_secyear.sale_type = 's' + and t_c_secyear.sale_type = 'c' + and t_w_secyear.sale_type = 'w' + and t_s_firstyear.dyear = 1999 + and t_s_secyear.dyear = 1999+1 + and t_c_firstyear.dyear = 1999 + and t_c_secyear.dyear = 1999+1 + and t_w_firstyear.dyear = 1999 + and t_w_secyear.dyear = 1999+1 + and t_s_firstyear.year_total > 0 + and t_c_firstyear.year_total > 0 + and t_w_firstyear.year_total > 0 + and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / t_c_firstyear.year_total else null end + > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end + and case when t_c_firstyear.year_total > 0 then t_c_secyear.year_total / 
t_c_firstyear.year_total else null end + > case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end + order by t_s_secyear.customer_id + ,t_s_secyear.customer_first_name + ,t_s_secyear.customer_last_name + ,t_s_secyear.customer_birth_country +limit 100; + +-- end query 1 in stream 0 using template query4.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query40.sql b/src/s3select/TPCDS/sample-queries-tpcds/query40.sql new file mode 100644 index 000000000..f5e72e595 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query40.sql @@ -0,0 +1,28 @@ +-- start query 1 in stream 0 using template query40.tpl and seed 1819994127 +select + w_state + ,i_item_id + ,sum(case when (cast(d_date as date) < cast ('2000-03-18' as date)) + then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_before + ,sum(case when (cast(d_date as date) >= cast ('2000-03-18' as date)) + then cs_sales_price - coalesce(cr_refunded_cash,0) else 0 end) as sales_after + from + catalog_sales left outer join catalog_returns on + (cs_order_number = cr_order_number + and cs_item_sk = cr_item_sk) + ,warehouse + ,item + ,date_dim + where + i_current_price between 0.99 and 1.49 + and i_item_sk = cs_item_sk + and cs_warehouse_sk = w_warehouse_sk + and cs_sold_date_sk = d_date_sk + and d_date between (cast ('2000-03-18' as date) - interval '30' day) + and (cast ('2000-03-18' as date) + interval '30' day) + group by + w_state,i_item_id + order by w_state,i_item_id +limit 100; + +-- end query 1 in stream 0 using template query40.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query41.sql b/src/s3select/TPCDS/sample-queries-tpcds/query41.sql new file mode 100644 index 000000000..6eb01d582 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query41.sql @@ -0,0 +1,52 @@ +-- start query 1 in stream 0 using template query41.tpl and seed 1581015815 +select distinct(i_product_name) + from item i1 + where i_manufact_id between 970 and 970+40 + and (select count(*) as item_cnt + from item + where (i_manufact = i1.i_manufact and + ((i_category = 'Women' and + (i_color = 'frosted' or i_color = 'rose') and + (i_units = 'Lb' or i_units = 'Gross') and + (i_size = 'medium' or i_size = 'large') + ) or + (i_category = 'Women' and + (i_color = 'chocolate' or i_color = 'black') and + (i_units = 'Box' or i_units = 'Dram') and + (i_size = 'economy' or i_size = 'petite') + ) or + (i_category = 'Men' and + (i_color = 'slate' or i_color = 'magenta') and + (i_units = 'Carton' or i_units = 'Bundle') and + (i_size = 'N/A' or i_size = 'small') + ) or + (i_category = 'Men' and + (i_color = 'cornflower' or i_color = 'firebrick') and + (i_units = 'Pound' or i_units = 'Oz') and + (i_size = 'medium' or i_size = 'large') + ))) or + (i_manufact = i1.i_manufact and + ((i_category = 'Women' and + (i_color = 'almond' or i_color = 'steel') and + (i_units = 'Tsp' or i_units = 'Case') and + (i_size = 'medium' or i_size = 'large') + ) or + (i_category = 'Women' and + (i_color = 'purple' or i_color = 'aquamarine') and + (i_units = 'Bunch' or i_units = 'Gram') and + (i_size = 'economy' or i_size = 'petite') + ) or + (i_category = 'Men' and + (i_color = 'lavender' or i_color = 'papaya') and + (i_units = 'Pallet' or i_units = 'Cup') and + (i_size = 'N/A' or i_size = 'small') + ) or + (i_category = 'Men' and + (i_color = 'maroon' or i_color = 'cyan') and + (i_units = 'Each' or i_units = 'N/A') and + (i_size = 'medium' or i_size = 'large') + )))) > 0 + order by i_product_name + limit 
100; + +-- end query 1 in stream 0 using template query41.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query42.sql b/src/s3select/TPCDS/sample-queries-tpcds/query42.sql new file mode 100644 index 000000000..bbc053206 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query42.sql @@ -0,0 +1,22 @@ +-- start query 1 in stream 0 using template query42.tpl and seed 1819994127 +select dt.d_year + ,item.i_category_id + ,item.i_category + ,sum(ss_ext_sales_price) + from date_dim dt + ,store_sales + ,item + where dt.d_date_sk = store_sales.ss_sold_date_sk + and store_sales.ss_item_sk = item.i_item_sk + and item.i_manager_id = 1 + and dt.d_moy=12 + and dt.d_year=1998 + group by dt.d_year + ,item.i_category_id + ,item.i_category + order by sum(ss_ext_sales_price) desc,dt.d_year + ,item.i_category_id + ,item.i_category +limit 100 ; + +-- end query 1 in stream 0 using template query42.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query43.sql b/src/s3select/TPCDS/sample-queries-tpcds/query43.sql new file mode 100644 index 000000000..89843d1e7 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query43.sql @@ -0,0 +1,19 @@ +-- start query 1 in stream 0 using template query43.tpl and seed 1819994127 +select s_store_name, s_store_id, + sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, + sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, + sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, + sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, + sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, + sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, + sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales + from date_dim, store_sales, store + where d_date_sk = ss_sold_date_sk and + s_store_sk = ss_store_sk and + s_gmt_offset = -6 and + d_year = 2001 + group by s_store_name, s_store_id + order by s_store_name, s_store_id,sun_sales,mon_sales,tue_sales,wed_sales,thu_sales,fri_sales,sat_sales + limit 100; + +-- end query 1 in stream 0 using template query43.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query44.sql b/src/s3select/TPCDS/sample-queries-tpcds/query44.sql new file mode 100644 index 000000000..92d5e0259 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query44.sql @@ -0,0 +1,35 @@ +-- start query 1 in stream 0 using template query44.tpl and seed 1819994127 +select asceding.rnk, i1.i_product_name best_performing, i2.i_product_name worst_performing +from(select * + from (select item_sk,rank() over (order by rank_col asc) rnk + from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col + from store_sales ss1 + where ss_store_sk = 366 + group by ss_item_sk + having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col + from store_sales + where ss_store_sk = 366 + and ss_cdemo_sk is null + group by ss_store_sk))V1)V11 + where rnk < 11) asceding, + (select * + from (select item_sk,rank() over (order by rank_col desc) rnk + from (select ss_item_sk item_sk,avg(ss_net_profit) rank_col + from store_sales ss1 + where ss_store_sk = 366 + group by ss_item_sk + having avg(ss_net_profit) > 0.9*(select avg(ss_net_profit) rank_col + from store_sales + where ss_store_sk = 366 + and ss_cdemo_sk is null + group by ss_store_sk))V2)V21 + where rnk < 11) descending, +item i1, +item i2 +where asceding.rnk = descending.rnk + and 
i1.i_item_sk=asceding.item_sk + and i2.i_item_sk=descending.item_sk +order by asceding.rnk +limit 100; + +-- end query 1 in stream 0 using template query44.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query45.sql b/src/s3select/TPCDS/sample-queries-tpcds/query45.sql new file mode 100644 index 000000000..16d50682a --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query45.sql @@ -0,0 +1,20 @@ +-- start query 1 in stream 0 using template query45.tpl and seed 2031708268 +select ca_zip, ca_county, sum(ws_sales_price) + from web_sales, customer, customer_address, date_dim, item + where ws_bill_customer_sk = c_customer_sk + and c_current_addr_sk = ca_address_sk + and ws_item_sk = i_item_sk + and ( substr(ca_zip,1,5) in ('85669', '86197','88274','83405','86475', '85392', '85460', '80348', '81792') + or + i_item_id in (select i_item_id + from item + where i_item_sk in (2, 3, 5, 7, 11, 13, 17, 19, 23, 29) + ) + ) + and ws_sold_date_sk = d_date_sk + and d_qoy = 1 and d_year = 1998 + group by ca_zip, ca_county + order by ca_zip, ca_county + limit 100; + +-- end query 1 in stream 0 using template query45.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query46.sql b/src/s3select/TPCDS/sample-queries-tpcds/query46.sql new file mode 100644 index 000000000..1adf55dd9 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query46.sql @@ -0,0 +1,35 @@ +-- start query 1 in stream 0 using template query46.tpl and seed 803547492 +select c_last_name + ,c_first_name + ,ca_city + ,bought_city + ,ss_ticket_number + ,amt,profit + from + (select ss_ticket_number + ,ss_customer_sk + ,ca_city bought_city + ,sum(ss_coupon_amt) amt + ,sum(ss_net_profit) profit + from store_sales,date_dim,store,household_demographics,customer_address + where store_sales.ss_sold_date_sk = date_dim.d_date_sk + and store_sales.ss_store_sk = store.s_store_sk + and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk + and store_sales.ss_addr_sk = customer_address.ca_address_sk + and (household_demographics.hd_dep_count = 0 or + household_demographics.hd_vehicle_count= 1) + and date_dim.d_dow in (6,0) + and date_dim.d_year in (2000,2000+1,2000+2) + and store.s_city in ('Five Forks','Oakland','Fairview','Winchester','Farmington') + group by ss_ticket_number,ss_customer_sk,ss_addr_sk,ca_city) dn,customer,customer_address current_addr + where ss_customer_sk = c_customer_sk + and customer.c_current_addr_sk = current_addr.ca_address_sk + and current_addr.ca_city <> bought_city + order by c_last_name + ,c_first_name + ,ca_city + ,bought_city + ,ss_ticket_number + limit 100; + +-- end query 1 in stream 0 using template query46.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query47.sql b/src/s3select/TPCDS/sample-queries-tpcds/query47.sql new file mode 100644 index 000000000..8a1437acf --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query47.sql @@ -0,0 +1,51 @@ +-- start query 1 in stream 0 using template query47.tpl and seed 2031708268 +with v1 as( + select i_category, i_brand, + s_store_name, s_company_name, + d_year, d_moy, + sum(ss_sales_price) sum_sales, + avg(sum(ss_sales_price)) over + (partition by i_category, i_brand, + s_store_name, s_company_name, d_year) + avg_monthly_sales, + rank() over + (partition by i_category, i_brand, + s_store_name, s_company_name + order by d_year, d_moy) rn + from item, store_sales, date_dim, store + where ss_item_sk = i_item_sk and + ss_sold_date_sk = d_date_sk and + ss_store_sk = s_store_sk and + ( + d_year = 1999 or + ( d_year = 1999-1 and d_moy 
=12) or + ( d_year = 1999+1 and d_moy =1) + ) + group by i_category, i_brand, + s_store_name, s_company_name, + d_year, d_moy), + v2 as( + select v1.s_store_name + ,v1.d_year, v1.d_moy + ,v1.avg_monthly_sales + ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum + from v1, v1 v1_lag, v1 v1_lead + where v1.i_category = v1_lag.i_category and + v1.i_category = v1_lead.i_category and + v1.i_brand = v1_lag.i_brand and + v1.i_brand = v1_lead.i_brand and + v1.s_store_name = v1_lag.s_store_name and + v1.s_store_name = v1_lead.s_store_name and + v1.s_company_name = v1_lag.s_company_name and + v1.s_company_name = v1_lead.s_company_name and + v1.rn = v1_lag.rn + 1 and + v1.rn = v1_lead.rn - 1) + select * + from v2 + where d_year = 1999 and + avg_monthly_sales > 0 and + case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 + order by sum_sales - avg_monthly_sales, sum_sales + limit 100; + +-- end query 1 in stream 0 using template query47.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query48.sql b/src/s3select/TPCDS/sample-queries-tpcds/query48.sql new file mode 100644 index 000000000..da87862a3 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query48.sql @@ -0,0 +1,67 @@ +-- start query 1 in stream 0 using template query48.tpl and seed 622697896 +select sum (ss_quantity) + from store_sales, store, customer_demographics, customer_address, date_dim + where s_store_sk = ss_store_sk + and ss_sold_date_sk = d_date_sk and d_year = 1998 + and + ( + ( + cd_demo_sk = ss_cdemo_sk + and + cd_marital_status = 'M' + and + cd_education_status = 'Unknown' + and + ss_sales_price between 100.00 and 150.00 + ) + or + ( + cd_demo_sk = ss_cdemo_sk + and + cd_marital_status = 'W' + and + cd_education_status = 'College' + and + ss_sales_price between 50.00 and 100.00 + ) + or + ( + cd_demo_sk = ss_cdemo_sk + and + cd_marital_status = 'D' + and + cd_education_status = 'Primary' + and + ss_sales_price between 150.00 and 200.00 + ) + ) + and + ( + ( + ss_addr_sk = ca_address_sk + and + ca_country = 'United States' + and + ca_state in ('MI', 'GA', 'NH') + and ss_net_profit between 0 and 2000 + ) + or + (ss_addr_sk = ca_address_sk + and + ca_country = 'United States' + and + ca_state in ('TX', 'KY', 'SD') + and ss_net_profit between 150 and 3000 + ) + or + (ss_addr_sk = ca_address_sk + and + ca_country = 'United States' + and + ca_state in ('NY', 'OH', 'FL') + and ss_net_profit between 50 and 25000 + ) + ) +; + +-- end query 1 in stream 0 using template query48.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query49.sql b/src/s3select/TPCDS/sample-queries-tpcds/query49.sql new file mode 100644 index 000000000..ac029bd51 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query49.sql @@ -0,0 +1,129 @@ +-- start query 1 in stream 0 using template query49.tpl and seed 1819994127 +select channel, item, return_ratio, return_rank, currency_rank from + (select + 'web' as channel + ,web.item as item + ,web.return_ratio as return_ratio + ,web.return_rank as return_rank + ,web.currency_rank as currency_rank + from ( + select + item + ,return_ratio + ,currency_ratio + ,rank() over (order by return_ratio) as return_rank + ,rank() over (order by currency_ratio) as currency_rank + from + ( select ws.ws_item_sk as item + ,(cast(sum(coalesce(wr.wr_return_quantity,0)) as decimal(15,4))/ + cast(sum(coalesce(ws.ws_quantity,0)) as decimal(15,4) )) as return_ratio + ,(cast(sum(coalesce(wr.wr_return_amt,0)) as decimal(15,4))/ + 
cast(sum(coalesce(ws.ws_net_paid,0)) as decimal(15,4) )) as currency_ratio + from + web_sales ws left outer join web_returns wr + on (ws.ws_order_number = wr.wr_order_number and + ws.ws_item_sk = wr.wr_item_sk) + ,date_dim + where + wr.wr_return_amt > 10000 + and ws.ws_net_profit > 1 + and ws.ws_net_paid > 0 + and ws.ws_quantity > 0 + and ws_sold_date_sk = d_date_sk + and d_year = 2000 + and d_moy = 12 + group by ws.ws_item_sk + ) in_web + ) web + where + ( + web.return_rank <= 10 + or + web.currency_rank <= 10 + ) + union + select + 'catalog' as channel + ,catalog.item as item + ,catalog.return_ratio as return_ratio + ,catalog.return_rank as return_rank + ,catalog.currency_rank as currency_rank + from ( + select + item + ,return_ratio + ,currency_ratio + ,rank() over (order by return_ratio) as return_rank + ,rank() over (order by currency_ratio) as currency_rank + from + ( select + cs.cs_item_sk as item + ,(cast(sum(coalesce(cr.cr_return_quantity,0)) as decimal(15,4))/ + cast(sum(coalesce(cs.cs_quantity,0)) as decimal(15,4) )) as return_ratio + ,(cast(sum(coalesce(cr.cr_return_amount,0)) as decimal(15,4))/ + cast(sum(coalesce(cs.cs_net_paid,0)) as decimal(15,4) )) as currency_ratio + from + catalog_sales cs left outer join catalog_returns cr + on (cs.cs_order_number = cr.cr_order_number and + cs.cs_item_sk = cr.cr_item_sk) + ,date_dim + where + cr.cr_return_amount > 10000 + and cs.cs_net_profit > 1 + and cs.cs_net_paid > 0 + and cs.cs_quantity > 0 + and cs_sold_date_sk = d_date_sk + and d_year = 2000 + and d_moy = 12 + group by cs.cs_item_sk + ) in_cat + ) catalog + where + ( + catalog.return_rank <= 10 + or + catalog.currency_rank <=10 + ) + union + select + 'store' as channel + ,store.item as item + ,store.return_ratio as return_ratio + ,store.return_rank as return_rank + ,store.currency_rank as currency_rank + from ( + select + item + ,return_ratio + ,currency_ratio + ,rank() over (order by return_ratio) as return_rank + ,rank() over (order by currency_ratio) as currency_rank + from + ( select sts.ss_item_sk as item + ,(cast(sum(coalesce(sr.sr_return_quantity,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_quantity,0)) as decimal(15,4) )) as return_ratio + ,(cast(sum(coalesce(sr.sr_return_amt,0)) as decimal(15,4))/cast(sum(coalesce(sts.ss_net_paid,0)) as decimal(15,4) )) as currency_ratio + from + store_sales sts left outer join store_returns sr + on (sts.ss_ticket_number = sr.sr_ticket_number and sts.ss_item_sk = sr.sr_item_sk) + ,date_dim + where + sr.sr_return_amt > 10000 + and sts.ss_net_profit > 1 + and sts.ss_net_paid > 0 + and sts.ss_quantity > 0 + and ss_sold_date_sk = d_date_sk + and d_year = 2000 + and d_moy = 12 + group by sts.ss_item_sk + ) in_store + ) store + where ( + store.return_rank <= 10 + or + store.currency_rank <= 10 + ) + ) y + order by 1,4,5,2 + limit 100; + +-- end query 1 in stream 0 using template query49.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query5.sql b/src/s3select/TPCDS/sample-queries-tpcds/query5.sql new file mode 100644 index 000000000..3400b6763 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query5.sql @@ -0,0 +1,128 @@ +-- start query 1 in stream 0 using template query5.tpl and seed 1819994127 +with ssr as + (select s_store_id, + sum(sales_price) as sales, + sum(profit) as profit, + sum(return_amt) as returns, + sum(net_loss) as profit_loss + from + ( select ss_store_sk as store_sk, + ss_sold_date_sk as date_sk, + ss_ext_sales_price as sales_price, + ss_net_profit as profit, + cast(0 as decimal(7,2)) as return_amt, + 
cast(0 as decimal(7,2)) as net_loss + from store_sales + union all + select sr_store_sk as store_sk, + sr_returned_date_sk as date_sk, + cast(0 as decimal(7,2)) as sales_price, + cast(0 as decimal(7,2)) as profit, + sr_return_amt as return_amt, + sr_net_loss as net_loss + from store_returns + ) salesreturns, + date_dim, + store + where date_sk = d_date_sk + and d_date between cast('2000-08-19' as date) + and (cast('2000-08-19' as date) + interval '14' day) + and store_sk = s_store_sk + group by s_store_id) + , + csr as + (select cp_catalog_page_id, + sum(sales_price) as sales, + sum(profit) as profit, + sum(return_amt) as returns, + sum(net_loss) as profit_loss + from + ( select cs_catalog_page_sk as page_sk, + cs_sold_date_sk as date_sk, + cs_ext_sales_price as sales_price, + cs_net_profit as profit, + cast(0 as decimal(7,2)) as return_amt, + cast(0 as decimal(7,2)) as net_loss + from catalog_sales + union all + select cr_catalog_page_sk as page_sk, + cr_returned_date_sk as date_sk, + cast(0 as decimal(7,2)) as sales_price, + cast(0 as decimal(7,2)) as profit, + cr_return_amount as return_amt, + cr_net_loss as net_loss + from catalog_returns + ) salesreturns, + date_dim, + catalog_page + where date_sk = d_date_sk + and d_date between cast('2000-08-19' as date) + and (cast('2000-08-19' as date) + interval '14' day) + and page_sk = cp_catalog_page_sk + group by cp_catalog_page_id) + , + wsr as + (select web_site_id, + sum(sales_price) as sales, + sum(profit) as profit, + sum(return_amt) as returns, + sum(net_loss) as profit_loss + from + ( select ws_web_site_sk as wsr_web_site_sk, + ws_sold_date_sk as date_sk, + ws_ext_sales_price as sales_price, + ws_net_profit as profit, + cast(0 as decimal(7,2)) as return_amt, + cast(0 as decimal(7,2)) as net_loss + from web_sales + union all + select ws_web_site_sk as wsr_web_site_sk, + wr_returned_date_sk as date_sk, + cast(0 as decimal(7,2)) as sales_price, + cast(0 as decimal(7,2)) as profit, + wr_return_amt as return_amt, + wr_net_loss as net_loss + from web_returns left outer join web_sales on + ( wr_item_sk = ws_item_sk + and wr_order_number = ws_order_number) + ) salesreturns, + date_dim, + web_site + where date_sk = d_date_sk + and d_date between cast('2000-08-19' as date) + and (cast('2000-08-19' as date) + interval '14' day) + and wsr_web_site_sk = web_site_sk + group by web_site_id) + select channel + , id + , sum(sales) as sales + , sum(returns) as returns + , sum(profit) as profit + from + (select 'store channel' as channel + , 'store' || s_store_id as id + , sales + , returns + , (profit - profit_loss) as profit + from ssr + union all + select 'catalog channel' as channel + , 'catalog_page' || cp_catalog_page_id as id + , sales + , returns + , (profit - profit_loss) as profit + from csr + union all + select 'web channel' as channel + , 'web_site' || web_site_id as id + , sales + , returns + , (profit - profit_loss) as profit + from wsr + ) x + group by rollup (channel, id) + order by channel + ,id + limit 100; + +-- end query 1 in stream 0 using template query5.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query50.sql b/src/s3select/TPCDS/sample-queries-tpcds/query50.sql new file mode 100644 index 000000000..206432b8e --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query50.sql @@ -0,0 +1,59 @@ +-- start query 1 in stream 0 using template query50.tpl and seed 1819994127 +select + s_store_name + ,s_company_id + ,s_street_number + ,s_street_name + ,s_street_type + ,s_suite_number + ,s_city + ,s_county + ,s_state + 
,s_zip + ,sum(case when (sr_returned_date_sk - ss_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" + ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 30) and + (sr_returned_date_sk - ss_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" + ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 60) and + (sr_returned_date_sk - ss_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" + ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 90) and + (sr_returned_date_sk - ss_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" + ,sum(case when (sr_returned_date_sk - ss_sold_date_sk > 120) then 1 else 0 end) as ">120 days" +from + store_sales + ,store_returns + ,store + ,date_dim d1 + ,date_dim d2 +where + d2.d_year = 1998 +and d2.d_moy = 9 +and ss_ticket_number = sr_ticket_number +and ss_item_sk = sr_item_sk +and ss_sold_date_sk = d1.d_date_sk +and sr_returned_date_sk = d2.d_date_sk +and ss_customer_sk = sr_customer_sk +and ss_store_sk = s_store_sk +group by + s_store_name + ,s_company_id + ,s_street_number + ,s_street_name + ,s_street_type + ,s_suite_number + ,s_city + ,s_county + ,s_state + ,s_zip +order by s_store_name + ,s_company_id + ,s_street_number + ,s_street_name + ,s_street_type + ,s_suite_number + ,s_city + ,s_county + ,s_state + ,s_zip +limit 100; + +-- end query 1 in stream 0 using template query50.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query51.sql b/src/s3select/TPCDS/sample-queries-tpcds/query51.sql new file mode 100644 index 000000000..dbf9807f5 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query51.sql @@ -0,0 +1,45 @@ +-- start query 1 in stream 0 using template query51.tpl and seed 1819994127 +WITH web_v1 as ( +select + ws_item_sk item_sk, d_date, + sum(sum(ws_sales_price)) + over (partition by ws_item_sk order by d_date rows between unbounded preceding and current row) cume_sales +from web_sales + ,date_dim +where ws_sold_date_sk=d_date_sk + and d_month_seq between 1214 and 1214+11 + and ws_item_sk is not NULL +group by ws_item_sk, d_date), +store_v1 as ( +select + ss_item_sk item_sk, d_date, + sum(sum(ss_sales_price)) + over (partition by ss_item_sk order by d_date rows between unbounded preceding and current row) cume_sales +from store_sales + ,date_dim +where ss_sold_date_sk=d_date_sk + and d_month_seq between 1214 and 1214+11 + and ss_item_sk is not NULL +group by ss_item_sk, d_date) + select * +from (select item_sk + ,d_date + ,web_sales + ,store_sales + ,max(web_sales) + over (partition by item_sk order by d_date rows between unbounded preceding and current row) web_cumulative + ,max(store_sales) + over (partition by item_sk order by d_date rows between unbounded preceding and current row) store_cumulative + from (select case when web.item_sk is not null then web.item_sk else store.item_sk end item_sk + ,case when web.d_date is not null then web.d_date else store.d_date end d_date + ,web.cume_sales web_sales + ,store.cume_sales store_sales + from web_v1 web full outer join store_v1 store on (web.item_sk = store.item_sk + and web.d_date = store.d_date) + )x )y +where web_cumulative > store_cumulative +order by item_sk + ,d_date +limit 100; + +-- end query 1 in stream 0 using template query51.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query52.sql b/src/s3select/TPCDS/sample-queries-tpcds/query52.sql new file mode 100644 index 000000000..316e40e67 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query52.sql @@ -0,0 +1,22 @@ +-- start query 1 in stream 0 using template query52.tpl and seed 
1819994127 +select dt.d_year + ,item.i_brand_id brand_id + ,item.i_brand brand + ,sum(ss_ext_sales_price) ext_price + from date_dim dt + ,store_sales + ,item + where dt.d_date_sk = store_sales.ss_sold_date_sk + and store_sales.ss_item_sk = item.i_item_sk + and item.i_manager_id = 1 + and dt.d_moy=12 + and dt.d_year=2000 + group by dt.d_year + ,item.i_brand + ,item.i_brand_id + order by dt.d_year + ,ext_price desc + ,brand_id +limit 100 ; + +-- end query 1 in stream 0 using template query52.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query53.sql b/src/s3select/TPCDS/sample-queries-tpcds/query53.sql new file mode 100644 index 000000000..ab08dbd4f --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query53.sql @@ -0,0 +1,28 @@ +-- start query 1 in stream 0 using template query53.tpl and seed 1819994127 +select * from +(select i_manufact_id, +sum(ss_sales_price) sum_sales, +avg(sum(ss_sales_price)) over (partition by i_manufact_id) avg_quarterly_sales +from item, store_sales, date_dim, store +where ss_item_sk = i_item_sk and +ss_sold_date_sk = d_date_sk and +ss_store_sk = s_store_sk and +d_month_seq in (1212,1212+1,1212+2,1212+3,1212+4,1212+5,1212+6,1212+7,1212+8,1212+9,1212+10,1212+11) and +((i_category in ('Books','Children','Electronics') and +i_class in ('personal','portable','reference','self-help') and +i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', + 'exportiunivamalg #9','scholaramalgamalg #9')) +or(i_category in ('Women','Music','Men') and +i_class in ('accessories','classical','fragrances','pants') and +i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', + 'importoamalg #1'))) +group by i_manufact_id, d_qoy ) tmp1 +where case when avg_quarterly_sales > 0 + then abs (sum_sales - avg_quarterly_sales)/ avg_quarterly_sales + else null end > 0.1 +order by avg_quarterly_sales, + sum_sales, + i_manufact_id +limit 100; + +-- end query 1 in stream 0 using template query53.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query54.sql b/src/s3select/TPCDS/sample-queries-tpcds/query54.sql new file mode 100644 index 000000000..453c5110e --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query54.sql @@ -0,0 +1,56 @@ +-- start query 1 in stream 0 using template query54.tpl and seed 1930872976 +with my_customers as ( + select distinct c_customer_sk + , c_current_addr_sk + from + ( select cs_sold_date_sk sold_date_sk, + cs_bill_customer_sk customer_sk, + cs_item_sk item_sk + from catalog_sales + union all + select ws_sold_date_sk sold_date_sk, + ws_bill_customer_sk customer_sk, + ws_item_sk item_sk + from web_sales + ) cs_or_ws_sales, + item, + date_dim, + customer + where sold_date_sk = d_date_sk + and item_sk = i_item_sk + and i_category = 'Books' + and i_class = 'business' + and c_customer_sk = cs_or_ws_sales.customer_sk + and d_moy = 2 + and d_year = 2000 + ) + , my_revenue as ( + select c_customer_sk, + sum(ss_ext_sales_price) as revenue + from my_customers, + store_sales, + customer_address, + store, + date_dim + where c_current_addr_sk = ca_address_sk + and ca_county = s_county + and ca_state = s_state + and ss_sold_date_sk = d_date_sk + and c_customer_sk = ss_customer_sk + and d_month_seq between (select distinct d_month_seq+1 + from date_dim where d_year = 2000 and d_moy = 2) + and (select distinct d_month_seq+3 + from date_dim where d_year = 2000 and d_moy = 2) + group by c_customer_sk + ) + , segments as + (select cast((revenue/50) as int) as segment + from my_revenue + ) + select segment, count(*) as num_customers, 
segment*50 as segment_base + from segments + group by segment + order by segment, num_customers + limit 100; + +-- end query 1 in stream 0 using template query54.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query55.sql b/src/s3select/TPCDS/sample-queries-tpcds/query55.sql new file mode 100644 index 000000000..09f2b7cf4 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query55.sql @@ -0,0 +1,14 @@ +-- start query 1 in stream 0 using template query55.tpl and seed 2031708268 +select i_brand_id brand_id, i_brand brand, + sum(ss_ext_sales_price) ext_price + from date_dim, store_sales, item + where d_date_sk = ss_sold_date_sk + and ss_item_sk = i_item_sk + and i_manager_id=13 + and d_moy=11 + and d_year=1999 + group by i_brand, i_brand_id + order by ext_price desc, i_brand_id +limit 100 ; + +-- end query 1 in stream 0 using template query55.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query56.sql b/src/s3select/TPCDS/sample-queries-tpcds/query56.sql new file mode 100644 index 000000000..a0e245c42 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query56.sql @@ -0,0 +1,69 @@ +-- start query 1 in stream 0 using template query56.tpl and seed 1951559352 +with ss as ( + select i_item_id,sum(ss_ext_sales_price) total_sales + from + store_sales, + date_dim, + customer_address, + item + where i_item_id in (select + i_item_id +from item +where i_color in ('chiffon','smoke','lace')) + and ss_item_sk = i_item_sk + and ss_sold_date_sk = d_date_sk + and d_year = 2001 + and d_moy = 5 + and ss_addr_sk = ca_address_sk + and ca_gmt_offset = -6 + group by i_item_id), + cs as ( + select i_item_id,sum(cs_ext_sales_price) total_sales + from + catalog_sales, + date_dim, + customer_address, + item + where + i_item_id in (select + i_item_id +from item +where i_color in ('chiffon','smoke','lace')) + and cs_item_sk = i_item_sk + and cs_sold_date_sk = d_date_sk + and d_year = 2001 + and d_moy = 5 + and cs_bill_addr_sk = ca_address_sk + and ca_gmt_offset = -6 + group by i_item_id), + ws as ( + select i_item_id,sum(ws_ext_sales_price) total_sales + from + web_sales, + date_dim, + customer_address, + item + where + i_item_id in (select + i_item_id +from item +where i_color in ('chiffon','smoke','lace')) + and ws_item_sk = i_item_sk + and ws_sold_date_sk = d_date_sk + and d_year = 2001 + and d_moy = 5 + and ws_bill_addr_sk = ca_address_sk + and ca_gmt_offset = -6 + group by i_item_id) + select i_item_id ,sum(total_sales) total_sales + from (select * from ss + union all + select * from cs + union all + select * from ws) tmp1 + group by i_item_id + order by total_sales, + i_item_id + limit 100; + +-- end query 1 in stream 0 using template query56.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query57.sql b/src/s3select/TPCDS/sample-queries-tpcds/query57.sql new file mode 100644 index 000000000..10602da67 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query57.sql @@ -0,0 +1,48 @@ +-- start query 1 in stream 0 using template query57.tpl and seed 2031708268 +with v1 as( + select i_category, i_brand, + cc_name, + d_year, d_moy, + sum(cs_sales_price) sum_sales, + avg(sum(cs_sales_price)) over + (partition by i_category, i_brand, + cc_name, d_year) + avg_monthly_sales, + rank() over + (partition by i_category, i_brand, + cc_name + order by d_year, d_moy) rn + from item, catalog_sales, date_dim, call_center + where cs_item_sk = i_item_sk and + cs_sold_date_sk = d_date_sk and + cc_call_center_sk= cs_call_center_sk and + ( + d_year = 1999 or + ( d_year = 1999-1 and d_moy 
=12) or + ( d_year = 1999+1 and d_moy =1) + ) + group by i_category, i_brand, + cc_name , d_year, d_moy), + v2 as( + select v1.i_category, v1.i_brand + ,v1.d_year, v1.d_moy + ,v1.avg_monthly_sales + ,v1.sum_sales, v1_lag.sum_sales psum, v1_lead.sum_sales nsum + from v1, v1 v1_lag, v1 v1_lead + where v1.i_category = v1_lag.i_category and + v1.i_category = v1_lead.i_category and + v1.i_brand = v1_lag.i_brand and + v1.i_brand = v1_lead.i_brand and + v1. cc_name = v1_lag. cc_name and + v1. cc_name = v1_lead. cc_name and + v1.rn = v1_lag.rn + 1 and + v1.rn = v1_lead.rn - 1) + select * + from v2 + where d_year = 1999 and + avg_monthly_sales > 0 and + case when avg_monthly_sales > 0 then abs(sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 + order by sum_sales - avg_monthly_sales, avg_monthly_sales + limit 100; + +-- end query 1 in stream 0 using template query57.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query58.sql b/src/s3select/TPCDS/sample-queries-tpcds/query58.sql new file mode 100644 index 000000000..93ac07686 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query58.sql @@ -0,0 +1,65 @@ +-- start query 1 in stream 0 using template query58.tpl and seed 1819994127 +with ss_items as + (select i_item_id item_id + ,sum(ss_ext_sales_price) ss_item_rev + from store_sales + ,item + ,date_dim + where ss_item_sk = i_item_sk + and d_date in (select d_date + from date_dim + where d_week_seq = (select d_week_seq + from date_dim + where d_date = cast('1998-02-21' as date))) + and ss_sold_date_sk = d_date_sk + group by i_item_id), + cs_items as + (select i_item_id item_id + ,sum(cs_ext_sales_price) cs_item_rev + from catalog_sales + ,item + ,date_dim + where cs_item_sk = i_item_sk + and d_date in (select d_date + from date_dim + where d_week_seq = (select d_week_seq + from date_dim + where d_date = cast('1998-02-21' as date))) + and cs_sold_date_sk = d_date_sk + group by i_item_id), + ws_items as + (select i_item_id item_id + ,sum(ws_ext_sales_price) ws_item_rev + from web_sales + ,item + ,date_dim + where ws_item_sk = i_item_sk + and d_date in (select d_date + from date_dim + where d_week_seq =(select d_week_seq + from date_dim + where d_date = cast('1998-02-21' as date))) + and ws_sold_date_sk = d_date_sk + group by i_item_id) + select ss_items.item_id + ,ss_item_rev + ,ss_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ss_dev + ,cs_item_rev + ,cs_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 cs_dev + ,ws_item_rev + ,ws_item_rev/((ss_item_rev+cs_item_rev+ws_item_rev)/3) * 100 ws_dev + ,(ss_item_rev+cs_item_rev+ws_item_rev)/3 average + from ss_items,cs_items,ws_items + where ss_items.item_id=cs_items.item_id + and ss_items.item_id=ws_items.item_id + and ss_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev + and ss_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev + and cs_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev + and cs_item_rev between 0.9 * ws_item_rev and 1.1 * ws_item_rev + and ws_item_rev between 0.9 * ss_item_rev and 1.1 * ss_item_rev + and ws_item_rev between 0.9 * cs_item_rev and 1.1 * cs_item_rev + order by item_id + ,ss_item_rev + limit 100; + +-- end query 1 in stream 0 using template query58.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query59.sql b/src/s3select/TPCDS/sample-queries-tpcds/query59.sql new file mode 100644 index 000000000..9e74b19b9 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query59.sql @@ -0,0 +1,44 @@ +-- start query 1 in stream 0 using template 
query59.tpl and seed 1819994127 +with wss as + (select d_week_seq, + ss_store_sk, + sum(case when (d_day_name='Sunday') then ss_sales_price else null end) sun_sales, + sum(case when (d_day_name='Monday') then ss_sales_price else null end) mon_sales, + sum(case when (d_day_name='Tuesday') then ss_sales_price else null end) tue_sales, + sum(case when (d_day_name='Wednesday') then ss_sales_price else null end) wed_sales, + sum(case when (d_day_name='Thursday') then ss_sales_price else null end) thu_sales, + sum(case when (d_day_name='Friday') then ss_sales_price else null end) fri_sales, + sum(case when (d_day_name='Saturday') then ss_sales_price else null end) sat_sales + from store_sales,date_dim + where d_date_sk = ss_sold_date_sk + group by d_week_seq,ss_store_sk + ) + select s_store_name1,s_store_id1,d_week_seq1 + ,sun_sales1/sun_sales2,mon_sales1/mon_sales2 + ,tue_sales1/tue_sales2,wed_sales1/wed_sales2,thu_sales1/thu_sales2 + ,fri_sales1/fri_sales2,sat_sales1/sat_sales2 + from + (select s_store_name s_store_name1,wss.d_week_seq d_week_seq1 + ,s_store_id s_store_id1,sun_sales sun_sales1 + ,mon_sales mon_sales1,tue_sales tue_sales1 + ,wed_sales wed_sales1,thu_sales thu_sales1 + ,fri_sales fri_sales1,sat_sales sat_sales1 + from wss,store,date_dim d + where d.d_week_seq = wss.d_week_seq and + ss_store_sk = s_store_sk and + d_month_seq between 1205 and 1205 + 11) y, + (select s_store_name s_store_name2,wss.d_week_seq d_week_seq2 + ,s_store_id s_store_id2,sun_sales sun_sales2 + ,mon_sales mon_sales2,tue_sales tue_sales2 + ,wed_sales wed_sales2,thu_sales thu_sales2 + ,fri_sales fri_sales2,sat_sales sat_sales2 + from wss,store,date_dim d + where d.d_week_seq = wss.d_week_seq and + ss_store_sk = s_store_sk and + d_month_seq between 1205+ 12 and 1205 + 23) x + where s_store_id1=s_store_id2 + and d_week_seq1=d_week_seq2-52 + order by s_store_name1,s_store_id1,d_week_seq1 +limit 100; + +-- end query 1 in stream 0 using template query59.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query6.sql b/src/s3select/TPCDS/sample-queries-tpcds/query6.sql new file mode 100644 index 000000000..21a8a618c --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query6.sql @@ -0,0 +1,26 @@ +-- start query 1 in stream 0 using template query6.tpl and seed 1819994127 +select a.ca_state state, count(*) cnt + from customer_address a + ,customer c + ,store_sales s + ,date_dim d + ,item i + where a.ca_address_sk = c.c_current_addr_sk + and c.c_customer_sk = s.ss_customer_sk + and s.ss_sold_date_sk = d.d_date_sk + and s.ss_item_sk = i.i_item_sk + and d.d_month_seq = + (select distinct (d_month_seq) + from date_dim + where d_year = 2002 + and d_moy = 3 ) + and i.i_current_price > 1.2 * + (select avg(j.i_current_price) + from item j + where j.i_category = i.i_category) + group by a.ca_state + having count(*) >= 10 + order by cnt, a.ca_state + limit 100; + +-- end query 1 in stream 0 using template query6.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query60.sql b/src/s3select/TPCDS/sample-queries-tpcds/query60.sql new file mode 100644 index 000000000..0f96b97d5 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query60.sql @@ -0,0 +1,78 @@ +-- start query 1 in stream 0 using template query60.tpl and seed 1930872976 +with ss as ( + select + i_item_id,sum(ss_ext_sales_price) total_sales + from + store_sales, + date_dim, + customer_address, + item + where + i_item_id in (select + i_item_id +from + item +where i_category in ('Children')) + and ss_item_sk = i_item_sk + and ss_sold_date_sk = 
d_date_sk + and d_year = 1998 + and d_moy = 10 + and ss_addr_sk = ca_address_sk + and ca_gmt_offset = -5 + group by i_item_id), + cs as ( + select + i_item_id,sum(cs_ext_sales_price) total_sales + from + catalog_sales, + date_dim, + customer_address, + item + where + i_item_id in (select + i_item_id +from + item +where i_category in ('Children')) + and cs_item_sk = i_item_sk + and cs_sold_date_sk = d_date_sk + and d_year = 1998 + and d_moy = 10 + and cs_bill_addr_sk = ca_address_sk + and ca_gmt_offset = -5 + group by i_item_id), + ws as ( + select + i_item_id,sum(ws_ext_sales_price) total_sales + from + web_sales, + date_dim, + customer_address, + item + where + i_item_id in (select + i_item_id +from + item +where i_category in ('Children')) + and ws_item_sk = i_item_sk + and ws_sold_date_sk = d_date_sk + and d_year = 1998 + and d_moy = 10 + and ws_bill_addr_sk = ca_address_sk + and ca_gmt_offset = -5 + group by i_item_id) + select + i_item_id +,sum(total_sales) total_sales + from (select * from ss + union all + select * from cs + union all + select * from ws) tmp1 + group by i_item_id + order by i_item_id + ,total_sales + limit 100; + +-- end query 1 in stream 0 using template query60.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query61.sql b/src/s3select/TPCDS/sample-queries-tpcds/query61.sql new file mode 100644 index 000000000..4be762663 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query61.sql @@ -0,0 +1,44 @@ +-- start query 1 in stream 0 using template query61.tpl and seed 1930872976 +select promotions,total,cast(promotions as decimal(15,4))/cast(total as decimal(15,4))*100 +from + (select sum(ss_ext_sales_price) promotions + from store_sales + ,store + ,promotion + ,date_dim + ,customer + ,customer_address + ,item + where ss_sold_date_sk = d_date_sk + and ss_store_sk = s_store_sk + and ss_promo_sk = p_promo_sk + and ss_customer_sk= c_customer_sk + and ca_address_sk = c_current_addr_sk + and ss_item_sk = i_item_sk + and ca_gmt_offset = -6 + and i_category = 'Sports' + and (p_channel_dmail = 'Y' or p_channel_email = 'Y' or p_channel_tv = 'Y') + and s_gmt_offset = -6 + and d_year = 2001 + and d_moy = 12) promotional_sales, + (select sum(ss_ext_sales_price) total + from store_sales + ,store + ,date_dim + ,customer + ,customer_address + ,item + where ss_sold_date_sk = d_date_sk + and ss_store_sk = s_store_sk + and ss_customer_sk= c_customer_sk + and ca_address_sk = c_current_addr_sk + and ss_item_sk = i_item_sk + and ca_gmt_offset = -6 + and i_category = 'Sports' + and s_gmt_offset = -6 + and d_year = 2001 + and d_moy = 12) all_sales +order by promotions, total +limit 100; + +-- end query 1 in stream 0 using template query61.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query62.sql b/src/s3select/TPCDS/sample-queries-tpcds/query62.sql new file mode 100644 index 000000000..0f8c79422 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query62.sql @@ -0,0 +1,35 @@ +-- start query 1 in stream 0 using template query62.tpl and seed 1819994127 +select + substr(w_warehouse_name,1,20) + ,sm_type + ,web_name + ,sum(case when (ws_ship_date_sk - ws_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" + ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 30) and + (ws_ship_date_sk - ws_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" + ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 60) and + (ws_ship_date_sk - ws_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" + ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 90) and + 
(ws_ship_date_sk - ws_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" + ,sum(case when (ws_ship_date_sk - ws_sold_date_sk > 120) then 1 else 0 end) as ">120 days" +from + web_sales + ,warehouse + ,ship_mode + ,web_site + ,date_dim +where + d_month_seq between 1215 and 1215 + 11 +and ws_ship_date_sk = d_date_sk +and ws_warehouse_sk = w_warehouse_sk +and ws_ship_mode_sk = sm_ship_mode_sk +and ws_web_site_sk = web_site_sk +group by + substr(w_warehouse_name,1,20) + ,sm_type + ,web_name +order by substr(w_warehouse_name,1,20) + ,sm_type + ,web_name +limit 100; + +-- end query 1 in stream 0 using template query62.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query63.sql b/src/s3select/TPCDS/sample-queries-tpcds/query63.sql new file mode 100644 index 000000000..599a2d023 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query63.sql @@ -0,0 +1,29 @@ +-- start query 1 in stream 0 using template query63.tpl and seed 1819994127 +select * +from (select i_manager_id + ,sum(ss_sales_price) sum_sales + ,avg(sum(ss_sales_price)) over (partition by i_manager_id) avg_monthly_sales + from item + ,store_sales + ,date_dim + ,store + where ss_item_sk = i_item_sk + and ss_sold_date_sk = d_date_sk + and ss_store_sk = s_store_sk + and d_month_seq in (1211,1211+1,1211+2,1211+3,1211+4,1211+5,1211+6,1211+7,1211+8,1211+9,1211+10,1211+11) + and (( i_category in ('Books','Children','Electronics') + and i_class in ('personal','portable','reference','self-help') + and i_brand in ('scholaramalgamalg #14','scholaramalgamalg #7', + 'exportiunivamalg #9','scholaramalgamalg #9')) + or( i_category in ('Women','Music','Men') + and i_class in ('accessories','classical','fragrances','pants') + and i_brand in ('amalgimporto #1','edu packscholar #1','exportiimporto #1', + 'importoamalg #1'))) +group by i_manager_id, d_moy) tmp1 +where case when avg_monthly_sales > 0 then abs (sum_sales - avg_monthly_sales) / avg_monthly_sales else null end > 0.1 +order by i_manager_id + ,avg_monthly_sales + ,sum_sales +limit 100; + +-- end query 1 in stream 0 using template query63.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query64.sql b/src/s3select/TPCDS/sample-queries-tpcds/query64.sql new file mode 100644 index 000000000..e353b930d --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query64.sql @@ -0,0 +1,121 @@ +-- start query 1 in stream 0 using template query64.tpl and seed 1220860970 +with cs_ui as + (select cs_item_sk + ,sum(cs_ext_list_price) as sale,sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit) as refund + from catalog_sales + ,catalog_returns + where cs_item_sk = cr_item_sk + and cs_order_number = cr_order_number + group by cs_item_sk + having sum(cs_ext_list_price)>2*sum(cr_refunded_cash+cr_reversed_charge+cr_store_credit)), +cross_sales as + (select i_product_name product_name + ,i_item_sk item_sk + ,s_store_name store_name + ,s_zip store_zip + ,ad1.ca_street_number b_street_number + ,ad1.ca_street_name b_street_name + ,ad1.ca_city b_city + ,ad1.ca_zip b_zip + ,ad2.ca_street_number c_street_number + ,ad2.ca_street_name c_street_name + ,ad2.ca_city c_city + ,ad2.ca_zip c_zip + ,d1.d_year as syear + ,d2.d_year as fsyear + ,d3.d_year s2year + ,count(*) cnt + ,sum(ss_wholesale_cost) s1 + ,sum(ss_list_price) s2 + ,sum(ss_coupon_amt) s3 + FROM store_sales + ,store_returns + ,cs_ui + ,date_dim d1 + ,date_dim d2 + ,date_dim d3 + ,store + ,customer + ,customer_demographics cd1 + ,customer_demographics cd2 + ,promotion + ,household_demographics hd1 + ,household_demographics hd2 + 
,customer_address ad1 + ,customer_address ad2 + ,income_band ib1 + ,income_band ib2 + ,item + WHERE ss_store_sk = s_store_sk AND + ss_sold_date_sk = d1.d_date_sk AND + ss_customer_sk = c_customer_sk AND + ss_cdemo_sk= cd1.cd_demo_sk AND + ss_hdemo_sk = hd1.hd_demo_sk AND + ss_addr_sk = ad1.ca_address_sk and + ss_item_sk = i_item_sk and + ss_item_sk = sr_item_sk and + ss_ticket_number = sr_ticket_number and + ss_item_sk = cs_ui.cs_item_sk and + c_current_cdemo_sk = cd2.cd_demo_sk AND + c_current_hdemo_sk = hd2.hd_demo_sk AND + c_current_addr_sk = ad2.ca_address_sk and + c_first_sales_date_sk = d2.d_date_sk and + c_first_shipto_date_sk = d3.d_date_sk and + ss_promo_sk = p_promo_sk and + hd1.hd_income_band_sk = ib1.ib_income_band_sk and + hd2.hd_income_band_sk = ib2.ib_income_band_sk and + cd1.cd_marital_status <> cd2.cd_marital_status and + i_color in ('azure','gainsboro','misty','blush','hot','lemon') and + i_current_price between 80 and 80 + 10 and + i_current_price between 80 + 1 and 80 + 15 +group by i_product_name + ,i_item_sk + ,s_store_name + ,s_zip + ,ad1.ca_street_number + ,ad1.ca_street_name + ,ad1.ca_city + ,ad1.ca_zip + ,ad2.ca_street_number + ,ad2.ca_street_name + ,ad2.ca_city + ,ad2.ca_zip + ,d1.d_year + ,d2.d_year + ,d3.d_year +) +select cs1.product_name + ,cs1.store_name + ,cs1.store_zip + ,cs1.b_street_number + ,cs1.b_street_name + ,cs1.b_city + ,cs1.b_zip + ,cs1.c_street_number + ,cs1.c_street_name + ,cs1.c_city + ,cs1.c_zip + ,cs1.syear + ,cs1.cnt + ,cs1.s1 as s11 + ,cs1.s2 as s21 + ,cs1.s3 as s31 + ,cs2.s1 as s12 + ,cs2.s2 as s22 + ,cs2.s3 as s32 + ,cs2.syear + ,cs2.cnt +from cross_sales cs1,cross_sales cs2 +where cs1.item_sk=cs2.item_sk and + cs1.syear = 1999 and + cs2.syear = 1999 + 1 and + cs2.cnt <= cs1.cnt and + cs1.store_name = cs2.store_name and + cs1.store_zip = cs2.store_zip +order by cs1.product_name + ,cs1.store_name + ,cs2.cnt + ,cs1.s1 + ,cs2.s1; + +-- end query 1 in stream 0 using template query64.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query65.sql b/src/s3select/TPCDS/sample-queries-tpcds/query65.sql new file mode 100644 index 000000000..826ecc644 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query65.sql @@ -0,0 +1,29 @@ +-- start query 1 in stream 0 using template query65.tpl and seed 1819994127 +select + s_store_name, + i_item_desc, + sc.revenue, + i_current_price, + i_wholesale_cost, + i_brand + from store, item, + (select ss_store_sk, avg(revenue) as ave + from + (select ss_store_sk, ss_item_sk, + sum(ss_sales_price) as revenue + from store_sales, date_dim + where ss_sold_date_sk = d_date_sk and d_month_seq between 1186 and 1186+11 + group by ss_store_sk, ss_item_sk) sa + group by ss_store_sk) sb, + (select ss_store_sk, ss_item_sk, sum(ss_sales_price) as revenue + from store_sales, date_dim + where ss_sold_date_sk = d_date_sk and d_month_seq between 1186 and 1186+11 + group by ss_store_sk, ss_item_sk) sc + where sb.ss_store_sk = sc.ss_store_sk and + sc.revenue <= 0.1 * sb.ave and + s_store_sk = sc.ss_store_sk and + i_item_sk = sc.ss_item_sk + order by s_store_name, i_item_desc +limit 100; + +-- end query 1 in stream 0 using template query65.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query66.sql b/src/s3select/TPCDS/sample-queries-tpcds/query66.sql new file mode 100644 index 000000000..987238b12 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query66.sql @@ -0,0 +1,220 @@ +-- start query 1 in stream 0 using template query66.tpl and seed 2042478054 +select + w_warehouse_name + ,w_warehouse_sq_ft + 
,w_city + ,w_county + ,w_state + ,w_country + ,ship_carriers + ,year + ,sum(jan_sales) as jan_sales + ,sum(feb_sales) as feb_sales + ,sum(mar_sales) as mar_sales + ,sum(apr_sales) as apr_sales + ,sum(may_sales) as may_sales + ,sum(jun_sales) as jun_sales + ,sum(jul_sales) as jul_sales + ,sum(aug_sales) as aug_sales + ,sum(sep_sales) as sep_sales + ,sum(oct_sales) as oct_sales + ,sum(nov_sales) as nov_sales + ,sum(dec_sales) as dec_sales + ,sum(jan_sales/w_warehouse_sq_ft) as jan_sales_per_sq_foot + ,sum(feb_sales/w_warehouse_sq_ft) as feb_sales_per_sq_foot + ,sum(mar_sales/w_warehouse_sq_ft) as mar_sales_per_sq_foot + ,sum(apr_sales/w_warehouse_sq_ft) as apr_sales_per_sq_foot + ,sum(may_sales/w_warehouse_sq_ft) as may_sales_per_sq_foot + ,sum(jun_sales/w_warehouse_sq_ft) as jun_sales_per_sq_foot + ,sum(jul_sales/w_warehouse_sq_ft) as jul_sales_per_sq_foot + ,sum(aug_sales/w_warehouse_sq_ft) as aug_sales_per_sq_foot + ,sum(sep_sales/w_warehouse_sq_ft) as sep_sales_per_sq_foot + ,sum(oct_sales/w_warehouse_sq_ft) as oct_sales_per_sq_foot + ,sum(nov_sales/w_warehouse_sq_ft) as nov_sales_per_sq_foot + ,sum(dec_sales/w_warehouse_sq_ft) as dec_sales_per_sq_foot + ,sum(jan_net) as jan_net + ,sum(feb_net) as feb_net + ,sum(mar_net) as mar_net + ,sum(apr_net) as apr_net + ,sum(may_net) as may_net + ,sum(jun_net) as jun_net + ,sum(jul_net) as jul_net + ,sum(aug_net) as aug_net + ,sum(sep_net) as sep_net + ,sum(oct_net) as oct_net + ,sum(nov_net) as nov_net + ,sum(dec_net) as dec_net + from ( + select + w_warehouse_name + ,w_warehouse_sq_ft + ,w_city + ,w_county + ,w_state + ,w_country + ,'MSC' || ',' || 'GERMA' as ship_carriers + ,d_year as year + ,sum(case when d_moy = 1 + then ws_sales_price* ws_quantity else 0 end) as jan_sales + ,sum(case when d_moy = 2 + then ws_sales_price* ws_quantity else 0 end) as feb_sales + ,sum(case when d_moy = 3 + then ws_sales_price* ws_quantity else 0 end) as mar_sales + ,sum(case when d_moy = 4 + then ws_sales_price* ws_quantity else 0 end) as apr_sales + ,sum(case when d_moy = 5 + then ws_sales_price* ws_quantity else 0 end) as may_sales + ,sum(case when d_moy = 6 + then ws_sales_price* ws_quantity else 0 end) as jun_sales + ,sum(case when d_moy = 7 + then ws_sales_price* ws_quantity else 0 end) as jul_sales + ,sum(case when d_moy = 8 + then ws_sales_price* ws_quantity else 0 end) as aug_sales + ,sum(case when d_moy = 9 + then ws_sales_price* ws_quantity else 0 end) as sep_sales + ,sum(case when d_moy = 10 + then ws_sales_price* ws_quantity else 0 end) as oct_sales + ,sum(case when d_moy = 11 + then ws_sales_price* ws_quantity else 0 end) as nov_sales + ,sum(case when d_moy = 12 + then ws_sales_price* ws_quantity else 0 end) as dec_sales + ,sum(case when d_moy = 1 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jan_net + ,sum(case when d_moy = 2 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as feb_net + ,sum(case when d_moy = 3 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as mar_net + ,sum(case when d_moy = 4 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as apr_net + ,sum(case when d_moy = 5 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as may_net + ,sum(case when d_moy = 6 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jun_net + ,sum(case when d_moy = 7 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as jul_net + ,sum(case when d_moy = 8 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as aug_net + ,sum(case when d_moy = 9 + then ws_net_paid_inc_ship_tax * ws_quantity 
else 0 end) as sep_net + ,sum(case when d_moy = 10 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as oct_net + ,sum(case when d_moy = 11 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as nov_net + ,sum(case when d_moy = 12 + then ws_net_paid_inc_ship_tax * ws_quantity else 0 end) as dec_net + from + web_sales + ,warehouse + ,date_dim + ,time_dim + ,ship_mode + where + ws_warehouse_sk = w_warehouse_sk + and ws_sold_date_sk = d_date_sk + and ws_sold_time_sk = t_time_sk + and ws_ship_mode_sk = sm_ship_mode_sk + and d_year = 2001 + and t_time between 9453 and 9453+28800 + and sm_carrier in ('MSC','GERMA') + group by + w_warehouse_name + ,w_warehouse_sq_ft + ,w_city + ,w_county + ,w_state + ,w_country + ,d_year + union all + select + w_warehouse_name + ,w_warehouse_sq_ft + ,w_city + ,w_county + ,w_state + ,w_country + ,'MSC' || ',' || 'GERMA' as ship_carriers + ,d_year as year + ,sum(case when d_moy = 1 + then cs_ext_list_price* cs_quantity else 0 end) as jan_sales + ,sum(case when d_moy = 2 + then cs_ext_list_price* cs_quantity else 0 end) as feb_sales + ,sum(case when d_moy = 3 + then cs_ext_list_price* cs_quantity else 0 end) as mar_sales + ,sum(case when d_moy = 4 + then cs_ext_list_price* cs_quantity else 0 end) as apr_sales + ,sum(case when d_moy = 5 + then cs_ext_list_price* cs_quantity else 0 end) as may_sales + ,sum(case when d_moy = 6 + then cs_ext_list_price* cs_quantity else 0 end) as jun_sales + ,sum(case when d_moy = 7 + then cs_ext_list_price* cs_quantity else 0 end) as jul_sales + ,sum(case when d_moy = 8 + then cs_ext_list_price* cs_quantity else 0 end) as aug_sales + ,sum(case when d_moy = 9 + then cs_ext_list_price* cs_quantity else 0 end) as sep_sales + ,sum(case when d_moy = 10 + then cs_ext_list_price* cs_quantity else 0 end) as oct_sales + ,sum(case when d_moy = 11 + then cs_ext_list_price* cs_quantity else 0 end) as nov_sales + ,sum(case when d_moy = 12 + then cs_ext_list_price* cs_quantity else 0 end) as dec_sales + ,sum(case when d_moy = 1 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as jan_net + ,sum(case when d_moy = 2 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as feb_net + ,sum(case when d_moy = 3 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as mar_net + ,sum(case when d_moy = 4 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as apr_net + ,sum(case when d_moy = 5 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as may_net + ,sum(case when d_moy = 6 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as jun_net + ,sum(case when d_moy = 7 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as jul_net + ,sum(case when d_moy = 8 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as aug_net + ,sum(case when d_moy = 9 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as sep_net + ,sum(case when d_moy = 10 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as oct_net + ,sum(case when d_moy = 11 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as nov_net + ,sum(case when d_moy = 12 + then cs_net_paid_inc_ship * cs_quantity else 0 end) as dec_net + from + catalog_sales + ,warehouse + ,date_dim + ,time_dim + ,ship_mode + where + cs_warehouse_sk = w_warehouse_sk + and cs_sold_date_sk = d_date_sk + and cs_sold_time_sk = t_time_sk + and cs_ship_mode_sk = sm_ship_mode_sk + and d_year = 2001 + and t_time between 9453 AND 9453+28800 + and sm_carrier in ('MSC','GERMA') + group by + w_warehouse_name + ,w_warehouse_sq_ft + ,w_city + ,w_county + ,w_state + ,w_country + ,d_year + ) x + group by + 
w_warehouse_name + ,w_warehouse_sq_ft + ,w_city + ,w_county + ,w_state + ,w_country + ,ship_carriers + ,year + order by w_warehouse_name + limit 100; + +-- end query 1 in stream 0 using template query66.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query67.sql b/src/s3select/TPCDS/sample-queries-tpcds/query67.sql new file mode 100644 index 000000000..df485b9ce --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query67.sql @@ -0,0 +1,44 @@ +-- start query 1 in stream 0 using template query67.tpl and seed 1819994127 +select * +from (select i_category + ,i_class + ,i_brand + ,i_product_name + ,d_year + ,d_qoy + ,d_moy + ,s_store_id + ,sumsales + ,rank() over (partition by i_category order by sumsales desc) rk + from (select i_category + ,i_class + ,i_brand + ,i_product_name + ,d_year + ,d_qoy + ,d_moy + ,s_store_id + ,sum(coalesce(ss_sales_price*ss_quantity,0)) sumsales + from store_sales + ,date_dim + ,store + ,item + where ss_sold_date_sk=d_date_sk + and ss_item_sk=i_item_sk + and ss_store_sk = s_store_sk + and d_month_seq between 1185 and 1185+11 + group by rollup(i_category, i_class, i_brand, i_product_name, d_year, d_qoy, d_moy,s_store_id))dw1) dw2 +where rk <= 100 +order by i_category + ,i_class + ,i_brand + ,i_product_name + ,d_year + ,d_qoy + ,d_moy + ,s_store_id + ,sumsales + ,rk +limit 100; + +-- end query 1 in stream 0 using template query67.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query68.sql b/src/s3select/TPCDS/sample-queries-tpcds/query68.sql new file mode 100644 index 000000000..9c16a7eed --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query68.sql @@ -0,0 +1,42 @@ +-- start query 1 in stream 0 using template query68.tpl and seed 803547492 +select c_last_name + ,c_first_name + ,ca_city + ,bought_city + ,ss_ticket_number + ,extended_price + ,extended_tax + ,list_price + from (select ss_ticket_number + ,ss_customer_sk + ,ca_city bought_city + ,sum(ss_ext_sales_price) extended_price + ,sum(ss_ext_list_price) list_price + ,sum(ss_ext_tax) extended_tax + from store_sales + ,date_dim + ,store + ,household_demographics + ,customer_address + where store_sales.ss_sold_date_sk = date_dim.d_date_sk + and store_sales.ss_store_sk = store.s_store_sk + and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk + and store_sales.ss_addr_sk = customer_address.ca_address_sk + and date_dim.d_dom between 1 and 2 + and (household_demographics.hd_dep_count = 4 or + household_demographics.hd_vehicle_count= 0) + and date_dim.d_year in (1999,1999+1,1999+2) + and store.s_city in ('Pleasant Hill','Bethel') + group by ss_ticket_number + ,ss_customer_sk + ,ss_addr_sk,ca_city) dn + ,customer + ,customer_address current_addr + where ss_customer_sk = c_customer_sk + and customer.c_current_addr_sk = current_addr.ca_address_sk + and current_addr.ca_city <> bought_city + order by c_last_name + ,ss_ticket_number + limit 100; + +-- end query 1 in stream 0 using template query68.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query69.sql b/src/s3select/TPCDS/sample-queries-tpcds/query69.sql new file mode 100644 index 000000000..441077440 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query69.sql @@ -0,0 +1,47 @@ +-- start query 1 in stream 0 using template query69.tpl and seed 797269820 +select + cd_gender, + cd_marital_status, + cd_education_status, + count(*) cnt1, + cd_purchase_estimate, + count(*) cnt2, + cd_credit_rating, + count(*) cnt3 + from + customer c,customer_address ca,customer_demographics + where + c.c_current_addr_sk = 
ca.ca_address_sk and + ca_state in ('MO','MN','AZ') and + cd_demo_sk = c.c_current_cdemo_sk and + exists (select * + from store_sales,date_dim + where c.c_customer_sk = ss_customer_sk and + ss_sold_date_sk = d_date_sk and + d_year = 2003 and + d_moy between 2 and 2+2) and + (not exists (select * + from web_sales,date_dim + where c.c_customer_sk = ws_bill_customer_sk and + ws_sold_date_sk = d_date_sk and + d_year = 2003 and + d_moy between 2 and 2+2) and + not exists (select * + from catalog_sales,date_dim + where c.c_customer_sk = cs_ship_customer_sk and + cs_sold_date_sk = d_date_sk and + d_year = 2003 and + d_moy between 2 and 2+2)) + group by cd_gender, + cd_marital_status, + cd_education_status, + cd_purchase_estimate, + cd_credit_rating + order by cd_gender, + cd_marital_status, + cd_education_status, + cd_purchase_estimate, + cd_credit_rating + limit 100; + +-- end query 1 in stream 0 using template query69.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query7.sql b/src/s3select/TPCDS/sample-queries-tpcds/query7.sql new file mode 100644 index 000000000..f17231ebb --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query7.sql @@ -0,0 +1,21 @@ +-- start query 1 in stream 0 using template query7.tpl and seed 1930872976 +select i_item_id, + avg(ss_quantity) agg1, + avg(ss_list_price) agg2, + avg(ss_coupon_amt) agg3, + avg(ss_sales_price) agg4 + from store_sales, customer_demographics, date_dim, item, promotion + where ss_sold_date_sk = d_date_sk and + ss_item_sk = i_item_sk and + ss_cdemo_sk = cd_demo_sk and + ss_promo_sk = p_promo_sk and + cd_gender = 'F' and + cd_marital_status = 'W' and + cd_education_status = 'College' and + (p_channel_email = 'N' or p_channel_event = 'N') and + d_year = 2001 + group by i_item_id + order by i_item_id + limit 100; + +-- end query 1 in stream 0 using template query7.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query70.sql b/src/s3select/TPCDS/sample-queries-tpcds/query70.sql new file mode 100644 index 000000000..1d44fbd6a --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query70.sql @@ -0,0 +1,38 @@ +-- start query 1 in stream 0 using template query70.tpl and seed 1819994127 +select + sum(ss_net_profit) as total_sum + ,s_state + ,s_county + ,grouping(s_state)+grouping(s_county) as lochierarchy + ,rank() over ( + partition by grouping(s_state)+grouping(s_county), + case when grouping(s_county) = 0 then s_state end + order by sum(ss_net_profit) desc) as rank_within_parent + from + store_sales + ,date_dim d1 + ,store + where + d1.d_month_seq between 1218 and 1218+11 + and d1.d_date_sk = ss_sold_date_sk + and s_store_sk = ss_store_sk + and s_state in + ( select s_state + from (select s_state as s_state, + rank() over ( partition by s_state order by sum(ss_net_profit) desc) as ranking + from store_sales, store, date_dim + where d_month_seq between 1218 and 1218+11 + and d_date_sk = ss_sold_date_sk + and s_store_sk = ss_store_sk + group by s_state + ) tmp1 + where ranking <= 5 + ) + group by rollup(s_state,s_county) + order by + lochierarchy desc + ,case when lochierarchy = 0 then s_state end + ,rank_within_parent + limit 100; + +-- end query 1 in stream 0 using template query70.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query71.sql b/src/s3select/TPCDS/sample-queries-tpcds/query71.sql new file mode 100644 index 000000000..eee496fe8 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query71.sql @@ -0,0 +1,40 @@ +-- start query 1 in stream 0 using template query71.tpl and seed 2031708268 +select 
i_brand_id brand_id, i_brand brand,t_hour,t_minute, + sum(ext_price) ext_price + from item, (select ws_ext_sales_price as ext_price, + ws_sold_date_sk as sold_date_sk, + ws_item_sk as sold_item_sk, + ws_sold_time_sk as time_sk + from web_sales,date_dim + where d_date_sk = ws_sold_date_sk + and d_moy=12 + and d_year=2000 + union all + select cs_ext_sales_price as ext_price, + cs_sold_date_sk as sold_date_sk, + cs_item_sk as sold_item_sk, + cs_sold_time_sk as time_sk + from catalog_sales,date_dim + where d_date_sk = cs_sold_date_sk + and d_moy=12 + and d_year=2000 + union all + select ss_ext_sales_price as ext_price, + ss_sold_date_sk as sold_date_sk, + ss_item_sk as sold_item_sk, + ss_sold_time_sk as time_sk + from store_sales,date_dim + where d_date_sk = ss_sold_date_sk + and d_moy=12 + and d_year=2000 + ) tmp,time_dim + where + sold_item_sk = i_item_sk + and i_manager_id=1 + and time_sk = t_time_sk + and (t_meal_time = 'breakfast' or t_meal_time = 'dinner') + group by i_brand, i_brand_id,t_hour,t_minute + order by ext_price desc, i_brand_id + ; + +-- end query 1 in stream 0 using template query71.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query72.sql b/src/s3select/TPCDS/sample-queries-tpcds/query72.sql new file mode 100644 index 000000000..9ac928d60 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query72.sql @@ -0,0 +1,29 @@ +-- start query 1 in stream 0 using template query72.tpl and seed 2031708268 +select i_item_desc + ,w_warehouse_name + ,d1.d_week_seq + ,sum(case when p_promo_sk is null then 1 else 0 end) no_promo + ,sum(case when p_promo_sk is not null then 1 else 0 end) promo + ,count(*) total_cnt +from catalog_sales +join inventory on (cs_item_sk = inv_item_sk) +join warehouse on (w_warehouse_sk=inv_warehouse_sk) +join item on (i_item_sk = cs_item_sk) +join customer_demographics on (cs_bill_cdemo_sk = cd_demo_sk) +join household_demographics on (cs_bill_hdemo_sk = hd_demo_sk) +join date_dim d1 on (cs_sold_date_sk = d1.d_date_sk) +join date_dim d2 on (inv_date_sk = d2.d_date_sk) +join date_dim d3 on (cs_ship_date_sk = d3.d_date_sk) +left outer join promotion on (cs_promo_sk=p_promo_sk) +left outer join catalog_returns on (cr_item_sk = cs_item_sk and cr_order_number = cs_order_number) +where d1.d_week_seq = d2.d_week_seq + and inv_quantity_on_hand < cs_quantity + and d3.d_date > d1.d_date + INTERVAL '5' DAY + and hd_buy_potential = '1001-5000' + and d1.d_year = 2000 + and cd_marital_status = 'D' +group by i_item_desc,w_warehouse_name,d1.d_week_seq +order by total_cnt desc, i_item_desc, w_warehouse_name, d_week_seq +limit 100; + +-- end query 1 in stream 0 using template query72.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query73.sql b/src/s3select/TPCDS/sample-queries-tpcds/query73.sql new file mode 100644 index 000000000..2daa968d7 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query73.sql @@ -0,0 +1,28 @@ +-- start query 1 in stream 0 using template query73.tpl and seed 1971067816 +select c_last_name + ,c_first_name + ,c_salutation + ,c_preferred_cust_flag + ,ss_ticket_number + ,cnt from + (select ss_ticket_number + ,ss_customer_sk + ,count(*) cnt + from store_sales,date_dim,store,household_demographics + where store_sales.ss_sold_date_sk = date_dim.d_date_sk + and store_sales.ss_store_sk = store.s_store_sk + and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk + and date_dim.d_dom between 1 and 2 + and (household_demographics.hd_buy_potential = '>10000' or + household_demographics.hd_buy_potential = '5001-10000') + 
and household_demographics.hd_vehicle_count > 0 + and case when household_demographics.hd_vehicle_count > 0 then + household_demographics.hd_dep_count/ household_demographics.hd_vehicle_count else null end > 1 + and date_dim.d_year in (2000,2000+1,2000+2) + and store.s_county in ('Lea County','Furnas County','Pennington County','Bronx County') + group by ss_ticket_number,ss_customer_sk) dj,customer + where ss_customer_sk = c_customer_sk + and cnt between 1 and 5 + order by cnt desc, c_last_name asc; + +-- end query 1 in stream 0 using template query73.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query74.sql b/src/s3select/TPCDS/sample-queries-tpcds/query74.sql new file mode 100644 index 000000000..d44235f5c --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query74.sql @@ -0,0 +1,61 @@ +-- start query 1 in stream 0 using template query74.tpl and seed 1556717815 +with year_total as ( + select c_customer_id customer_id + ,c_first_name customer_first_name + ,c_last_name customer_last_name + ,d_year as year + ,sum(ss_net_paid) year_total + ,'s' sale_type + from customer + ,store_sales + ,date_dim + where c_customer_sk = ss_customer_sk + and ss_sold_date_sk = d_date_sk + and d_year in (1998,1998+1) + group by c_customer_id + ,c_first_name + ,c_last_name + ,d_year + union all + select c_customer_id customer_id + ,c_first_name customer_first_name + ,c_last_name customer_last_name + ,d_year as year + ,sum(ws_net_paid) year_total + ,'w' sale_type + from customer + ,web_sales + ,date_dim + where c_customer_sk = ws_bill_customer_sk + and ws_sold_date_sk = d_date_sk + and d_year in (1998,1998+1) + group by c_customer_id + ,c_first_name + ,c_last_name + ,d_year + ) + select + t_s_secyear.customer_id, t_s_secyear.customer_first_name, t_s_secyear.customer_last_name + from year_total t_s_firstyear + ,year_total t_s_secyear + ,year_total t_w_firstyear + ,year_total t_w_secyear + where t_s_secyear.customer_id = t_s_firstyear.customer_id + and t_s_firstyear.customer_id = t_w_secyear.customer_id + and t_s_firstyear.customer_id = t_w_firstyear.customer_id + and t_s_firstyear.sale_type = 's' + and t_w_firstyear.sale_type = 'w' + and t_s_secyear.sale_type = 's' + and t_w_secyear.sale_type = 'w' + and t_s_firstyear.year = 1998 + and t_s_secyear.year = 1998+1 + and t_w_firstyear.year = 1998 + and t_w_secyear.year = 1998+1 + and t_s_firstyear.year_total > 0 + and t_w_firstyear.year_total > 0 + and case when t_w_firstyear.year_total > 0 then t_w_secyear.year_total / t_w_firstyear.year_total else null end + > case when t_s_firstyear.year_total > 0 then t_s_secyear.year_total / t_s_firstyear.year_total else null end + order by 3,1,2 +limit 100; + +-- end query 1 in stream 0 using template query74.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query75.sql b/src/s3select/TPCDS/sample-queries-tpcds/query75.sql new file mode 100644 index 000000000..50b507e1b --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query75.sql @@ -0,0 +1,70 @@ +-- start query 1 in stream 0 using template query75.tpl and seed 1819994127 +WITH all_sales AS ( + SELECT d_year + ,i_brand_id + ,i_class_id + ,i_category_id + ,i_manufact_id + ,SUM(sales_cnt) AS sales_cnt + ,SUM(sales_amt) AS sales_amt + FROM (SELECT d_year + ,i_brand_id + ,i_class_id + ,i_category_id + ,i_manufact_id + ,cs_quantity - COALESCE(cr_return_quantity,0) AS sales_cnt + ,cs_ext_sales_price - COALESCE(cr_return_amount,0.0) AS sales_amt + FROM catalog_sales JOIN item ON i_item_sk=cs_item_sk + JOIN date_dim ON d_date_sk=cs_sold_date_sk + 
LEFT JOIN catalog_returns ON (cs_order_number=cr_order_number + AND cs_item_sk=cr_item_sk) + WHERE i_category='Sports' + UNION + SELECT d_year + ,i_brand_id + ,i_class_id + ,i_category_id + ,i_manufact_id + ,ss_quantity - COALESCE(sr_return_quantity,0) AS sales_cnt + ,ss_ext_sales_price - COALESCE(sr_return_amt,0.0) AS sales_amt + FROM store_sales JOIN item ON i_item_sk=ss_item_sk + JOIN date_dim ON d_date_sk=ss_sold_date_sk + LEFT JOIN store_returns ON (ss_ticket_number=sr_ticket_number + AND ss_item_sk=sr_item_sk) + WHERE i_category='Sports' + UNION + SELECT d_year + ,i_brand_id + ,i_class_id + ,i_category_id + ,i_manufact_id + ,ws_quantity - COALESCE(wr_return_quantity,0) AS sales_cnt + ,ws_ext_sales_price - COALESCE(wr_return_amt,0.0) AS sales_amt + FROM web_sales JOIN item ON i_item_sk=ws_item_sk + JOIN date_dim ON d_date_sk=ws_sold_date_sk + LEFT JOIN web_returns ON (ws_order_number=wr_order_number + AND ws_item_sk=wr_item_sk) + WHERE i_category='Sports') sales_detail + GROUP BY d_year, i_brand_id, i_class_id, i_category_id, i_manufact_id) + SELECT prev_yr.d_year AS prev_year + ,curr_yr.d_year AS year + ,curr_yr.i_brand_id + ,curr_yr.i_class_id + ,curr_yr.i_category_id + ,curr_yr.i_manufact_id + ,prev_yr.sales_cnt AS prev_yr_cnt + ,curr_yr.sales_cnt AS curr_yr_cnt + ,curr_yr.sales_cnt-prev_yr.sales_cnt AS sales_cnt_diff + ,curr_yr.sales_amt-prev_yr.sales_amt AS sales_amt_diff + FROM all_sales curr_yr, all_sales prev_yr + WHERE curr_yr.i_brand_id=prev_yr.i_brand_id + AND curr_yr.i_class_id=prev_yr.i_class_id + AND curr_yr.i_category_id=prev_yr.i_category_id + AND curr_yr.i_manufact_id=prev_yr.i_manufact_id + AND curr_yr.d_year=2001 + AND prev_yr.d_year=2001-1 + AND CAST(curr_yr.sales_cnt AS DECIMAL(17,2))/CAST(prev_yr.sales_cnt AS DECIMAL(17,2))<0.9 + ORDER BY sales_cnt_diff,sales_amt_diff + limit 100; + +-- end query 1 in stream 0 using template query75.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query76.sql b/src/s3select/TPCDS/sample-queries-tpcds/query76.sql new file mode 100644 index 000000000..eeb5d287c --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query76.sql @@ -0,0 +1,24 @@ +-- start query 1 in stream 0 using template query76.tpl and seed 2031708268 +select channel, col_name, d_year, d_qoy, i_category, COUNT(*) sales_cnt, SUM(ext_sales_price) sales_amt FROM ( + SELECT 'store' as channel, 'ss_customer_sk' col_name, d_year, d_qoy, i_category, ss_ext_sales_price ext_sales_price + FROM store_sales, item, date_dim + WHERE ss_customer_sk IS NULL + AND ss_sold_date_sk=d_date_sk + AND ss_item_sk=i_item_sk + UNION ALL + SELECT 'web' as channel, 'ws_ship_addr_sk' col_name, d_year, d_qoy, i_category, ws_ext_sales_price ext_sales_price + FROM web_sales, item, date_dim + WHERE ws_ship_addr_sk IS NULL + AND ws_sold_date_sk=d_date_sk + AND ws_item_sk=i_item_sk + UNION ALL + SELECT 'catalog' as channel, 'cs_ship_mode_sk' col_name, d_year, d_qoy, i_category, cs_ext_sales_price ext_sales_price + FROM catalog_sales, item, date_dim + WHERE cs_ship_mode_sk IS NULL + AND cs_sold_date_sk=d_date_sk + AND cs_item_sk=i_item_sk) foo +GROUP BY channel, col_name, d_year, d_qoy, i_category +ORDER BY channel, col_name, d_year, d_qoy, i_category +limit 100; + +-- end query 1 in stream 0 using template query76.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query77.sql b/src/s3select/TPCDS/sample-queries-tpcds/query77.sql new file mode 100644 index 000000000..9da7766b1 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query77.sql @@ -0,0 +1,108 @@ +-- start query 
1 in stream 0 using template query77.tpl and seed 1819994127 +with ss as + (select s_store_sk, + sum(ss_ext_sales_price) as sales, + sum(ss_net_profit) as profit + from store_sales, + date_dim, + store + where ss_sold_date_sk = d_date_sk + and d_date between cast('2000-08-16' as date) + and (cast('2000-08-16' as date) + interval '30' day) + and ss_store_sk = s_store_sk + group by s_store_sk) + , + sr as + (select s_store_sk, + sum(sr_return_amt) as returns, + sum(sr_net_loss) as profit_loss + from store_returns, + date_dim, + store + where sr_returned_date_sk = d_date_sk + and d_date between cast('2000-08-16' as date) + and (cast('2000-08-16' as date) + interval '30' day) + and sr_store_sk = s_store_sk + group by s_store_sk), + cs as + (select cs_call_center_sk, + sum(cs_ext_sales_price) as sales, + sum(cs_net_profit) as profit + from catalog_sales, + date_dim + where cs_sold_date_sk = d_date_sk + and d_date between cast('2000-08-16' as date) + and (cast('2000-08-16' as date) + interval '30' day) + group by cs_call_center_sk + ), + cr as + (select cr_call_center_sk, + sum(cr_return_amount) as returns, + sum(cr_net_loss) as profit_loss + from catalog_returns, + date_dim + where cr_returned_date_sk = d_date_sk + and d_date between cast('2000-08-16' as date) + and (cast('2000-08-16' as date) + interval '30' day) + group by cr_call_center_sk + ), + ws as + ( select wp_web_page_sk, + sum(ws_ext_sales_price) as sales, + sum(ws_net_profit) as profit + from web_sales, + date_dim, + web_page + where ws_sold_date_sk = d_date_sk + and d_date between cast('2000-08-16' as date) + and (cast('2000-08-16' as date) + interval '30' day) + and ws_web_page_sk = wp_web_page_sk + group by wp_web_page_sk), + wr as + (select wp_web_page_sk, + sum(wr_return_amt) as returns, + sum(wr_net_loss) as profit_loss + from web_returns, + date_dim, + web_page + where wr_returned_date_sk = d_date_sk + and d_date between cast('2000-08-16' as date) + and (cast('2000-08-16' as date) + interval '30' day) + and wr_web_page_sk = wp_web_page_sk + group by wp_web_page_sk) + select channel + , id + , sum(sales) as sales + , sum(returns) as returns + , sum(profit) as profit + from + (select 'store channel' as channel + , ss.s_store_sk as id + , sales + , coalesce(returns, 0) as returns + , (profit - coalesce(profit_loss,0)) as profit + from ss left join sr + on ss.s_store_sk = sr.s_store_sk + union all + select 'catalog channel' as channel + , cs_call_center_sk as id + , sales + , returns + , (profit - profit_loss) as profit + from cs + , cr + union all + select 'web channel' as channel + , ws.wp_web_page_sk as id + , sales + , coalesce(returns, 0) returns + , (profit - coalesce(profit_loss,0)) as profit + from ws left join wr + on ws.wp_web_page_sk = wr.wp_web_page_sk + ) x + group by rollup (channel, id) + order by channel + ,id + limit 100; + +-- end query 1 in stream 0 using template query77.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query78.sql b/src/s3select/TPCDS/sample-queries-tpcds/query78.sql new file mode 100644 index 000000000..51edefc50 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query78.sql @@ -0,0 +1,58 @@ +-- start query 1 in stream 0 using template query78.tpl and seed 1819994127 +with ws as + (select d_year AS ws_sold_year, ws_item_sk, + ws_bill_customer_sk ws_customer_sk, + sum(ws_quantity) ws_qty, + sum(ws_wholesale_cost) ws_wc, + sum(ws_sales_price) ws_sp + from web_sales + left join web_returns on wr_order_number=ws_order_number and ws_item_sk=wr_item_sk + join date_dim on 
ws_sold_date_sk = d_date_sk + where wr_order_number is null + group by d_year, ws_item_sk, ws_bill_customer_sk + ), +cs as + (select d_year AS cs_sold_year, cs_item_sk, + cs_bill_customer_sk cs_customer_sk, + sum(cs_quantity) cs_qty, + sum(cs_wholesale_cost) cs_wc, + sum(cs_sales_price) cs_sp + from catalog_sales + left join catalog_returns on cr_order_number=cs_order_number and cs_item_sk=cr_item_sk + join date_dim on cs_sold_date_sk = d_date_sk + where cr_order_number is null + group by d_year, cs_item_sk, cs_bill_customer_sk + ), +ss as + (select d_year AS ss_sold_year, ss_item_sk, + ss_customer_sk, + sum(ss_quantity) ss_qty, + sum(ss_wholesale_cost) ss_wc, + sum(ss_sales_price) ss_sp + from store_sales + left join store_returns on sr_ticket_number=ss_ticket_number and ss_item_sk=sr_item_sk + join date_dim on ss_sold_date_sk = d_date_sk + where sr_ticket_number is null + group by d_year, ss_item_sk, ss_customer_sk + ) + select +ss_customer_sk, +round(ss_qty/(coalesce(ws_qty,0)+coalesce(cs_qty,0)),2) ratio, +ss_qty store_qty, ss_wc store_wholesale_cost, ss_sp store_sales_price, +coalesce(ws_qty,0)+coalesce(cs_qty,0) other_chan_qty, +coalesce(ws_wc,0)+coalesce(cs_wc,0) other_chan_wholesale_cost, +coalesce(ws_sp,0)+coalesce(cs_sp,0) other_chan_sales_price +from ss +left join ws on (ws_sold_year=ss_sold_year and ws_item_sk=ss_item_sk and ws_customer_sk=ss_customer_sk) +left join cs on (cs_sold_year=ss_sold_year and cs_item_sk=ss_item_sk and cs_customer_sk=ss_customer_sk) +where (coalesce(ws_qty,0)>0 or coalesce(cs_qty, 0)>0) and ss_sold_year=2001 +order by + ss_customer_sk, + ss_qty desc, ss_wc desc, ss_sp desc, + other_chan_qty, + other_chan_wholesale_cost, + other_chan_sales_price, + ratio +limit 100; + +-- end query 1 in stream 0 using template query78.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query79.sql b/src/s3select/TPCDS/sample-queries-tpcds/query79.sql new file mode 100644 index 000000000..72ae60c61 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query79.sql @@ -0,0 +1,23 @@ +-- start query 1 in stream 0 using template query79.tpl and seed 2031708268 +select + c_last_name,c_first_name,substr(s_city,1,30),ss_ticket_number,amt,profit + from + (select ss_ticket_number + ,ss_customer_sk + ,store.s_city + ,sum(ss_coupon_amt) amt + ,sum(ss_net_profit) profit + from store_sales,date_dim,store,household_demographics + where store_sales.ss_sold_date_sk = date_dim.d_date_sk + and store_sales.ss_store_sk = store.s_store_sk + and store_sales.ss_hdemo_sk = household_demographics.hd_demo_sk + and (household_demographics.hd_dep_count = 0 or household_demographics.hd_vehicle_count > 3) + and date_dim.d_dow = 1 + and date_dim.d_year in (1998,1998+1,1998+2) + and store.s_number_employees between 200 and 295 + group by ss_ticket_number,ss_customer_sk,ss_addr_sk,store.s_city) ms,customer + where ss_customer_sk = c_customer_sk + order by c_last_name,c_first_name,substr(s_city,1,30), profit +limit 100; + +-- end query 1 in stream 0 using template query79.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query8.sql b/src/s3select/TPCDS/sample-queries-tpcds/query8.sql new file mode 100644 index 000000000..6faf5d819 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query8.sql @@ -0,0 +1,108 @@ +-- start query 1 in stream 0 using template query8.tpl and seed 1766988859 +select s_store_name + ,sum(ss_net_profit) + from store_sales + ,date_dim + ,store, + (select ca_zip + from ( + SELECT substr(ca_zip,1,5) ca_zip + FROM customer_address + WHERE substr(ca_zip,1,5) IN 
( + '47602','16704','35863','28577','83910','36201', + '58412','48162','28055','41419','80332', + '38607','77817','24891','16226','18410', + '21231','59345','13918','51089','20317', + '17167','54585','67881','78366','47770', + '18360','51717','73108','14440','21800', + '89338','45859','65501','34948','25973', + '73219','25333','17291','10374','18829', + '60736','82620','41351','52094','19326', + '25214','54207','40936','21814','79077', + '25178','75742','77454','30621','89193', + '27369','41232','48567','83041','71948', + '37119','68341','14073','16891','62878', + '49130','19833','24286','27700','40979', + '50412','81504','94835','84844','71954', + '39503','57649','18434','24987','12350', + '86379','27413','44529','98569','16515', + '27287','24255','21094','16005','56436', + '91110','68293','56455','54558','10298', + '83647','32754','27052','51766','19444', + '13869','45645','94791','57631','20712', + '37788','41807','46507','21727','71836', + '81070','50632','88086','63991','20244', + '31655','51782','29818','63792','68605', + '94898','36430','57025','20601','82080', + '33869','22728','35834','29086','92645', + '98584','98072','11652','78093','57553', + '43830','71144','53565','18700','90209', + '71256','38353','54364','28571','96560', + '57839','56355','50679','45266','84680', + '34306','34972','48530','30106','15371', + '92380','84247','92292','68852','13338', + '34594','82602','70073','98069','85066', + '47289','11686','98862','26217','47529', + '63294','51793','35926','24227','14196', + '24594','32489','99060','49472','43432', + '49211','14312','88137','47369','56877', + '20534','81755','15794','12318','21060', + '73134','41255','63073','81003','73873', + '66057','51184','51195','45676','92696', + '70450','90669','98338','25264','38919', + '59226','58581','60298','17895','19489', + '52301','80846','95464','68770','51634', + '19988','18367','18421','11618','67975', + '25494','41352','95430','15734','62585', + '97173','33773','10425','75675','53535', + '17879','41967','12197','67998','79658', + '59130','72592','14851','43933','68101', + '50636','25717','71286','24660','58058', + '72991','95042','15543','33122','69280', + '11912','59386','27642','65177','17672', + '33467','64592','36335','54010','18767', + '63193','42361','49254','33113','33159', + '36479','59080','11855','81963','31016', + '49140','29392','41836','32958','53163', + '13844','73146','23952','65148','93498', + '14530','46131','58454','13376','13378', + '83986','12320','17193','59852','46081', + '98533','52389','13086','68843','31013', + '13261','60560','13443','45533','83583', + '11489','58218','19753','22911','25115', + '86709','27156','32669','13123','51933', + '39214','41331','66943','14155','69998', + '49101','70070','35076','14242','73021', + '59494','15782','29752','37914','74686', + '83086','34473','15751','81084','49230', + '91894','60624','17819','28810','63180', + '56224','39459','55233','75752','43639', + '55349','86057','62361','50788','31830', + '58062','18218','85761','60083','45484', + '21204','90229','70041','41162','35390', + '16364','39500','68908','26689','52868', + '81335','40146','11340','61527','61794', + '71997','30415','59004','29450','58117', + '69952','33562','83833','27385','61860', + '96435','48333','23065','32961','84919', + '61997','99132','22815','56600','68730', + '48017','95694','32919','88217','27116', + '28239','58032','18884','16791','21343', + '97462','18569','75660','15475') + intersect + select ca_zip + from (SELECT substr(ca_zip,1,5) ca_zip,count(*) cnt + FROM customer_address, 
customer + WHERE ca_address_sk = c_current_addr_sk and + c_preferred_cust_flag='Y' + group by ca_zip + having count(*) > 10)A1)A2) V1 + where ss_store_sk = s_store_sk + and ss_sold_date_sk = d_date_sk + and d_qoy = 2 and d_year = 1998 + and (substr(s_zip,1,2) = substr(V1.ca_zip,1,2)) + group by s_store_name + order by s_store_name + limit 100; + +-- end query 1 in stream 0 using template query8.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query80.sql b/src/s3select/TPCDS/sample-queries-tpcds/query80.sql new file mode 100644 index 000000000..4471a8843 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query80.sql @@ -0,0 +1,96 @@ +-- start query 1 in stream 0 using template query80.tpl and seed 1819994127 +with ssr as + (select s_store_id as store_id, + sum(ss_ext_sales_price) as sales, + sum(coalesce(sr_return_amt, 0)) as returns, + sum(ss_net_profit - coalesce(sr_net_loss, 0)) as profit + from store_sales left outer join store_returns on + (ss_item_sk = sr_item_sk and ss_ticket_number = sr_ticket_number), + date_dim, + store, + item, + promotion + where ss_sold_date_sk = d_date_sk + and d_date between cast('2002-08-06' as date) + and (cast('2002-08-06' as date) + interval '30' day) + and ss_store_sk = s_store_sk + and ss_item_sk = i_item_sk + and i_current_price > 50 + and ss_promo_sk = p_promo_sk + and p_channel_tv = 'N' + group by s_store_id) + , + csr as + (select cp_catalog_page_id as catalog_page_id, + sum(cs_ext_sales_price) as sales, + sum(coalesce(cr_return_amount, 0)) as returns, + sum(cs_net_profit - coalesce(cr_net_loss, 0)) as profit + from catalog_sales left outer join catalog_returns on + (cs_item_sk = cr_item_sk and cs_order_number = cr_order_number), + date_dim, + catalog_page, + item, + promotion + where cs_sold_date_sk = d_date_sk + and d_date between cast('2002-08-06' as date) + and (cast('2002-08-06' as date) + interval '30' day) + and cs_catalog_page_sk = cp_catalog_page_sk + and cs_item_sk = i_item_sk + and i_current_price > 50 + and cs_promo_sk = p_promo_sk + and p_channel_tv = 'N' +group by cp_catalog_page_id) + , + wsr as + (select web_site_id, + sum(ws_ext_sales_price) as sales, + sum(coalesce(wr_return_amt, 0)) as returns, + sum(ws_net_profit - coalesce(wr_net_loss, 0)) as profit + from web_sales left outer join web_returns on + (ws_item_sk = wr_item_sk and ws_order_number = wr_order_number), + date_dim, + web_site, + item, + promotion + where ws_sold_date_sk = d_date_sk + and d_date between cast('2002-08-06' as date) + and (cast('2002-08-06' as date) + interval '30' day) + and ws_web_site_sk = web_site_sk + and ws_item_sk = i_item_sk + and i_current_price > 50 + and ws_promo_sk = p_promo_sk + and p_channel_tv = 'N' +group by web_site_id) + select channel + , id + , sum(sales) as sales + , sum(returns) as returns + , sum(profit) as profit + from + (select 'store channel' as channel + , 'store' || store_id as id + , sales + , returns + , profit + from ssr + union all + select 'catalog channel' as channel + , 'catalog_page' || catalog_page_id as id + , sales + , returns + , profit + from csr + union all + select 'web channel' as channel + , 'web_site' || web_site_id as id + , sales + , returns + , profit + from wsr + ) x + group by rollup (channel, id) + order by channel + ,id + limit 100; + +-- end query 1 in stream 0 using template query80.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query81.sql b/src/s3select/TPCDS/sample-queries-tpcds/query81.sql new file mode 100644 index 000000000..3483ab7c6 --- /dev/null +++ 
b/src/s3select/TPCDS/sample-queries-tpcds/query81.sql @@ -0,0 +1,31 @@ +-- start query 1 in stream 0 using template query81.tpl and seed 1819994127 +with customer_total_return as + (select cr_returning_customer_sk as ctr_customer_sk + ,ca_state as ctr_state, + sum(cr_return_amt_inc_tax) as ctr_total_return + from catalog_returns + ,date_dim + ,customer_address + where cr_returned_date_sk = d_date_sk + and d_year =1998 + and cr_returning_addr_sk = ca_address_sk + group by cr_returning_customer_sk + ,ca_state ) + select c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name + ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset + ,ca_location_type,ctr_total_return + from customer_total_return ctr1 + ,customer_address + ,customer + where ctr1.ctr_total_return > (select avg(ctr_total_return)*1.2 + from customer_total_return ctr2 + where ctr1.ctr_state = ctr2.ctr_state) + and ca_address_sk = c_current_addr_sk + and ca_state = 'TX' + and ctr1.ctr_customer_sk = c_customer_sk + order by c_customer_id,c_salutation,c_first_name,c_last_name,ca_street_number,ca_street_name + ,ca_street_type,ca_suite_number,ca_city,ca_county,ca_state,ca_zip,ca_country,ca_gmt_offset + ,ca_location_type,ctr_total_return + limit 100; + +-- end query 1 in stream 0 using template query81.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query82.sql b/src/s3select/TPCDS/sample-queries-tpcds/query82.sql new file mode 100644 index 000000000..fd89678c8 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query82.sql @@ -0,0 +1,17 @@ +-- start query 1 in stream 0 using template query82.tpl and seed 55585014 +select i_item_id + ,i_item_desc + ,i_current_price + from item, inventory, date_dim, store_sales + where i_current_price between 49 and 49+30 + and inv_item_sk = i_item_sk + and d_date_sk=inv_date_sk + and d_date between cast('2001-01-28' as date) and (cast('2001-01-28' as date) + interval '60' day) + and i_manufact_id in (80,675,292,17) + and inv_quantity_on_hand between 100 and 500 + and ss_item_sk = i_item_sk + group by i_item_id,i_item_desc,i_current_price + order by i_item_id + limit 100; + +-- end query 1 in stream 0 using template query82.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query83.sql b/src/s3select/TPCDS/sample-queries-tpcds/query83.sql new file mode 100644 index 000000000..d7bd1adec --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query83.sql @@ -0,0 +1,67 @@ +-- start query 1 in stream 0 using template query83.tpl and seed 1930872976 +with sr_items as + (select i_item_id item_id, + sum(sr_return_quantity) sr_item_qty + from store_returns, + item, + date_dim + where sr_item_sk = i_item_sk + and d_date in + (select d_date + from date_dim + where d_week_seq in + (select d_week_seq + from date_dim + where d_date in (cast('2000-06-17' as date),cast('2000-08-22' as date),cast('2000-11-17' as date)))) + and sr_returned_date_sk = d_date_sk + group by i_item_id), + cr_items as + (select i_item_id item_id, + sum(cr_return_quantity) cr_item_qty + from catalog_returns, + item, + date_dim + where cr_item_sk = i_item_sk + and d_date in + (select d_date + from date_dim + where d_week_seq in + (select d_week_seq + from date_dim + where d_date in (cast('2000-06-17' as date),cast('2000-08-22' as date),cast('2000-11-17' as date)))) + and cr_returned_date_sk = d_date_sk + group by i_item_id), + wr_items as + (select i_item_id item_id, + sum(wr_return_quantity) wr_item_qty + from web_returns, + item, + date_dim + where 
wr_item_sk = i_item_sk + and d_date in + (select d_date + from date_dim + where d_week_seq in + (select d_week_seq + from date_dim + where d_date in (cast('2000-06-17' as date),cast('2000-08-22' as date),cast('2000-11-17' as date)))) + and wr_returned_date_sk = d_date_sk + group by i_item_id) + select sr_items.item_id + ,sr_item_qty + ,sr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 sr_dev + ,cr_item_qty + ,cr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 cr_dev + ,wr_item_qty + ,wr_item_qty/(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 * 100 wr_dev + ,(sr_item_qty+cr_item_qty+wr_item_qty)/3.0 average + from sr_items + ,cr_items + ,wr_items + where sr_items.item_id=cr_items.item_id + and sr_items.item_id=wr_items.item_id + order by sr_items.item_id + ,sr_item_qty + limit 100; + +-- end query 1 in stream 0 using template query83.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query84.sql b/src/s3select/TPCDS/sample-queries-tpcds/query84.sql new file mode 100644 index 000000000..ba83c5a18 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query84.sql @@ -0,0 +1,21 @@ +-- start query 1 in stream 0 using template query84.tpl and seed 1819994127 +select c_customer_id as customer_id + , coalesce(c_last_name,'') || ', ' || coalesce(c_first_name,'') as customername + from customer + ,customer_address + ,customer_demographics + ,household_demographics + ,income_band + ,store_returns + where ca_city = 'Hopewell' + and c_current_addr_sk = ca_address_sk + and ib_lower_bound >= 37855 + and ib_upper_bound <= 37855 + 50000 + and ib_income_band_sk = hd_income_band_sk + and cd_demo_sk = c_current_cdemo_sk + and hd_demo_sk = c_current_hdemo_sk + and sr_cdemo_sk = cd_demo_sk + order by c_customer_id + limit 100; + +-- end query 1 in stream 0 using template query84.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query85.sql b/src/s3select/TPCDS/sample-queries-tpcds/query85.sql new file mode 100644 index 000000000..dc7932a90 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query85.sql @@ -0,0 +1,84 @@ +-- start query 1 in stream 0 using template query85.tpl and seed 622697896 +select substr(r_reason_desc,1,20) + ,avg(ws_quantity) + ,avg(wr_refunded_cash) + ,avg(wr_fee) + from web_sales, web_returns, web_page, customer_demographics cd1, + customer_demographics cd2, customer_address, date_dim, reason + where ws_web_page_sk = wp_web_page_sk + and ws_item_sk = wr_item_sk + and ws_order_number = wr_order_number + and ws_sold_date_sk = d_date_sk and d_year = 2001 + and cd1.cd_demo_sk = wr_refunded_cdemo_sk + and cd2.cd_demo_sk = wr_returning_cdemo_sk + and ca_address_sk = wr_refunded_addr_sk + and r_reason_sk = wr_reason_sk + and + ( + ( + cd1.cd_marital_status = 'M' + and + cd1.cd_marital_status = cd2.cd_marital_status + and + cd1.cd_education_status = '4 yr Degree' + and + cd1.cd_education_status = cd2.cd_education_status + and + ws_sales_price between 100.00 and 150.00 + ) + or + ( + cd1.cd_marital_status = 'S' + and + cd1.cd_marital_status = cd2.cd_marital_status + and + cd1.cd_education_status = 'College' + and + cd1.cd_education_status = cd2.cd_education_status + and + ws_sales_price between 50.00 and 100.00 + ) + or + ( + cd1.cd_marital_status = 'D' + and + cd1.cd_marital_status = cd2.cd_marital_status + and + cd1.cd_education_status = 'Secondary' + and + cd1.cd_education_status = cd2.cd_education_status + and + ws_sales_price between 150.00 and 200.00 + ) + ) + and + ( + ( + ca_country = 'United States' + and + ca_state in ('TX', 'VA', 'CA') + and 
ws_net_profit between 100 and 200 + ) + or + ( + ca_country = 'United States' + and + ca_state in ('AR', 'NE', 'MO') + and ws_net_profit between 150 and 300 + ) + or + ( + ca_country = 'United States' + and + ca_state in ('IA', 'MS', 'WA') + and ws_net_profit between 50 and 250 + ) + ) +group by r_reason_desc +order by substr(r_reason_desc,1,20) + ,avg(ws_quantity) + ,avg(wr_refunded_cash) + ,avg(wr_fee) +limit 100; + +-- end query 1 in stream 0 using template query85.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query86.sql b/src/s3select/TPCDS/sample-queries-tpcds/query86.sql new file mode 100644 index 000000000..9b65cbda8 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query86.sql @@ -0,0 +1,26 @@ +-- start query 1 in stream 0 using template query86.tpl and seed 1819994127 +select + sum(ws_net_paid) as total_sum + ,i_category + ,i_class + ,grouping(i_category)+grouping(i_class) as lochierarchy + ,rank() over ( + partition by grouping(i_category)+grouping(i_class), + case when grouping(i_class) = 0 then i_category end + order by sum(ws_net_paid) desc) as rank_within_parent + from + web_sales + ,date_dim d1 + ,item + where + d1.d_month_seq between 1215 and 1215+11 + and d1.d_date_sk = ws_sold_date_sk + and i_item_sk = ws_item_sk + group by rollup(i_category,i_class) + order by + lochierarchy desc, + case when lochierarchy = 0 then i_category end, + rank_within_parent + limit 100; + +-- end query 1 in stream 0 using template query86.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query87.sql b/src/s3select/TPCDS/sample-queries-tpcds/query87.sql new file mode 100644 index 000000000..161e46443 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query87.sql @@ -0,0 +1,23 @@ +-- start query 1 in stream 0 using template query87.tpl and seed 1819994127 +select count(*) +from ((select distinct c_last_name, c_first_name, d_date + from store_sales, date_dim, customer + where store_sales.ss_sold_date_sk = date_dim.d_date_sk + and store_sales.ss_customer_sk = customer.c_customer_sk + and d_month_seq between 1221 and 1221+11) + except + (select distinct c_last_name, c_first_name, d_date + from catalog_sales, date_dim, customer + where catalog_sales.cs_sold_date_sk = date_dim.d_date_sk + and catalog_sales.cs_bill_customer_sk = customer.c_customer_sk + and d_month_seq between 1221 and 1221+11) + except + (select distinct c_last_name, c_first_name, d_date + from web_sales, date_dim, customer + where web_sales.ws_sold_date_sk = date_dim.d_date_sk + and web_sales.ws_bill_customer_sk = customer.c_customer_sk + and d_month_seq between 1221 and 1221+11) +) cool_cust +; + +-- end query 1 in stream 0 using template query87.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query88.sql b/src/s3select/TPCDS/sample-queries-tpcds/query88.sql new file mode 100644 index 000000000..895fff5b7 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query88.sql @@ -0,0 +1,94 @@ +-- start query 1 in stream 0 using template query88.tpl and seed 318176889 +select * +from + (select count(*) h8_30_to_9 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 8 + and time_dim.t_minute >= 30 + and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or + (household_demographics.hd_dep_count 
= 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s1, + (select count(*) h9_to_9_30 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 9 + and time_dim.t_minute < 30 + and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s2, + (select count(*) h9_30_to_10 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 9 + and time_dim.t_minute >= 30 + and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s3, + (select count(*) h10_to_10_30 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 10 + and time_dim.t_minute < 30 + and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s4, + (select count(*) h10_30_to_11 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 10 + and time_dim.t_minute >= 30 + and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s5, + (select count(*) h11_to_11_30 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 11 + and time_dim.t_minute < 30 + and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s6, + (select count(*) h11_30_to_12 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 11 + and time_dim.t_minute >= 30 + and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 4 
and household_demographics.hd_vehicle_count<=4+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s7, + (select count(*) h12_to_12_30 + from store_sales, household_demographics , time_dim, store + where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 12 + and time_dim.t_minute < 30 + and ((household_demographics.hd_dep_count = 2 and household_demographics.hd_vehicle_count<=2+2) or + (household_demographics.hd_dep_count = 4 and household_demographics.hd_vehicle_count<=4+2) or + (household_demographics.hd_dep_count = 3 and household_demographics.hd_vehicle_count<=3+2)) + and store.s_store_name = 'ese') s8 +; + +-- end query 1 in stream 0 using template query88.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query89.sql b/src/s3select/TPCDS/sample-queries-tpcds/query89.sql new file mode 100644 index 000000000..3a275d4e0 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query89.sql @@ -0,0 +1,28 @@ +-- start query 1 in stream 0 using template query89.tpl and seed 1719819282 +select * +from( +select i_category, i_class, i_brand, + s_store_name, s_company_name, + d_moy, + sum(ss_sales_price) sum_sales, + avg(sum(ss_sales_price)) over + (partition by i_category, i_brand, s_store_name, s_company_name) + avg_monthly_sales +from item, store_sales, date_dim, store +where ss_item_sk = i_item_sk and + ss_sold_date_sk = d_date_sk and + ss_store_sk = s_store_sk and + d_year in (2000) and + ((i_category in ('Home','Music','Books') and + i_class in ('glassware','classical','fiction') + ) + or (i_category in ('Jewelry','Sports','Women') and + i_class in ('semi-precious','baseball','dresses') + )) +group by i_category, i_class, i_brand, + s_store_name, s_company_name, d_moy) tmp1 +where case when (avg_monthly_sales <> 0) then (abs(sum_sales - avg_monthly_sales) / avg_monthly_sales) else null end > 0.1 +order by sum_sales - avg_monthly_sales, s_store_name +limit 100; + +-- end query 1 in stream 0 using template query89.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query9.sql b/src/s3select/TPCDS/sample-queries-tpcds/query9.sql new file mode 100644 index 000000000..059b9c5fb --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query9.sql @@ -0,0 +1,51 @@ +-- start query 1 in stream 0 using template query9.tpl and seed 1490436826 +select case when (select count(*) + from store_sales + where ss_quantity between 1 and 20) > 98972190 + then (select avg(ss_ext_discount_amt) + from store_sales + where ss_quantity between 1 and 20) + else (select avg(ss_net_profit) + from store_sales + where ss_quantity between 1 and 20) end bucket1 , + case when (select count(*) + from store_sales + where ss_quantity between 21 and 40) > 160856845 + then (select avg(ss_ext_discount_amt) + from store_sales + where ss_quantity between 21 and 40) + else (select avg(ss_net_profit) + from store_sales + where ss_quantity between 21 and 40) end bucket2, + case when (select count(*) + from store_sales + where ss_quantity between 41 and 60) > 12733327 + then (select avg(ss_ext_discount_amt) + from store_sales + where ss_quantity between 41 and 60) + else (select avg(ss_net_profit) + from store_sales + where ss_quantity between 41 and 60) end bucket3, + case when (select count(*) + from store_sales + where ss_quantity between 61 and 80) > 96251173 + then (select avg(ss_ext_discount_amt) + from store_sales + where ss_quantity 
between 61 and 80) + else (select avg(ss_net_profit) + from store_sales + where ss_quantity between 61 and 80) end bucket4, + case when (select count(*) + from store_sales + where ss_quantity between 81 and 100) > 80049606 + then (select avg(ss_ext_discount_amt) + from store_sales + where ss_quantity between 81 and 100) + else (select avg(ss_net_profit) + from store_sales + where ss_quantity between 81 and 100) end bucket5 +from reason +where r_reason_sk = 1 +; + +-- end query 1 in stream 0 using template query9.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query90.sql b/src/s3select/TPCDS/sample-queries-tpcds/query90.sql new file mode 100644 index 000000000..366f07068 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query90.sql @@ -0,0 +1,22 @@ +-- start query 1 in stream 0 using template query90.tpl and seed 2031708268 +select cast(amc as decimal(15,4))/cast(pmc as decimal(15,4)) am_pm_ratio + from ( select count(*) amc + from web_sales, household_demographics , time_dim, web_page + where ws_sold_time_sk = time_dim.t_time_sk + and ws_ship_hdemo_sk = household_demographics.hd_demo_sk + and ws_web_page_sk = web_page.wp_web_page_sk + and time_dim.t_hour between 9 and 9+1 + and household_demographics.hd_dep_count = 3 + and web_page.wp_char_count between 5000 and 5200) at, + ( select count(*) pmc + from web_sales, household_demographics , time_dim, web_page + where ws_sold_time_sk = time_dim.t_time_sk + and ws_ship_hdemo_sk = household_demographics.hd_demo_sk + and ws_web_page_sk = web_page.wp_web_page_sk + and time_dim.t_hour between 16 and 16+1 + and household_demographics.hd_dep_count = 3 + and web_page.wp_char_count between 5000 and 5200) pt + order by am_pm_ratio + limit 100; + +-- end query 1 in stream 0 using template query90.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query91.sql b/src/s3select/TPCDS/sample-queries-tpcds/query91.sql new file mode 100644 index 000000000..057c76ba8 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query91.sql @@ -0,0 +1,31 @@ +-- start query 1 in stream 0 using template query91.tpl and seed 1930872976 +select + cc_call_center_id Call_Center, + cc_name Call_Center_Name, + cc_manager Manager, + sum(cr_net_loss) Returns_Loss +from + call_center, + catalog_returns, + date_dim, + customer, + customer_address, + customer_demographics, + household_demographics +where + cr_call_center_sk = cc_call_center_sk +and cr_returned_date_sk = d_date_sk +and cr_returning_customer_sk= c_customer_sk +and cd_demo_sk = c_current_cdemo_sk +and hd_demo_sk = c_current_hdemo_sk +and ca_address_sk = c_current_addr_sk +and d_year = 2000 +and d_moy = 12 +and ( (cd_marital_status = 'M' and cd_education_status = 'Unknown') + or(cd_marital_status = 'W' and cd_education_status = 'Advanced Degree')) +and hd_buy_potential like 'Unknown%' +and ca_gmt_offset = -7 +group by cc_call_center_id,cc_name,cc_manager,cd_marital_status,cd_education_status +order by sum(cr_net_loss) desc; + +-- end query 1 in stream 0 using template query91.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query92.sql b/src/s3select/TPCDS/sample-queries-tpcds/query92.sql new file mode 100644 index 000000000..547599f4c --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query92.sql @@ -0,0 +1,30 @@ +-- start query 1 in stream 0 using template query92.tpl and seed 2031708268 +select + sum(ws_ext_discount_amt) as "Excess Discount Amount" +from + web_sales + ,item + ,date_dim +where +i_manufact_id = 356 +and i_item_sk = ws_item_sk +and d_date between 
cast('2001-03-12' as date) and + (cast('2001-03-12' as date) + interval '90' day) +and d_date_sk = ws_sold_date_sk +and ws_ext_discount_amt + > ( + SELECT + 1.3 * avg(ws_ext_discount_amt) + FROM + web_sales + ,date_dim + WHERE + ws_item_sk = i_item_sk + and d_date between cast('2001-03-12' as date) and + (cast('2001-03-12' as date) + interval '90' day) + and d_date_sk = ws_sold_date_sk + ) +order by sum(ws_ext_discount_amt) +limit 100; + +-- end query 1 in stream 0 using template query92.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query93.sql b/src/s3select/TPCDS/sample-queries-tpcds/query93.sql new file mode 100644 index 000000000..ed0b427df --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query93.sql @@ -0,0 +1,18 @@ +-- start query 1 in stream 0 using template query93.tpl and seed 1200409435 +select ss_customer_sk + ,sum(act_sales) sumsales + from (select ss_item_sk + ,ss_ticket_number + ,ss_customer_sk + ,case when sr_return_quantity is not null then (ss_quantity-sr_return_quantity)*ss_sales_price + else (ss_quantity*ss_sales_price) end act_sales + from store_sales left outer join store_returns on (sr_item_sk = ss_item_sk + and sr_ticket_number = ss_ticket_number) + ,reason + where sr_reason_sk = r_reason_sk + and r_reason_desc = 'reason 66') t + group by ss_customer_sk + order by sumsales, ss_customer_sk +limit 100; + +-- end query 1 in stream 0 using template query93.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query94.sql b/src/s3select/TPCDS/sample-queries-tpcds/query94.sql new file mode 100644 index 000000000..909281da0 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query94.sql @@ -0,0 +1,29 @@ +-- start query 1 in stream 0 using template query94.tpl and seed 2031708268 +select + count(distinct ws_order_number) as "order count" + ,sum(ws_ext_ship_cost) as "total shipping cost" + ,sum(ws_net_profit) as "total net profit" +from + web_sales ws1 + ,date_dim + ,customer_address + ,web_site +where + d_date between cast('1999-4-01' as date) and + (cast('1999-4-01' as date) + interval '60' day) +and ws1.ws_ship_date_sk = d_date_sk +and ws1.ws_ship_addr_sk = ca_address_sk +and ca_state = 'NE' +and ws1.ws_web_site_sk = web_site_sk +and web_company_name = 'pri' +and exists (select * + from web_sales ws2 + where ws1.ws_order_number = ws2.ws_order_number + and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) +and not exists(select * + from web_returns wr1 + where ws1.ws_order_number = wr1.wr_order_number) +order by count(distinct ws_order_number) +limit 100; + +-- end query 1 in stream 0 using template query94.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query95.sql b/src/s3select/TPCDS/sample-queries-tpcds/query95.sql new file mode 100644 index 000000000..e7320342c --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query95.sql @@ -0,0 +1,32 @@ +-- start query 1 in stream 0 using template query95.tpl and seed 2031708268 +with ws_wh as +(select ws1.ws_order_number,ws1.ws_warehouse_sk wh1,ws2.ws_warehouse_sk wh2 + from web_sales ws1,web_sales ws2 + where ws1.ws_order_number = ws2.ws_order_number + and ws1.ws_warehouse_sk <> ws2.ws_warehouse_sk) + select + count(distinct ws_order_number) as "order count" + ,sum(ws_ext_ship_cost) as "total shipping cost" + ,sum(ws_net_profit) as "total net profit" +from + web_sales ws1 + ,date_dim + ,customer_address + ,web_site +where + d_date between cast('2002-4-01' as date) and + (cast('2002-4-01' as date) + interval '60' day) +and ws1.ws_ship_date_sk = d_date_sk +and ws1.ws_ship_addr_sk = 
ca_address_sk +and ca_state = 'AL' +and ws1.ws_web_site_sk = web_site_sk +and web_company_name = 'pri' +and ws1.ws_order_number in (select ws_order_number + from ws_wh) +and ws1.ws_order_number in (select wr_order_number + from web_returns,ws_wh + where wr_order_number = ws_wh.ws_order_number) +order by count(distinct ws_order_number) +limit 100; + +-- end query 1 in stream 0 using template query95.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query96.sql b/src/s3select/TPCDS/sample-queries-tpcds/query96.sql new file mode 100644 index 000000000..90be5df59 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query96.sql @@ -0,0 +1,16 @@ +-- start query 1 in stream 0 using template query96.tpl and seed 1819994127 +select count(*) +from store_sales + ,household_demographics + ,time_dim, store +where ss_sold_time_sk = time_dim.t_time_sk + and ss_hdemo_sk = household_demographics.hd_demo_sk + and ss_store_sk = s_store_sk + and time_dim.t_hour = 16 + and time_dim.t_minute >= 30 + and household_demographics.hd_dep_count = 6 + and store.s_store_name = 'ese' +order by count(*) +limit 100; + +-- end query 1 in stream 0 using template query96.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query97.sql b/src/s3select/TPCDS/sample-queries-tpcds/query97.sql new file mode 100644 index 000000000..a063214cf --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query97.sql @@ -0,0 +1,25 @@ +-- start query 1 in stream 0 using template query97.tpl and seed 1819994127 +with ssci as ( +select ss_customer_sk customer_sk + ,ss_item_sk item_sk +from store_sales,date_dim +where ss_sold_date_sk = d_date_sk + and d_month_seq between 1190 and 1190 + 11 +group by ss_customer_sk + ,ss_item_sk), +csci as( + select cs_bill_customer_sk customer_sk + ,cs_item_sk item_sk +from catalog_sales,date_dim +where cs_sold_date_sk = d_date_sk + and d_month_seq between 1190 and 1190 + 11 +group by cs_bill_customer_sk + ,cs_item_sk) + select sum(case when ssci.customer_sk is not null and csci.customer_sk is null then 1 else 0 end) store_only + ,sum(case when ssci.customer_sk is null and csci.customer_sk is not null then 1 else 0 end) catalog_only + ,sum(case when ssci.customer_sk is not null and csci.customer_sk is not null then 1 else 0 end) store_and_catalog +from ssci full outer join csci on (ssci.customer_sk=csci.customer_sk + and ssci.item_sk = csci.item_sk) +limit 100; + +-- end query 1 in stream 0 using template query97.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query98.sql b/src/s3select/TPCDS/sample-queries-tpcds/query98.sql new file mode 100644 index 000000000..ef405583b --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query98.sql @@ -0,0 +1,33 @@ +-- start query 1 in stream 0 using template query98.tpl and seed 345591136 +select i_item_id + ,i_item_desc + ,i_category + ,i_class + ,i_current_price + ,sum(ss_ext_sales_price) as itemrevenue + ,sum(ss_ext_sales_price)*100/sum(sum(ss_ext_sales_price)) over + (partition by i_class) as revenueratio +from + store_sales + ,item + ,date_dim +where + ss_item_sk = i_item_sk + and i_category in ('Home', 'Sports', 'Men') + and ss_sold_date_sk = d_date_sk + and d_date between cast('2002-01-05' as date) + and (cast('2002-01-05' as date) + interval '30' day) +group by + i_item_id + ,i_item_desc + ,i_category + ,i_class + ,i_current_price +order by + i_category + ,i_class + ,i_item_id + ,i_item_desc + ,revenueratio; + +-- end query 1 in stream 0 using template query98.tpl diff --git a/src/s3select/TPCDS/sample-queries-tpcds/query99.sql 
b/src/s3select/TPCDS/sample-queries-tpcds/query99.sql new file mode 100644 index 000000000..d6dfb4ff1 --- /dev/null +++ b/src/s3select/TPCDS/sample-queries-tpcds/query99.sql @@ -0,0 +1,35 @@ +-- start query 1 in stream 0 using template query99.tpl and seed 1819994127 +select + substr(w_warehouse_name,1,20) + ,sm_type + ,cc_name + ,sum(case when (cs_ship_date_sk - cs_sold_date_sk <= 30 ) then 1 else 0 end) as "30 days" + ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 30) and + (cs_ship_date_sk - cs_sold_date_sk <= 60) then 1 else 0 end ) as "31-60 days" + ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 60) and + (cs_ship_date_sk - cs_sold_date_sk <= 90) then 1 else 0 end) as "61-90 days" + ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 90) and + (cs_ship_date_sk - cs_sold_date_sk <= 120) then 1 else 0 end) as "91-120 days" + ,sum(case when (cs_ship_date_sk - cs_sold_date_sk > 120) then 1 else 0 end) as ">120 days" +from + catalog_sales + ,warehouse + ,ship_mode + ,call_center + ,date_dim +where + d_month_seq between 1178 and 1178 + 11 +and cs_ship_date_sk = d_date_sk +and cs_warehouse_sk = w_warehouse_sk +and cs_ship_mode_sk = sm_ship_mode_sk +and cs_call_center_sk = cc_call_center_sk +group by + substr(w_warehouse_name,1,20) + ,sm_type + ,cc_name +order by substr(w_warehouse_name,1,20) + ,sm_type + ,cc_name +limit 100; + +-- end query 1 in stream 0 using template query99.tpl diff --git a/src/s3select/TPCDS/tpcds_functions.bash b/src/s3select/TPCDS/tpcds_functions.bash new file mode 100644 index 000000000..67a64ff0b --- /dev/null +++ b/src/s3select/TPCDS/tpcds_functions.bash @@ -0,0 +1,40 @@ +#!/bin/bash + +run_tpcds() +{ +## END_POINT_IP=172.21.48.86 // RGW end point ip (local or remote) +## SCALE (2-1000): the bigger the SCALE, the longer it takes and the more space is taken. +## the `sleep 20` is for HADOOP; it needs some wait time, otherwise it may get into "safe mode" and abort execution + +## the following command is executed within a dedicated container; it connects HADOOP to a running RGW, boots HADOOP, and runs the TPCDS data-set generator. +## the results reside on CEPH object storage. +sudo docker run --name tpcds_generate --rm --env SCALE=2 --env END_POINT_IP=172.21.48.86 -it galsl/hadoop:presto_hive_conn sh -c \ +'/work/generate_key.bash; +. /etc/bashrc; +deploy_ceph_s3a_ip $END_POINT_IP; +start_hadoop; +sleep 20; +start_tpcds;' + +} + +move_from_tpcds_bucket_to_hive_bucket() +{ +## for the case where the data needs to move into a different bucket (the one trino points at) +## it is also possible to change the `create table ... external_location = ...` statements + +aws s3 sync s3://tpcds2 s3://hive +} + +trino_load_all_tpcds_tables_into_external() +{ +## runs create_tpcds_tables.sql; the "create_tpcds_tables.sql" file should reside in the trino container +sudo docker exec -it trino /bin/bash -c 'time trino --catalog hive --schema cephs3 -f create_tpcds_tables.sql' +} + +trino_show_tables() +{ +## runs any SQL statement in the Trino client.
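+## for illustration only (hypothetical example, assuming the TPCDS tables were already created by create_tpcds_tables.sql), an ad-hoc query can be issued the same way: +## sudo docker exec -it trino /bin/bash -c 'trino --catalog hive --schema cephs3 --execute "select count(*) from store_sales;"'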
+sudo docker exec -it trino /bin/bash -c 'trino --catalog hive --schema cephs3 --execute "show tables;";' +} + diff --git a/src/s3select/container/trino/hms_trino.yaml b/src/s3select/container/trino/hms_trino.yaml new file mode 100644 index 000000000..42d22f842 --- /dev/null +++ b/src/s3select/container/trino/hms_trino.yaml @@ -0,0 +1,31 @@ +version: '3' +services: + hms: + image: galsl/hms:dev + container_name: hms + environment: + # S3_ENDPOINT is the CEPH/RGW end-point URL + - S3_ENDPOINT=http://10.0.209.201:80 + - S3_ACCESS_KEY=abc1 + - S3_SECRET_KEY=abc1 + # the container starts with booting the hive metastore + command: sh -c '. ~/.bashrc; start_hive_metastore' + ports: + - 9083:9083 + networks: + - trino_hms + + trino: + image: trinodb/trino + container_name: trino + volumes: + # the trino directory contains the necessary configuration + - ./trino:/etc/trino + ports: + - 8080:8080 + networks: + - trino_hms + +networks: + trino_hms: + diff --git a/src/s3select/container/trino/run_trino_on_ceph.bash b/src/s3select/container/trino/run_trino_on_ceph.bash new file mode 100644 index 000000000..a9b1583d0 --- /dev/null +++ b/src/s3select/container/trino/run_trino_on_ceph.bash @@ -0,0 +1,86 @@ +#!/bin/bash + +root_dir() +{ + cd $(git rev-parse --show-toplevel) +} + +modify_end_point_on_hive_properties() +{ +#not in use +return; +#TODO if ./trino/catalog/hive.properties exists + + [ $# -lt 1 ] && echo type s3-endpoint-url && return + root_dir + export S3_ENDPOINT=$1 + cat container/trino/trino/catalog/hive.properties | awk -v x=${S3_ENDPOINT:-NO_SET} '{if(/hive.s3.endpoint/){print "hive.s3.endpoint="x"\n";} else {print $0;}}' > /tmp/hive.properties + cp /tmp/hive.properties container/trino/trino/catalog/hive.properties + cat ./container/trino/hms_trino.yaml | awk -v x=${S3_ENDPOINT:-NOT_SET} '{if(/[ *]- S3_ENDPOINT/){print "\t- S3_ENDPOINT="x"\n";} else {print $0;}}' > /tmp/hms_trino.yaml + cp /tmp/hms_trino.yaml ./container/trino/hms_trino.yaml + cd - +} + +trino_exec_command() +{ +## run a SQL statement on trino + sudo docker exec -it trino /bin/bash -c "time trino --catalog hive --schema cephs3 --execute \"$@\"" +} + +boot_trino_hms() +{ + root_dir + [ -z ${S3_ENDPOINT} ] && echo "missing env-variable S3_ENDPOINT (URL)" && return + [ -z ${S3_ACCESS_KEY} ] && echo missing env-variable S3_ACCESS_KEY && return + [ -z ${S3_SECRET_KEY} ] && echo missing env-variable S3_SECRET_KEY && return + + # modify hms_trino.yaml according to user setup (environment variables) + cat ./container/trino/hms_trino.yaml | \ + awk -v x=${S3_ENDPOINT:-NOT_SET} '{if(/- S3_ENDPOINT/){print " - S3_ENDPOINT="x;} else {print $0;}}' | \ + awk -v x=${S3_ACCESS_KEY:-NOT_SET} '{if(/- S3_ACCESS_KEY/){print " - S3_ACCESS_KEY="x;} else {print $0;}}' | \ + awk -v x=${S3_SECRET_KEY:-NOT_SET} '{if(/- S3_SECRET_KEY/){print " - S3_SECRET_KEY="x;} else {print $0;}}' > /tmp/hms_trino.yaml + cp /tmp/hms_trino.yaml ./container/trino/hms_trino.yaml + + + + # modify hive.properties according to user setup (environment variables) + cat container/trino/trino/catalog/hive.properties | \ + awk -v x=${S3_ENDPOINT:-NO_SET} '{if(/hive.s3.endpoint/){print "hive.s3.endpoint="x"\n";} else {print $0;}}' | \ + awk -v x=${S3_ACCESS_KEY:-NO_SET} '{if(/hive.s3.aws-access-key/){print "hive.s3.aws-access-key="x;} else {print $0;}}' | \ + awk -v x=${S3_SECRET_KEY:-NO_SET} '{if(/hive.s3.aws-secret-key/){print "hive.s3.aws-secret-key="x;} else {print $0;}}' > /tmp/hive.properties + cp /tmp/hive.properties ./container/trino/trino/catalog/hive.properties + + sudo
docker compose -f ./container/trino/hms_trino.yaml up -d + cd - +} + +shutdown_trino_hms() +{ + root_dir + sudo docker compose -f ./container/trino/hms_trino.yaml down + cd - +} + +trino_create_table() +{ +table_name=$1 +create_table_comm="create table hive.cephs3.${table_name}(c1 varchar,c2 varchar,c3 varchar,c4 varchar, c5 varchar,c6 varchar,c7 varchar,c8 varchar,c9 varchar,c10 varchar) + WITH ( external_location = 's3a://hive/warehouse/cephs3/${table_name}/',format = 'TEXTFILE',textfile_field_separator = ',');" +sudo docker exec -it trino /bin/bash -c "trino --catalog hive --schema cephs3 --execute \"${create_table_comm}\"" +} + +tpcds_cli() +{ +## a CLI example for generating TPCDS data +sudo docker run --env S3_ENDPOINT=172.17.0.1:8000 --env S3_ACCESS_KEY=b2345678901234567890 --env S3_SECRET_KEY=b234567890123456789012345678901234567890 --env BUCKET_NAME=hive --env SCALE=2 -it galsl/hadoop:tpcds bash -c '/root/run_tpcds_with_scale' +} + +update_table_external_location() +{ +root_dir +[ -z ${BUCKET_NAME} ] && echo need to define BUCKET_NAME && return +[ -z ${SCALE} ] && echo need to define SCALE && return + +cat TPCDS/ddl/create_tpcds_tables.sql | sed "s/tpcds2\/4/${BUCKET_NAME}\/SCALE_${SCALE}/" +} + diff --git a/src/s3select/container/trino/trino/catalog/hive.properties b/src/s3select/container/trino/trino/catalog/hive.properties new file mode 100644 index 000000000..645948f24 --- /dev/null +++ b/src/s3select/container/trino/trino/catalog/hive.properties @@ -0,0 +1,33 @@ +connector.name=hive +hive.metastore.uri=thrift://hms:9083 + +#hive.metastore.warehouse.dir=s3a://hive/ + +hive.allow-drop-table=true +hive.allow-rename-table=true +hive.allow-add-column=true +hive.allow-drop-column=true +hive.allow-rename-column=true + +hive.non-managed-table-writes-enabled=true +hive.s3select-pushdown.enabled=true +hive.s3.aws-access-key=abc1 +hive.s3.aws-secret-key=abc1 + +# should modify per s3-endpoint-url +hive.s3.endpoint=http://10.0.209.201:80 + + + + + + + + +#hive.s3.max-connections=1 +#hive.s3select-pushdown.max-connections=1 + +hive.s3.connect-timeout=100s +hive.s3.socket-timeout=100s +hive.max-splits-per-second=10000 +hive.max-split-size=128MB diff --git a/src/s3select/container/trino/trino/config.properties b/src/s3select/container/trino/trino/config.properties new file mode 100644 index 000000000..a11cba39d --- /dev/null +++ b/src/s3select/container/trino/trino/config.properties @@ -0,0 +1,5 @@ +#single node install config +coordinator=true +node-scheduler.include-coordinator=true +http-server.http.port=8080 +discovery.uri=http://localhost:8080 diff --git a/src/s3select/container/trino/trino/jvm.config b/src/s3select/container/trino/trino/jvm.config new file mode 100644 index 000000000..47e9e3176 --- /dev/null +++ b/src/s3select/container/trino/trino/jvm.config @@ -0,0 +1,19 @@ +-server +-agentpath:/usr/lib/trino/bin/libjvmkill.so +-XX:InitialRAMPercentage=80 +-XX:MaxRAMPercentage=80 +-XX:G1HeapRegionSize=32M +-XX:+ExplicitGCInvokesConcurrent +-XX:+HeapDumpOnOutOfMemoryError +-XX:+ExitOnOutOfMemoryError +-XX:-OmitStackTraceInFastThrow +-XX:ReservedCodeCacheSize=256M +-XX:PerMethodRecompilationCutoff=10000 +-XX:PerBytecodeRecompilationCutoff=10000 +-Djdk.attach.allowAttachSelf=true +-Djdk.nio.maxCachedBufferSize=2000000 +# Improve AES performance for S3, etc. 
on ARM64 (JDK-8271567) +-XX:+UnlockDiagnosticVMOptions +-XX:+UseAESCTRIntrinsics +# Disable Preventive GC for performance reasons (JDK-8293861) +-XX:-G1UsePreventiveGC diff --git a/src/s3select/container/trino/trino/log.properties b/src/s3select/container/trino/trino/log.properties new file mode 100644 index 000000000..abee45ebc --- /dev/null +++ b/src/s3select/container/trino/trino/log.properties @@ -0,0 +1,2 @@ +# Enable verbose logging from Trino +#io.trino=DEBUG diff --git a/src/s3select/container/trino/trino/node.properties b/src/s3select/container/trino/trino/node.properties new file mode 100644 index 000000000..5b02ff7f0 --- /dev/null +++ b/src/s3select/container/trino/trino/node.properties @@ -0,0 +1,2 @@ +node.environment=docker +node.data-dir=/data/trino diff --git a/src/s3select/example/CMakeLists.txt b/src/s3select/example/CMakeLists.txt new file mode 100644 index 000000000..8b5c8c070 --- /dev/null +++ b/src/s3select/example/CMakeLists.txt @@ -0,0 +1,23 @@ +add_executable(s3select_example s3select_example.cpp) +target_include_directories(s3select_example PUBLIC ../include ../rapidjson/include) + +find_package(Arrow QUIET) + +if(Arrow_FOUND) + message( "arrow is installed") + add_executable(csv_to_parquet csv_to_parquet.cpp) + target_include_directories(csv_to_parquet PUBLIC ../include) + target_link_libraries(s3select_example boost_date_time boost_system boost_thread parquet arrow boost_filesystem) + target_link_libraries(csv_to_parquet boost_date_time boost_system boost_thread parquet arrow) +else() + target_link_libraries(s3select_example boost_date_time boost_system boost_thread boost_filesystem) +endif() + +add_executable(generate_rand_csv generate_rand_csv.c) + +add_custom_command(OUTPUT expr_genrator.py COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/expr_genrator.py expr_genrator.py + COMMENT "Copy expr_genrator.py" + VERBATIM) + +add_custom_target(expr_generator ALL DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/expr_genrator.py) + diff --git a/src/s3select/example/csv_to_parquet.cpp b/src/s3select/example/csv_to_parquet.cpp new file mode 100644 index 000000000..37a1ed0f2 --- /dev/null +++ b/src/s3select/example/csv_to_parquet.cpp @@ -0,0 +1,417 @@ + +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include +#include +#include +#include +#include +#include +#include "boost/date_time/gregorian/gregorian.hpp" +#include "boost/date_time/posix_time/posix_time.hpp" +#include +#include +#include +#include + + +#include +#include + +#include +#include + +using parquet::ConvertedType; +using parquet::Repetition; +using parquet::Type; +using parquet::schema::GroupNode; +using parquet::schema::PrimitiveNode; + +/* + * This example describes writing and reading Parquet Files in C++ and serves as a + * reference to the API. 
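+ * In this particular converter every CSV token is written as a string (BYTE_ARRAY) column; + * see column_string_2() and the ba_writer WriteBatch() calls below.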
+ * The file contains all the physical data types supported by Parquet. + * This example uses the RowGroupWriter API that supports writing RowGroups based on a + *certain size + **/ + +/* Parquet is a structured columnar file format + * Parquet File = "Parquet data" + "Parquet Metadata" + * "Parquet data" is simply a vector of RowGroups. Each RowGroup is a batch of rows in a + * columnar layout + * "Parquet Metadata" contains the "file schema" and attributes of the RowGroups and their + * Columns + * "file schema" is a tree where each node is either a primitive type (leaf nodes) or a + * complex (nested) type (internal nodes) + * For specific details, please refer the format here: + * https://github.com/apache/parquet-format/blob/master/LogicalTypes.md + **/ + +#include +#include +using namespace boost; +using namespace std; + +//constexpr int NUM_ROWS = 10000000; +constexpr int NUM_ROWS = 10000; + +//constexpr int64_t ROW_GROUP_SIZE = 16 * 1024 * 1024; // 16 MB +constexpr int64_t ROW_GROUP_SIZE = 1024 * 1024; + +const char PARQUET_FILENAME[] = "csv_converted.parquet"; + +static std::shared_ptr column_string_2(uint32_t num_of_columns) { + + parquet::schema::NodeVector fields; + + for(uint32_t i=0;i( + GroupNode::Make("schema", Repetition::REQUIRED, fields)); +} + + +class tokenize { + + public: + const char *s; + std::string input; + const char *p; + bool last_token; + + tokenize(std::string& in):s(0),input(in),p(input.c_str()),last_token(false) + { + }; + + void get_token(std::string& token) + { + if(!*p) + { + token = ""; + last_token = true; + return; + } + + + s=p; + while(*p && *p != ',' && *p != '\n') p++; + + token = std::string(s,p); + p++; + } + + bool is_last() + { + return last_token == true; + } +}; + +void generate_rand_columns_csv_datetime(std::string& out, size_t size) { + std::stringstream ss; + auto year = [](){return rand()%100 + 1900;}; + auto month = [](){return 1 + rand()%12;}; + auto day = [](){return 1 + rand()%28;}; + auto hours = [](){return rand()%24;}; + auto minutes = [](){return rand()%60;}; + auto seconds = [](){return rand()%60;}; + + for (auto i = 0U; i < size; ++i) { + ss << year() << "-" << std::setw(2) << std::setfill('0')<< month() << "-" << std::setw(2) << std::setfill('0')<< day() << "T" < out_file; + PARQUET_ASSIGN_OR_THROW(out_file, FileClass::Open(PARQUET_FILENAME)); + + // Setup the parquet schema + std::shared_ptr schema = column_string_2(csv_num_of_columns); + + // Add writer properties + parquet::WriterProperties::Builder builder; + // builder.compression(parquet::Compression::SNAPPY); + std::shared_ptr props = builder.build(); + + // Create a ParquetFileWriter instance + std::shared_ptr file_writer = + parquet::ParquetFileWriter::Open(out_file, schema, props); + + // Append a BufferedRowGroup to keep the RowGroup open until a certain size + parquet::RowGroupWriter* rg_writer = file_writer->AppendBufferedRowGroup(); + + int num_columns = file_writer->num_columns(); + std::vector buffered_values_estimate(num_columns, 0); + + for (int i = 0; !csv_tokens.is_last() && itotal_bytes_written() + rg_writer->total_compressed_bytes() + + estimated_bytes) > ROW_GROUP_SIZE) { + rg_writer->Close(); + std::fill(buffered_values_estimate.begin(), buffered_values_estimate.end(), 0); + rg_writer = file_writer->AppendBufferedRowGroup(); + } + + int col_id; + for(col_id=0;col_id(rg_writer->column(col_id)); + parquet::ByteArray ba_value; + + std::string token; + csv_tokens.get_token(token); + if(token.size() == 0) + {//null column + int16_t definition_level = 0; + 
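+ // definition level 0 marks this optional value as NULL, so WriteBatch() is given no value buffer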
ba_writer->WriteBatch(1, &definition_level, nullptr, nullptr); + } + else + { + int16_t definition_level = 1; + ba_value.ptr = (uint8_t*)(token.data()); + ba_value.len = token.size(); + ba_writer->WriteBatch(1, &definition_level, nullptr, &ba_value); + } + + buffered_values_estimate[col_id] = ba_writer->EstimatedBufferedValueBytes(); + + + } //end-for columns + + if(csv_tokens.is_last() && col_id(rg_writer->column(col_id)); + + int16_t definition_level = 0; + ba_writer->WriteBatch(1, &definition_level, nullptr, nullptr); + + buffered_values_estimate[col_id] = ba_writer->EstimatedBufferedValueBytes(); + } + + } + + } // end-for rows + + // Close the RowGroupWriter + rg_writer->Close(); + // Close the ParquetFileWriter + file_writer->Close(); + + // Write the bytes to file + DCHECK(out_file->Close().ok()); + + } catch (const std::exception& e) { + std::cerr << "Parquet write error: " << e.what() << std::endl; + return -1; + } + + return 0; +} + + +static int csv_file_to_parquet(int argc,char **argv) +{ + //open file (CSV) and load into std::string, convert to parquet(save to FS) + + if (argc<2) exit(-1); + + FILE* fp; + struct stat l_buf; + int st = lstat(argv[1], &l_buf); + if(st<0) exit(-1); + + printf("input csv file size = %ld\n",l_buf.st_size); + + char * buffer = new char[ l_buf.st_size ]; + fp = fopen(argv[1],"r"); + + if(!fp) exit(-1); + + size_t read_sz = fread(buffer, 1, l_buf.st_size,fp); + + std::string csv_obj; + csv_obj.append(buffer,read_sz); + + csv_to_parquet(csv_obj); + + return 0; +} + +int csv_object_to_parquet(int argc,char **argv) +{ + srand(time(0)); + + std::string csv_obj; + std::string expected_result; + generate_rand_columns_csv(csv_obj, 128); + //generate_rand_csv_datetime_to_string(csv_obj, expected_result, 10000); + //generate_rand_columns_csv_with_null(csv_obj, 10000); + //generate_columns_csv(csv_obj,128); + //generate_rand_columns_csv_datetime(csv_obj,10000); + generate_fix_columns_csv(csv_obj,128); + FILE *fp = fopen("10k.csv","w"); + + if(fp) + { + fwrite(csv_obj.data(),csv_obj.size(),1,fp); + fclose(fp); + } + else + { + exit(-1); + } + + //csv_obj="1,2,3,4,5,6,7,8,9,10\n10,20,30,40,50,60,70,80,90,100\n"; + csv_obj="1,2,3,4\n"; + + csv_to_parquet(csv_obj); + + return 0; +} + +int main(int argc,char **argv) +{ + return csv_file_to_parquet(argc,argv); +} + diff --git a/src/s3select/example/expr_genrator.py b/src/s3select/example/expr_genrator.py new file mode 100755 index 000000000..5905e9832 --- /dev/null +++ b/src/s3select/example/expr_genrator.py @@ -0,0 +1,9 @@ +import random +import sys + +def expr(depth): + if depth==1 or random.random()<1.0/(2**depth-1): + return str(int(random.random() * 100) + 1)+".0" + return '(' + expr(depth-1) + random.choice(['+','-','*','/']) + expr(depth-1) + ')' + +print (expr( int(sys.argv[1]) )) diff --git a/src/s3select/example/generate_rand_csv.c b/src/s3select/example/generate_rand_csv.c new file mode 100644 index 000000000..67d52adaa --- /dev/null +++ b/src/s3select/example/generate_rand_csv.c @@ -0,0 +1,28 @@ +#include +#include + + +int main(int argc, char** argv) +{ + if (argc<3) + { + printf("%s \n", argv[0]); + return -1; + } + + srand(1234); + int line_no=0; + for(int i=0; i "$PREFIX"/tmp.c + +#include +int main() +{ +printf("%f\n",$*); +} +@@ +gcc -o "$PREFIX"/a.out "$PREFIX"/tmp.c +"$PREFIX"/a.out +} + +expr_test() +{ +## test the arithmetic evaluation of s3select against C program +for i in {1..100} +do + e=$(python3 "$PREFIX"/expr_genrator.py 5) + echo expression["$i"]="$e" + r1=$(s3select_calc "$e") + 
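+ ## run the same expression through the compiled C program; the awk check below expects the two results to agree within 0.00001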
r2=$(c_calc "$e") + echo "$r1" "$r2" + + ## should be zero or very close to zero; ( s3select is C compile program ) + res=$(echo "" | awk -v e="$e" -v r1="$r1" -v r2="$r2" 'function abs(n){if (n<0) return -n; else return n;}{if (abs(r1-r2) > 0.00001) {print "MISSMATCH result for expression",e;}}') + if test "$res" != ""; then + echo "$res" + exit 1 + fi +done +} + +aggregate_test() +{ +## generate_rand_csv is generating with the same seed +echo check sum +s3select_val=$("$PREFIX"/generate_rand_csv 10 10 | "$PREFIX"/s3select_example -q 'select sum(int(_1)) from stdin;') +awk_val=$("$PREFIX"/generate_rand_csv 10 10 | awk 'BEGIN{FS=",";} {s+=$1;} END{print s;}') +s3select_val=${s3select_val::-1} +echo "$s3select_val" "$awk_val" +if test "$s3select_val" -ne "$awk_val"; then + exit 1 +fi +echo check min +s3select_val=$("$PREFIX"/generate_rand_csv 10 10 | "$PREFIX"/s3select_example -q 'select min(int(_1)) from stdin;') +awk_val=$("$PREFIX"/generate_rand_csv 10 10 | awk 'BEGIN{FS=",";min=100000;} {if(min>$1) min=$1;} END{print min;}') +s3select_val=${s3select_val::-1} +echo "$s3select_val" "$awk_val" +if test "$s3select_val" -ne "$awk_val"; then + exit 1 +fi +echo check max +s3select_val=$("$PREFIX"/generate_rand_csv 10 10 | "$PREFIX"/s3select_example -q 'select max(int(_1)) from stdin;') +awk_val=$("$PREFIX"/generate_rand_csv 10 10 | awk 'BEGIN{FS=",";max=0;} {if(max<$1) max=$1;} END{print max;}' ) +s3select_val=${s3select_val::-1} +echo "$s3select_val" "$awk_val" +if test "$s3select_val" -ne "$awk_val"; then + exit 1 +fi +echo check substr and count +s3select_val=$("$PREFIX"/generate_rand_csv 10000 10 | "$PREFIX"/s3select_example -q 'select count(int(_1)) from stdin where int(_1)>200 and int(_1)<250;') +awk_val=$("$PREFIX"/generate_rand_csv 10000 10 | "$PREFIX"/s3select_example -q 'select substring(_1,1,1) from stdin where int(_1)>200 and int(_1)<250;' | uniq -c | awk '{print $1;}') +s3select_val=${s3select_val::-1} +echo "$s3select_val" "$awk_val" +if test "$s3select_val" -ne "$awk_val"; then + exit 1 +fi +} + +parquet_test() +{ +s3select_val=$(${PREFIX}/s3select_example -q "select count(*) from $(realpath parquet_mix_types.parquet) where _1>555 and _1<777;" | grep -v '^\[') + +if test "${s3select_val}" != "221,"; then + echo "parquet test failed,${s3select_val}" +# exit +fi + +s3select_val=$(${PREFIX}/s3select_example -q "select c5,c1,int(_1*0+6),int(_3*0+4),substring(c1,int(_1*0+6),int(_3*0+4)) from $(realpath parquet_mix_types.parquet) where ((c1 like \"%wedd%\") and c0 <100 ) and c5 between 2.1000000000000001 and 2.6200000000000001 and c4 between \"col4_1\" and \"col4_2\";" | grep -v '^\[') + +echo ${s3select_val} +} + +############################################################### + +expr_test +aggregate_test +parquet_test + +rm "$PREFIX"/tmp.c "$PREFIX"/a.out + +exit 0 + diff --git a/src/s3select/example/s3select_example.cpp b/src/s3select/example/s3select_example.cpp new file mode 100644 index 000000000..71aff3d01 --- /dev/null +++ b/src/s3select/example/s3select_example.cpp @@ -0,0 +1,711 @@ +#include "s3select.h" +#include +#include +#include +#include +#include +#include +#include +#include + +using namespace s3selectEngine; +using namespace BOOST_SPIRIT_CLASSIC_NS; + +class awsCli_handler { + + +//TODO get parameter +private: + std::unique_ptr s3select_syntax; + std::string m_s3select_query; + std::string m_result; + std::unique_ptr m_s3_csv_object; + std::string m_column_delimiter;//TODO remove + std::string m_quot;//TODO remove + std::string m_row_delimiter;//TODO remove + 
std::string m_compression_type;//TODO remove + std::string m_escape_char;//TODO remove + std::unique_ptr m_buff_header; + std::string m_header_info; + std::string m_sql_query; + uint64_t m_total_object_processing_size; + +public: + + awsCli_handler(): + s3select_syntax(std::make_unique()), + m_s3_csv_object(std::unique_ptr()), + m_buff_header(std::make_unique(1000)), + m_total_object_processing_size(0), + crc32(std::unique_ptr()) + { + } + + enum header_name_En + { + EVENT_TYPE, + CONTENT_TYPE, + MESSAGE_TYPE + }; + static const char* header_name_str[3]; + + enum header_value_En + { + RECORDS, + OCTET_STREAM, + EVENT, + CONT + }; + static const char* header_value_str[4]; + +private: + + void encode_short(char *buff, uint16_t s, int &i) + { + short x = htons(s); + memcpy(buff, &x, sizeof(s)); + i += sizeof(s); + } + + void encode_int(char *buff, u_int32_t s, int &i) + { + u_int32_t x = htonl(s); + memcpy(buff, &x, sizeof(s)); + i += sizeof(s); + } + + int create_header_records(char* buff) + { + int i = 0; + + //1 + buff[i++] = char(strlen(header_name_str[EVENT_TYPE])); + memcpy(&buff[i], header_name_str[EVENT_TYPE], strlen(header_name_str[EVENT_TYPE])); + i += strlen(header_name_str[EVENT_TYPE]); + buff[i++] = char(7); + encode_short(&buff[i], uint16_t(strlen(header_value_str[RECORDS])), i); + memcpy(&buff[i], header_value_str[RECORDS], strlen(header_value_str[RECORDS])); + i += strlen(header_value_str[RECORDS]); + + //2 + buff[i++] = char(strlen(header_name_str[CONTENT_TYPE])); + memcpy(&buff[i], header_name_str[CONTENT_TYPE], strlen(header_name_str[CONTENT_TYPE])); + i += strlen(header_name_str[CONTENT_TYPE]); + buff[i++] = char(7); + encode_short(&buff[i], uint16_t(strlen(header_value_str[OCTET_STREAM])), i); + memcpy(&buff[i], header_value_str[OCTET_STREAM], strlen(header_value_str[OCTET_STREAM])); + i += strlen(header_value_str[OCTET_STREAM]); + + //3 + buff[i++] = char(strlen(header_name_str[MESSAGE_TYPE])); + memcpy(&buff[i], header_name_str[MESSAGE_TYPE], strlen(header_name_str[MESSAGE_TYPE])); + i += strlen(header_name_str[MESSAGE_TYPE]); + buff[i++] = char(7); + encode_short(&buff[i], uint16_t(strlen(header_value_str[EVENT])), i); + memcpy(&buff[i], header_value_str[EVENT], strlen(header_value_str[EVENT])); + i += strlen(header_value_str[EVENT]); + + return i; +} + + std::unique_ptr crc32; + + int create_message(std::string &out_string, u_int32_t result_len, u_int32_t header_len) + { + u_int32_t total_byte_len = 0; + u_int32_t preload_crc = 0; + u_int32_t message_crc = 0; + int i = 0; + char *buff = out_string.data(); + + if (crc32 == 0) + { + // the parameters are according to CRC-32 algorithm and its aligned with AWS-cli checksum + crc32 = std::unique_ptr(new boost::crc_optimal<32, 0x04C11DB7, 0xFFFFFFFF, 0xFFFFFFFF, true, true>); + } + + total_byte_len = result_len + 16; + + encode_int(&buff[i], total_byte_len, i); + encode_int(&buff[i], header_len, i); + + crc32->reset(); + *crc32 = std::for_each(buff, buff + 8, *crc32); + preload_crc = (*crc32)(); + encode_int(&buff[i], preload_crc, i); + + i += result_len; + + crc32->reset(); + *crc32 = std::for_each(buff, buff + i, *crc32); + message_crc = (*crc32)(); + + int out_encode; + encode_int(reinterpret_cast(&out_encode), message_crc, i); + out_string.append(reinterpret_cast(&out_encode),sizeof(out_encode)); + + return i; + } + +#define PAYLOAD_LINE "\n\n\n\n" +#define END_PAYLOAD_LINE "\n" + +public: + + //std::string get_error_description(){} + + std::string get_result() + { + return m_result; + } + + int run_s3select(const char 
*query, const char *input, size_t input_length, size_t object_size) + { + int status = 0; + csv_object::csv_defintions csv; + + m_result = "012345678901"; //12 positions for header-crc + + int header_size = 0; + + if (m_s3_csv_object == 0) + { + s3select_syntax->parse_query(query); + + if (m_row_delimiter.size()) + { + csv.row_delimiter = *m_row_delimiter.c_str(); + } + + if (m_column_delimiter.size()) + { + csv.column_delimiter = *m_column_delimiter.c_str(); + } + + if (m_quot.size()) + { + csv.quot_char = *m_quot.c_str(); + } + + if (m_escape_char.size()) + { + csv.escape_char = *m_escape_char.c_str(); + } + + if (m_header_info.compare("IGNORE") == 0) + { + csv.ignore_header_info = true; + } + else if (m_header_info.compare("USE") == 0) + { + csv.use_header_info = true; + } + + m_s3_csv_object = std::unique_ptr<s3selectEngine::csv_object>(new s3selectEngine::csv_object(s3select_syntax.get(), csv)); + } + + if (s3select_syntax->get_error_description().empty() == false) + { + header_size = create_header_records(m_buff_header.get()); + m_result.append(m_buff_header.get(), header_size); + m_result.append(PAYLOAD_LINE); + m_result.append(s3select_syntax->get_error_description()); + //ldout(s->cct, 10) << "s3-select query: failed to parse query; {" << s3select_syntax->get_error_description() << "}" << dendl; + status = -1; + } + else + { + header_size = create_header_records(m_buff_header.get()); + m_result.append(m_buff_header.get(), header_size); + m_result.append(PAYLOAD_LINE); + //status = m_s3_csv_object->run_s3select_on_stream(m_result, input, input_length, s->obj_size); + status = m_s3_csv_object->run_s3select_on_stream(m_result, input, input_length, object_size); + if (status < 0) + { + m_result.append(m_s3_csv_object->get_error_description()); + } + } + + if (m_result.size() > strlen(PAYLOAD_LINE)) + { + m_result.append(END_PAYLOAD_LINE); + create_message(m_result, m_result.size() - 12, header_size); + //s->formatter->write_bin_data(m_result.data(), buff_len); + //if (op_ret < 0) + //{ + // return op_ret; + //} + } + //rgw_flush_formatter_and_reset(s, s->formatter); + + return status; + } + //int extract_by_tag(std::string tag_name, std::string& result); + + //void convert_escape_seq(std::string& esc); + + //int handle_aws_cli_parameters(std::string& sql_query); + +}; + +const char* awsCli_handler::header_name_str[3] = {":event-type", ":content-type", ":message-type"}; +const char* awsCli_handler::header_value_str[4] = {"Records", "application/octet-stream", "event","cont"}; +int run_on_localFile(char* input_query); + +bool is_parquet_file(const char * fn) +{//differentiate between csv and parquet + const char * ext = "parquet"; + + if(strstr(fn+strlen(fn)-strlen(ext), ext )) + { + return true; + } + + return false; +} + +#ifdef _ARROW_EXIST +int run_query_on_parquet_file(const char* input_query, const char* input_file) +{ + int status; + s3select s3select_syntax; + + status = s3select_syntax.parse_query(input_query); + if (status != 0) + { + std::cout << "failed to parse query " << s3select_syntax.get_error_description() << std::endl; + return -1; + } + + FILE *fp; + + fp=fopen(input_file,"r"); + + if(!fp){ + std::cout << "cannot open " << input_file << std::endl; + return -1; + } + + std::function fp_get_size=[&]() + { + struct stat l_buf; + lstat(input_file,&l_buf); + return l_buf.st_size; + }; + + std::function fp_range_req=[&](int64_t start,int64_t length,void *buff,optional_yield*y) + { + fseek(fp,start,SEEK_SET); + size_t read_sz = fread(buff, 1, length, fp); + return read_sz; + }; + + rgw_s3select_api
rgw; + rgw.set_get_size_api(fp_get_size); + rgw.set_range_req_api(fp_range_req); + + std::function fp_s3select_result_format = [](std::string& result){std::cout << result;result.clear();return 0;}; + std::function fp_s3select_header_format = [](std::string& result){result="";return 0;}; + std::function fp_debug = [](const char* msg) + { + std::cout << "DEBUG: {" << msg << "}" << std::endl; + }; + + parquet_object parquet_processor(input_file,&s3select_syntax,&rgw); + //parquet_processor.set_external_debug_system(fp_debug); + + std::string result; + + do + { + try + { + status = parquet_processor.run_s3select_on_object(result,fp_s3select_result_format,fp_s3select_header_format); + } + catch (base_s3select_exception &e) + { + std::cout << e.what() << std::endl; + //m_error_description = e.what(); + //m_error_count++; + if (e.severity() == base_s3select_exception::s3select_exp_en_t::FATAL) //abort query execution + { + return -1; + } + } + + if(status<0) + { + std::cout << parquet_processor.get_error_description() << std::endl; + break; + } + + std::cout << result << std::endl; + + if(status == 2) // limit reached + { + break; + } + + } while (0); + + return 0; +} +#else +int run_query_on_parquet_file(const char* input_query, const char* input_file) +{ + std::cout << "arrow is not installed" << std::endl; + return 0; +} +#endif //_ARROW_EXIST + +#define BUFFER_SIZE (4*1024*1024) +int process_json_query(const char* input_query,const char* fname) +{//purpose: process json query + + s3select s3select_syntax; + int status = s3select_syntax.parse_query(input_query); + if (status != 0) + { + std::cout << "failed to parse query " << s3select_syntax.get_error_description() << std::endl; + return -1; + } + + std::ifstream input_file_stream; + try { + input_file_stream = std::ifstream(fname, std::ios::in | std::ios::binary); + } + catch( ... 
+  {
+    std::cout << "failed to open file " << fname << std::endl;
+    exit(-1);
+  }
+
+  auto object_sz = boost::filesystem::file_size(fname);
+  json_object json_query_processor(&s3select_syntax);
+  std::string buff(BUFFER_SIZE, 0);
+  std::string result;
+
+  size_t read_sz = input_file_stream.read(buff.data(), BUFFER_SIZE).gcount();
+  int chunk_count = 0;
+  size_t bytes_read = 0;
+  while (read_sz)
+  {
+    bytes_read += read_sz;
+    std::cout << "read next chunk " << chunk_count++ << ":" << read_sz << ":" << bytes_read << "\r";
+    result.clear();
+
+    try
+    {
+      status = json_query_processor.run_s3select_on_stream(result, buff.data(), read_sz, object_sz);
+    }
+    catch (base_s3select_exception& e)
+    {
+      std::cout << e.what() << std::endl;
+      if (e.severity() == base_s3select_exception::s3select_exp_en_t::FATAL) //abort query execution
+      {
+        return -1;
+      }
+    }
+
+    if (result.size())
+    {
+      std::cout << result << std::endl;
+    }
+
+    if (status < 0)
+    {
+      std::cout << "failure upon processing " << std::endl;
+      return -1;
+    }
+    if (json_query_processor.is_sql_limit_reached())
+    {
+      std::cout << "json processing reached limit " << std::endl;
+      break;
+    }
+    read_sz = input_file_stream.read(buff.data(), BUFFER_SIZE).gcount();
+  }
+  try
+  {
+    result.clear();
+    json_query_processor.run_s3select_on_stream(result, 0, 0, object_sz);
+  }
+  catch (base_s3select_exception& e)
+  {
+    std::cout << e.what() << std::endl;
+    if (e.severity() == base_s3select_exception::s3select_exp_en_t::FATAL) //abort query execution
+    {
+      return -1;
+    }
+  }
+
+  std::cout << result << std::endl;
+  return 0;
+}
+
+int run_on_localFile(char* input_query)
+{
+  //purpose: demonstrate the s3select functionalities
+  s3select s3select_syntax;
+
+  if (!input_query)
+  {
+    std::cout << "type -q 'select ... from ... '" << std::endl;
+    return -1;
+  }
+
+  int status = s3select_syntax.parse_query(input_query);
+  if (status != 0)
+  {
+    std::cout << "failed to parse query " << s3select_syntax.get_error_description() << std::endl;
+    return -1;
+  }
+
+  std::string object_name = s3select_syntax.get_from_clause();
+
+  if (is_parquet_file(object_name.c_str()))
+  {
+    try
+    {
+      return run_query_on_parquet_file(input_query, object_name.c_str());
+    }
+    catch (base_s3select_exception& e)
+    {
+      std::cout << e.what() << std::endl;
+      if (e.severity() == base_s3select_exception::s3select_exp_en_t::FATAL) //abort query execution
+      {
+        return -1;
+      }
+    }
+  }
+
+  FILE* fp = nullptr;
+
+  if (object_name.compare("stdin") == 0)
+  {
+    fp = stdin;
+  }
+  else
+  {
+    fp = fopen(object_name.c_str(), "r");
+  }
+
+  if (!fp)
+  {
+    std::cout << "input stream is not valid, abort;" << std::endl;
+    return -1;
+  }
+
+  struct stat statbuf;
+  lstat(object_name.c_str(), &statbuf);
+
+  std::string s3select_result;
+  s3selectEngine::csv_object::csv_defintions csv;
+  csv.use_header_info = false;
+  csv.quote_fields_always = false;
+
+#define CSV_QUOT "CSV_ALWAYS_QUOT"
+#define CSV_COL_DELIM "CSV_COLUMN_DELIMETER"
+#define CSV_ROW_DELIM "CSV_ROW_DELIMITER"
+#define CSV_HEADER_INFO "CSV_HEADER_INFO"
+
+  if (getenv(CSV_QUOT))
+  {
+    csv.quote_fields_always = true;
+  }
+  if (getenv(CSV_COL_DELIM))
+  {
+    csv.column_delimiter = *getenv(CSV_COL_DELIM);
+  }
+  if (getenv(CSV_ROW_DELIM))
+  {
+    csv.row_delimiter = *getenv(CSV_ROW_DELIM);
+  }
+  if (getenv(CSV_HEADER_INFO))
+  {
+    csv.use_header_info = true;
+  }
+
+  s3selectEngine::csv_object s3_csv_object(&s3select_syntax, csv);
+
+  std::function<void(const char*)> fp_debug = [](const char* msg)
+  {
+    std::cout << "DEBUG" << msg << std::endl;
+  };
+
+  //s3_csv_object.set_external_debug_system(fp_debug);
+
+#define BUFF_SIZE (1024*1024*4) //simulate 4mb parts in s3 object
+  char* buff = (char*)malloc(BUFF_SIZE);
+  while (1)
+  {
+    buff[0] = 0;
+    size_t input_sz = fread(buff, 1, BUFF_SIZE, fp);
+    char* in = buff;
+
+    if (!input_sz)
+    {
+      if (fp == stdin)
+      {
+        status = s3_csv_object.run_s3select_on_stream(s3select_result, nullptr, 0, 0);
+        if (s3select_result.size() > 0)
+        {
+          std::cout << s3select_result;
+        }
+      }
+      break;
+    }
+
+    if (fp != stdin)
+    {
+      status = s3_csv_object.run_s3select_on_stream(s3select_result, in, input_sz, statbuf.st_size);
+    }
+    else
+    {
+      status = s3_csv_object.run_s3select_on_stream(s3select_result, in, input_sz, INT_MAX);
+    }
+
+    if (status < 0)
+    {
+      std::cout << "failure on execution " << std::endl << s3_csv_object.get_error_description() << std::endl;
+      break;
+    }
+
+    if (s3select_result.size() > 0)
+    {
+      std::cout << s3select_result;
+    }
+
+    if (!input_sz || feof(fp) || status == 2)
+    {
+      break;
+    }
+
+    s3select_result.clear();
+  } //end-while
+
+  free(buff);
+  fclose(fp);
+
+  return 0;
+}
+
+int run_on_single_query(const char* fname, const char* query)
+{
+  std::unique_ptr<awsCli_handler> awscli = std::make_unique<awsCli_handler>();
+  std::ifstream input_file_stream;
+  try
+  {
+    input_file_stream = std::ifstream(fname, std::ios::in | std::ios::binary);
+  }
+  catch (...)
+  {
+    std::cout << "failed to open file " << fname << std::endl;
+    exit(-1);
+  }
+
+  if (is_parquet_file(fname))
+  {
+    std::string result;
+    int status = run_query_on_parquet_file(query, fname);
+    return status;
+  }
+
+  s3select query_ast;
+  auto status = query_ast.parse_query(query);
+  if (status < 0)
+  {
+    std::cout << "failed to parse query : " << query_ast.get_error_description() << std::endl;
+    return -1;
+  }
+
+  if (query_ast.is_json_query())
+  {
+    return process_json_query(query, fname);
+  }
+
+  auto file_sz = boost::filesystem::file_size(fname);
+
+  std::string buff(BUFFER_SIZE, 0);
+  while (1)
+  {
+    size_t read_sz = input_file_stream.read(buff.data(), BUFFER_SIZE).gcount();
+
+    status = awscli->run_s3select(query, buff.data(), read_sz, file_sz);
+    if (status < 0)
+    {
+      std::cout << "failure on execution " << std::endl;
+      break;
+    }
+    else
+    {
+      std::cout << awscli->get_result() << std::endl;
+    }
+
+    if (!read_sz || input_file_stream.eof())
+    {
+      break;
+    }
+  }
+
+  return status;
+}
+
+int main(int argc, char** argv)
+{
+  char* query = 0;
+  char* fname = 0;
+  char* query_file = 0; //file contains many queries
+
+  for (int i = 0; i < argc; i++)
+  {
+    if (!strcmp(argv[i], "-key"))
+    { //object received as CLI parameter
+      fname = argv[i + 1];
+      continue;
+    }
+
+    if (!strcmp(argv[i], "-q"))
+    {
+      query = argv[i + 1];
+      continue;
+    }
+
+    if (!strcmp(argv[i], "-cmds"))
+    { //query file contains many queries
+      query_file = argv[i + 1];
+      continue;
+    }
+
+    if (!strcmp(argv[i], "-h") || !strcmp(argv[i], "-help"))
+    {
+      std::cout << "CSV_ALWAYS_QUOT= CSV_COLUMN_DELIMETER= CSV_ROW_DELIMITER= CSV_HEADER_INFO= s3select_example -q \"... query ...\" -key object-path -cmds queries-file" << std::endl;
+      exit(0);
+    }
+  }
+
+  if (fname == 0)
+  { //object is in query explicitly.
+    return run_on_localFile(query);
+  }
+
+  if (query_file)
+  {
+    //purpose: run many queries (reside in file) on a single file.
+    std::fstream f(query_file, std::ios::in | std::ios::binary);
+    const auto sz = boost::filesystem::file_size(query_file);
+    std::string result(sz, '\0');
+    f.read(result.data(), sz);
+    boost::char_separator<char> sep("\n");
+    boost::tokenizer<boost::char_separator<char>> tokens(result, sep);
+
+    for (const auto& t : tokens) {
+      std::cout << t << std::endl;
+      int status = run_on_single_query(fname, t.c_str());
+      std::cout << "status: " << status << std::endl;
+    }
+
+    return 0;
+  }
+
+  int status = run_on_single_query(fname, query);
+  return status;
+}
+
diff --git a/src/s3select/include/csvparser/LICENSE b/src/s3select/include/csvparser/LICENSE
new file mode 100644
index 000000000..da603a96b
--- /dev/null
+++ b/src/s3select/include/csvparser/LICENSE
@@ -0,0 +1,28 @@
+Copyright (c) 2015, ben-strasser
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+  list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+  this list of conditions and the following disclaimer in the documentation
+  and/or other materials provided with the distribution.
+
+* Neither the name of fast-cpp-csv-parser nor the names of its
+  contributors may be used to endorse or promote products derived from
+  this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
diff --git a/src/s3select/include/csvparser/README.md b/src/s3select/include/csvparser/README.md
new file mode 100644
index 000000000..0b1d2c83e
--- /dev/null
+++ b/src/s3select/include/csvparser/README.md
@@ -0,0 +1,275 @@
+# Fast C++ CSV Parser
+
+This is a small, easy-to-use and fast header-only library for reading comma-separated value (CSV) files.
+
+## Features
+
+ * Automatically rearranges columns by parsing the header line.
+ * Disk I/O and CSV parsing are overlapped using threads for efficiency.
+ * Parsing features such as escaped strings can be enabled and disabled at compile time using templates. You only pay in speed for the features you actually use.
+ * Can read multiple GB files in reasonable time.
+ * Support for custom column separators (i.e. tab-separated value files are supported), quote-escaped strings, and automatic space trimming.
+ * Works with `*`nix and Windows newlines and automatically ignores UTF-8 BOMs.
+ * Exception classes with enough context to format useful error messages. `what()` returns error messages ready to be shown to a user.
+
+## Getting Started
+
+The following small example should contain most of the syntax you need to use the library.
+
+```cpp
+#include "csv.h"
+
+int main(){
+  io::CSVReader<3> in("ram.csv");
+  in.read_header(io::ignore_extra_column, "vendor", "size", "speed");
+  std::string vendor; int size; double speed;
+  while(in.read_row(vendor, size, speed)){
+    // do stuff with the data
+  }
+}
+```
+
+## Installation
+
+The library only needs a standard-conformant C++11 compiler. It has no further dependencies. The library is completely contained inside a single header file and therefore it is sufficient to copy this file to some place on your include path. The library does not have to be explicitly built.
+
+Note however, that threads are used and some compilers (for example GCC) require you to link against additional libraries to make it work. With GCC it is important to add -lpthread as the last item when linking, i.e. the order in
+
+```
+g++ -std=c++0x a.o b.o -o prog -lpthread
+```
+
+is important. If you for some reason do not want to use threads you can define CSV_IO_NO_THREAD before including the header.
+
+Remember that the library makes use of C++11 features and therefore you have to enable support for it (e.g. add -std=c++0x or -std=gnu++0x).
+
+The library was developed and tested with GCC 4.6.1.
+
+Note that VS2013 is not C++11 compliant and will therefore not work out of the box. See [here](https://code.google.com/p/fast-cpp-csv-parser/issues/detail?id=6) for what needs to be adjusted to make the code work.
+
+## Documentation
+
+The library provides two classes:
+
+ * `LineReader`: A class to efficiently read large files line by line.
+ * `CSVReader`: A class that efficiently reads large CSV files.
+
+Note that everything is contained in the `io` namespace.
+
+### `LineReader`
+
+```cpp
+class LineReader{
+public:
+  // Constructors
+  LineReader(some_string_type file_name);
+  LineReader(some_string_type file_name, std::FILE*source);
+  LineReader(some_string_type file_name, std::istream&source);
+  LineReader(some_string_type file_name, std::unique_ptr<ByteSourceBase>source);
+
+  // Reading
+  char*next_line();
+
+  // File Location
+  void set_file_line(unsigned);
+  unsigned get_file_line()const;
+  void set_file_name(some_string_type file_name);
+  const char*get_truncated_file_name()const;
+};
+```
+
+The constructor takes a file name and optionally a data source. If no data source is provided the function tries to open the file with the given name and throws an `error::can_not_open_file` exception on failure. If a data source is provided then the file name is only used to format error messages. In that case you can essentially put any string there. Using a string that describes the data source results in more informative error messages.
+
+`some_string_type` can be a `std::string` or a `char*`. If the data source is a `std::FILE*` then the library will take care of calling `std::fclose`. If it is a `std::istream` then the stream is not closed by the library. For best performance open the streams in binary mode. However, using text mode also works. `ByteSourceBase` provides an interface that you can use to implement further data sources.
+
+```cpp
+class ByteSourceBase{
+public:
+  virtual int read(char*buffer, int size)=0;
+  virtual ~ByteSourceBase(){}
+};
+```
+
+The read function should fill the provided buffer with at most `size` bytes from the data source. It should return the number of bytes actually written to the buffer. If the data source has run out of bytes (because for example an end of file was reached) then the function should return 0. If a fatal error occurs then you can throw an exception. Note that the function can be called both from the main and the worker thread. However, it is guaranteed that they do not call the function at the same time.
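+
+For illustration, here is a minimal custom byte source that serves data from a
+`std::string` (this class is *not* part of the library; it is only a sketch of
+the `ByteSourceBase` contract described above):
+
+```cpp
+#include "csv.h"
+
+#include <algorithm>
+#include <cstring>
+#include <iostream>
+#include <memory>
+#include <string>
+
+class StringByteSource : public io::ByteSourceBase{
+public:
+  explicit StringByteSource(std::string data):data(std::move(data)){}
+
+  int read(char*buffer, int size){
+    // Copy at most `size` bytes and report how many were actually copied;
+    // returning 0 signals that the source is exhausted.
+    int n = (int)std::min<std::size_t>(size, data.size() - pos);
+    std::memcpy(buffer, data.data() + pos, n);
+    pos += n;
+    return n;
+  }
+
+private:
+  std::string data;
+  std::size_t pos = 0;
+};
+
+int main(){
+  io::LineReader in("in-memory data",
+    std::unique_ptr<io::ByteSourceBase>(new StringByteSource("a\nb\nc\n")));
+  while(char*line = in.next_line()){
+    std::cout << line << "\n"; // prints a, b, c
+  }
+}
+```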
+
+Lines are read by calling the `next_line` function. It returns a pointer to a null-terminated C-string that contains the line. If the end of file is reached a null pointer is returned. The newline character is not included in the string. You may modify the string as long as you do not write past the null terminator. The string stays valid until the destructor is called or until `next_line` is called again. Windows and `*`nix newlines are handled transparently. UTF-8 BOMs are automatically ignored and missing newlines at the end of the file are no problem.
+
+**Important:** There is a limit of 2^24-1 characters per line. If this limit is exceeded a `error::line_length_limit_exceeded` exception is thrown.
+
+Looping over all the lines in a file can be done in the following way.
+```cpp
+LineReader in(...);
+while(char*line = in.next_line()){
+  ...
+}
+```
+
+The remaining functions are mainly used to format error messages. The file line indicates the current position in the file, i.e., after the first `next_line` call it is 1 and after the second 2. Before the first call it is 0. The file name is truncated as internally C-strings are used to avoid `std::bad_alloc` exceptions during error reporting.
+
+**Note:** It is not possible to exchange the line termination character.
+
+### `CSVReader`
+
+`CSVReader` uses policies. These are classes with only static members to allow core functionality to be exchanged in an efficient way.
+
+```cpp
+template<
+  unsigned column_count,
+  class trim_policy = trim_chars<' ', '\t'>,
+  class quote_policy = no_quote_escape<','>,
+  class overflow_policy = throw_on_overflow,
+  class comment_policy = no_comment
+>
+class CSVReader{
+public:
+  // Constructors
+  // same as for LineReader
+
+  // Parsing Header
+  void read_header(ignore_column ignore_policy, some_string_type col_name1, some_string_type col_name2, ...);
+  void set_header(some_string_type col_name1, some_string_type col_name2, ...);
+  bool has_column(some_string_type col_name)const;
+
+  // Read
+  char*next_line();
+  bool read_row(ColType1&col1, ColType2&col2, ...);
+
+  // File Location
+  void set_file_line(unsigned);
+  unsigned get_file_line()const;
+  void set_file_name(some_string_type file_name);
+  const char*get_truncated_file_name()const;
+};
+```
+
+The `column_count` template parameter indicates how many columns you want to read from the CSV file. This need not coincide with the actual number of columns in the file. The remaining template parameters are policies that govern various aspects of the parsing.
+
+The trim policy indicates what characters should be ignored at the beginning and the end of every column. The default ignores spaces and tabs. This makes sure that
+
+```
+a,b,c
+1,2,3
+```
+
+is interpreted in the same way as
+
+```
+ a, b, c
+1 , 2, 3
+```
+
+`trim_chars` can take any number of template parameters. For example `trim_chars<' ', '\t', '_'>` is also valid. If no character should be trimmed use `trim_chars<>`.
+
+The quote policy indicates how strings are escaped. It also specifies the column separator. The predefined policies are:
+
+ * `no_quote_escape<sep>` : Strings are not escaped. "`sep`" is used as column separator.
+ * `double_quote_escape<sep, quote>` : Strings are escaped using quotes. Quotes are escaped using two consecutive quotes. "`sep`" is used as column separator and "`quote`" as quoting character.
+
+**Important**: When combining trimming and quoting the rows are first trimmed and then unquoted. A consequence is that spaces inside the quotes will be preserved. If you want to get rid of spaces inside the quotes, you need to remove them yourself.
+
+**Important**: Quoting can be quite expensive. Disable it if you do not need it.
+
+**Important**: Quoted strings may not contain unescaped newlines. This is currently not supported.
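+
+For example, a reader for a conventional quoted CSV file could be declared
+like this (a minimal sketch; `people.csv` is just a placeholder file name):
+
+```cpp
+#include "csv.h"
+#include <iostream>
+#include <string>
+
+int main(){
+  // two columns, trim spaces/tabs, fields quoted with " and separated by ,
+  io::CSVReader<2, io::trim_chars<' ', '\t'>, io::double_quote_escape<',', '"'>> in("people.csv");
+  in.read_header(io::ignore_extra_column, "name", "city");
+  std::string name, city;
+  while(in.read_row(name, city)){
+    std::cout << name << " lives in " << city << "\n";
+  }
+}
+```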
+
+The overflow policy indicates what should be done if the integers in the input are too large to fit into the variables. The following policies are predefined:
+
+ * `throw_on_overflow` : Throw an `error::integer_overflow` or `error::integer_underflow` exception.
+ * `ignore_overflow` : Do nothing and let the overflow happen.
+ * `set_to_max_on_overflow` : Set the value to `numeric_limits<...>::max()` (or to the corresponding minimum on underflow).
+
+The comment policy allows skipping lines based on some criteria. Valid predefined policies are:
+
+ * `no_comment` : Do not ignore any line.
+ * `empty_line_comment` : Ignore all lines that are empty or only contain spaces and tabs.
+ * `single_line_comment<com1, com2, ...>` : Ignore all lines that start with com1 or com2 or ... as the first character. There may not be any space between the beginning of the line and the comment character.
+ * `single_and_empty_line_comment<com1, com2, ...>` : Ignore all empty lines and single line comments.
+
+Examples:
+
+ * `CSVReader<4, trim_chars<' '>, double_quote_escape<',','\"'> >` reads 4 columns from a normal CSV file with string escaping enabled.
+ * `CSVReader<3, trim_chars<' '>, no_quote_escape<'\t'>, throw_on_overflow, single_line_comment<'#'> >` reads 3 columns from a tab-separated file with string escaping disabled. Lines starting with a # are ignored.
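+
+Putting several policies together, a reader for a tab-separated file that
+clamps out-of-range integers and skips blank lines and `#` comments might look
+like this (a sketch; `counters.tsv` is a placeholder file name):
+
+```cpp
+#include "csv.h"
+#include <iostream>
+
+int main(){
+  io::CSVReader<2, io::trim_chars<>, io::no_quote_escape<'\t'>,
+                io::set_to_max_on_overflow,
+                io::single_and_empty_line_comment<'#'>> in("counters.tsv");
+  in.set_header("id", "count"); // the file has no header line
+  long long id, count;
+  while(in.read_row(id, count)){
+    std::cout << id << ": " << count << "\n";
+  }
+}
+```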
+
+The constructors and the file location functions are exactly the same as for `LineReader`. See its documentation for details.
+
+There are three methods that deal with headers. The `read_header` method reads a line from the file and rearranges the columns to match that order. It also checks whether all necessary columns are present. The `set_header` method does *not* read any input. Use it if the file does not have any header. Obviously it is impossible to rearrange columns or check for their availability when using it. The order in the file and in the program must match when using `set_header`. The `has_column` method checks whether a column is present in the file. The first argument of `read_header` is a bitfield that determines how the function should react to column mismatches. The default behavior is to throw an `error::extra_column_in_header` exception if the file contains more columns than expected and an `error::missing_column_in_header` exception when there are not enough. This behavior can be altered using the following flags.
+
+ * `ignore_no_column`: The default behavior, no flags are set.
+ * `ignore_extra_column`: If a column with a name is in the file but not in the argument list, then it is silently ignored.
+ * `ignore_missing_column`: If a column with a name is not in the file but is in the argument list, then `read_row` will not modify the corresponding variable.
+
+When using `ignore_missing_column` it is a good idea to initialize the variables passed to `read_row` with a default value, for example:
+
+```cpp
+// The file only contains column "a"
+CSVReader<2> in(...);
+in.read_header(ignore_missing_column, "a", "b");
+int a, b = 42;
+while(in.read_row(a, b)){
+  // a contains the value from the file
+  // b is left unchanged by read_row, i.e., it is 42
+}
+```
+
+If only some columns are optional or their default value depends on other columns you have to use `has_column`, for example:
+
+```cpp
+// The file only contains the columns "a" and "b"
+CSVReader<3> in(...);
+in.read_header(ignore_missing_column, "a", "b", "sum");
+if(!in.has_column("a") || !in.has_column("b"))
+  throw my_neat_error_class();
+bool has_sum = in.has_column("sum");
+int a, b, sum;
+while(in.read_row(a, b, sum)){
+  if(!has_sum)
+    sum = a + b;
+}
+```
+
+**Important**: Do not call `has_column` from within the read-loop. It would work correctly but significantly slow down processing.
+
+If two columns have the same name an `error::duplicated_column_in_header` exception is thrown. If `read_header` is called but the file is empty an `error::header_missing` exception is thrown.
+
+The `next_line` function reads a line without parsing it. It works analogously to `LineReader::next_line`. This can be used to skip broken lines in a CSV file. However, in nearly all applications you will want to use the `read_row` function.
+
+The `read_row` function reads a line, splits it into the columns and arranges them correctly. It trims the entries and unescapes them. If requested the content is interpreted as integer or as floating point. The variables passed to `read_row` may be of the following types.
+
+ * builtin signed integer: These are `signed char`, `short`, `int`, `long` and `long long`. The input must be encoded as a base 10 ASCII number optionally preceded by a + or -. The function detects whether the integer would overflow (or underflow) and behaves as indicated by the overflow policy.
+ * builtin unsigned integer: Just as the signed counterparts except that a leading + or - is not allowed.
+ * builtin floating point: These are `float`, `double` and `long double`. The input may have a leading + or -. The number must be base 10 encoded. The decimal point may either be a dot or a comma. (Note that a comma will only work if it is not also used as column separator or the number is escaped.) A base 10 exponent may be specified using the "1e10" syntax. The "e" may be lower- or uppercase. Examples for valid floating points are "1", "-42.42" and "+123.456E789". The input is rounded to the next floating point or infinity if it is too large or small.
+ * `char`: The column content must be a single character.
+ * `std::string`: The column content is assigned to the string. The `std::string` is filled with the trimmed and unescaped version.
+ * `char*`: A pointer directly into the buffer. The string is trimmed and unescaped and null terminated. This pointer stays valid until `read_row` is called again or the `CSVReader` is destroyed. Use this for user defined types.
+
+Note that there is no inherent overhead to using `char*` and then interpreting it compared to using one of the parsers directly built into `CSVReader`. The builtin number parsers are pure convenience. If you need a slightly different syntax then use `char*` and do the parsing yourself.
+
+## FAQ
+
+Q: The library is throwing a std::system_error with code -1. How to get it to work?
+
+A: Your compiler's std::thread implementation is broken. Define CSV\_IO\_NO\_THREAD to disable threading support.
+
+
+Q: My values are not just ints or strings. I want to parse my customized type. Is this possible?
+
+A: Read a `char*` and parse the string. At first this seems expensive but it is not, as the pointer you get points directly into the memory buffer. In fact there is no inherent reason why a custom int-parser realized this way must be any slower than the int-parser built into the library. By reading a `char*` the library takes care of column reordering and quote escaping and leaves the actual parsing to you. Note that using a `std::string` is slower as it involves a memory copy.
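+
+As a sketch of this pattern (the `Price` type and the "euros.cents" column
+format are made up for the example):
+
+```cpp
+#include "csv.h"
+#include <cstdlib>
+#include <iostream>
+#include <string>
+
+struct Price{ long cents; };
+
+// Parses "12.34" into 1234; a real parser would validate its input.
+Price parse_price(char*s){
+  char*end;
+  long whole = std::strtol(s, &end, 10);
+  long frac = (*end == '.') ? std::strtol(end + 1, &end, 10) : 0;
+  return Price{whole * 100 + frac};
+}
+
+int main(){
+  io::CSVReader<2> in("prices.csv");
+  in.read_header(io::ignore_extra_column, "item", "price");
+  std::string item; char*raw_price;
+  while(in.read_row(item, raw_price)){
+    Price p = parse_price(raw_price); // raw_price points into the reader's buffer
+    std::cout << item << ": " << p.cents << " cents\n";
+  }
+}
+```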
+
+
+Q: I get lots of compiler errors when compiling the header! Please fix it. :(
+
+A: Have you enabled the C++11 mode of your compiler? If you use GCC you have to add -std=c++0x to the command line. If this does not resolve the problem, then please open a ticket.
+
+
+Q: The library crashes when parsing large files! Please fix it. :(
+
+A: When using GCC have you linked against -lpthread? Read the installation section for details on how to do this. If this does not resolve the issue then please open a ticket. (The reason why it only crashes on large files is that the first chunk is read synchronously, and if the whole file fits into this chunk then no asynchronous call is performed.) Alternatively you can define CSV\_IO\_NO\_THREAD.
+
+
+Q: Does the library support UTF?
+
+A: The library has basic UTF-8 support, or to be more precise it does not break when passing UTF-8 strings through it. If you read a `char*` then you get a pointer to the UTF-8 string. You will have to decode the string on your own. The separator, quoting, and commenting characters used by the library can only be ASCII characters.
+
+
+Q: Does the library support string fields that span multiple lines?
+
+A: No. This feature has been often requested in the past, however, it is difficult to make it work with the current design without breaking something else.
diff --git a/src/s3select/include/csvparser/csv.h b/src/s3select/include/csvparser/csv.h
new file mode 100644
index 000000000..c5cb5bcae
--- /dev/null
+++ b/src/s3select/include/csvparser/csv.h
@@ -0,0 +1,1273 @@
+// Copyright: (2012-2015) Ben Strasser <code@ben-strasser.net>
+// License: BSD-3
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice,
+//    this list of conditions and the following disclaimer.
+//
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+//    this list of conditions and the following disclaimer in the documentation
+//    and/or other materials provided with the distribution.
+//
+// 3. Neither the name of the copyright holder nor the names of its contributors
+//    may be used to endorse or promote products derived from this software
+//    without specific prior written permission.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
+// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
+// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
+// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
+// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
+// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
+// POSSIBILITY OF SUCH DAMAGE.
+
+#ifndef CSV_H
+#define CSV_H
+
+#include <vector>
+#include <string>
+#include <cstring>
+#include <algorithm>
+#include <utility>
+#include <cstdio>
+#include <exception>
+#ifndef CSV_IO_NO_THREAD
+#include <mutex>
+#include <thread>
+#include <condition_variable>
+#endif
+#include <memory>
+#include <cassert>
+#include <cerrno>
+#include <istream>
+#include <limits>
+
+namespace io{
+  ////////////////////////////////////////////////////////////////////////////
+  //                               LineReader                               //
+  ////////////////////////////////////////////////////////////////////////////
+
+  namespace error{
+    struct base : std::exception{
+      virtual void format_error_message()const = 0;
+
+      const char*what()const noexcept override{
+        format_error_message();
+        return error_message_buffer;
+      }
+
+      mutable char error_message_buffer[512];
+    };
+
+    const int max_file_name_length = 255;
+
+    struct with_file_name{
+      with_file_name(){
+        std::memset(file_name, 0, sizeof(file_name));
+      }
+
+      void set_file_name(const char*file_name){
+        if(file_name != nullptr){
+          // This call to strncpy has parentheses around it
+          // to silence the GCC -Wstringop-truncation warning
+          (strncpy(this->file_name, file_name, sizeof(this->file_name)));
+          this->file_name[sizeof(this->file_name)-1] = '\0';
+        }else{
+          this->file_name[0] = '\0';
+        }
+      }
+
+      char file_name[max_file_name_length+1];
+    };
+
+    struct with_file_line{
+      with_file_line(){
+        file_line = -1;
+      }
+
+      void set_file_line(int file_line){
+        this->file_line = file_line;
+      }
+
+      int file_line;
+    };
+
+    struct with_errno{
+      with_errno(){
+        errno_value = 0;
+      }
+
+      void set_errno(int errno_value){
+        this->errno_value = errno_value;
+      }
+
+      int errno_value;
+    };
+
+    struct can_not_open_file :
+      base,
+      with_file_name,
+      with_errno{
+      void format_error_message()const override{
+        if(errno_value != 0)
+          std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+            "Can not open file \"%s\" because \"%s\"."
+            , file_name, std::strerror(errno_value));
+        else
+          std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+            "Can not open file \"%s\"."
+            , file_name);
+      }
+    };
+
+    struct line_length_limit_exceeded :
+      base,
+      with_file_name,
+      with_file_line{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          "Line number %d in file \"%s\" exceeds the maximum length of 2^24-1."
+          , file_line, file_name);
+      }
+    };
+  }
+
+  class ByteSourceBase{
+  public:
+    virtual int read(char*buffer, int size)=0;
+    virtual ~ByteSourceBase(){}
+  };
+
+  namespace detail{
+
+    class OwningStdIOByteSourceBase : public ByteSourceBase{
+    public:
+      explicit OwningStdIOByteSourceBase(FILE*file):file(file){
+        // Tell the std library that we want to do the buffering ourselves.
+        std::setvbuf(file, 0, _IONBF, 0);
+      }
+
+      int read(char*buffer, int size){
+        return std::fread(buffer, 1, size, file);
+      }
+
+      ~OwningStdIOByteSourceBase(){
+        std::fclose(file);
+      }
+
+    private:
+      FILE*file;
+    };
+
+    class NonOwningIStreamByteSource : public ByteSourceBase{
+    public:
+      explicit NonOwningIStreamByteSource(std::istream&in):in(in){}
+
+      int read(char*buffer, int size){
+        in.read(buffer, size);
+        return in.gcount();
+      }
+
+      ~NonOwningIStreamByteSource(){}
+
+    private:
+      std::istream&in;
+    };
+
+    class NonOwningStringByteSource : public ByteSourceBase{
+    public:
+      NonOwningStringByteSource(const char*str, long long size):str(str), remaining_byte_count(size){}
+
+      int read(char*buffer, int desired_byte_count){
+        int to_copy_byte_count = desired_byte_count;
+        if(remaining_byte_count < to_copy_byte_count)
+          to_copy_byte_count = remaining_byte_count;
+        std::memcpy(buffer, str, to_copy_byte_count);
+        remaining_byte_count -= to_copy_byte_count;
+        str += to_copy_byte_count;
+        return to_copy_byte_count;
+      }
+
+      ~NonOwningStringByteSource(){}
+
+    private:
+      const char*str;
+      long long remaining_byte_count;
+    };
+
+    #ifndef CSV_IO_NO_THREAD
+    class AsynchronousReader{
+    public:
+      void init(std::unique_ptr<ByteSourceBase>arg_byte_source){
+        std::unique_lock<std::mutex>guard(lock);
+        byte_source = std::move(arg_byte_source);
+        desired_byte_count = -1;
+        termination_requested = false;
+        worker = std::thread(
+          [&]{
+            std::unique_lock<std::mutex>guard(lock);
+            try{
+              for(;;){
+                read_requested_condition.wait(
+                  guard,
+                  [&]{
+                    return desired_byte_count != -1 || termination_requested;
+                  }
+                );
+                if(termination_requested)
+                  return;
+
+                read_byte_count = byte_source->read(buffer, desired_byte_count);
+                desired_byte_count = -1;
+                if(read_byte_count == 0)
+                  break;
+                read_finished_condition.notify_one();
+              }
+            }catch(...){
+              read_error = std::current_exception();
+            }
+            read_finished_condition.notify_one();
+          }
+        );
+      }
+
+      bool is_valid()const{
+        return byte_source != nullptr;
+      }
+
+      void start_read(char*arg_buffer, int arg_desired_byte_count){
+        std::unique_lock<std::mutex>guard(lock);
+        buffer = arg_buffer;
+        desired_byte_count = arg_desired_byte_count;
+        read_byte_count = -1;
+        read_requested_condition.notify_one();
+      }
+
+      int finish_read(){
+        std::unique_lock<std::mutex>guard(lock);
+        read_finished_condition.wait(
+          guard,
+          [&]{
+            return read_byte_count != -1 || read_error;
+          }
+        );
+        if(read_error)
+          std::rethrow_exception(read_error);
+        else
+          return read_byte_count;
+      }
+
+      ~AsynchronousReader(){
+        if(byte_source != nullptr){
+          {
+            std::unique_lock<std::mutex>guard(lock);
+            termination_requested = true;
+          }
+          read_requested_condition.notify_one();
+          worker.join();
+        }
+      }
+
+    private:
+      std::unique_ptr<ByteSourceBase>byte_source;
+
+      std::thread worker;
+
+      bool termination_requested;
+      std::exception_ptr read_error;
+      char*buffer;
+      int desired_byte_count;
+      int read_byte_count;
+
+      std::mutex lock;
+      std::condition_variable read_finished_condition;
+      std::condition_variable read_requested_condition;
+    };
+    #endif
+
+    class SynchronousReader{
+    public:
+      void init(std::unique_ptr<ByteSourceBase>arg_byte_source){
+        byte_source = std::move(arg_byte_source);
+      }
+
+      bool is_valid()const{
+        return byte_source != nullptr;
+      }
+
+      void start_read(char*arg_buffer, int arg_desired_byte_count){
+        buffer = arg_buffer;
+        desired_byte_count = arg_desired_byte_count;
+      }
+
+      int finish_read(){
+        return byte_source->read(buffer, desired_byte_count);
+      }
+    private:
+      std::unique_ptr<ByteSourceBase>byte_source;
+      char*buffer;
+      int desired_byte_count;
+    };
+  }
+
+  class LineReader{
+  private:
+    static const int block_len = 1<<20;
+    std::unique_ptr<char[]>buffer; // must be constructed before (and thus destructed after) the reader!
+    #ifdef CSV_IO_NO_THREAD
+    detail::SynchronousReader reader;
+    #else
+    detail::AsynchronousReader reader;
+    #endif
+    int data_begin;
+    int data_end;
+
+    char file_name[error::max_file_name_length+1];
+    unsigned file_line;
+
+    static std::unique_ptr<ByteSourceBase> open_file(const char*file_name){
+      // We open the file in binary mode as it makes no difference under *nix
+      // and under Windows we handle \r\n newlines ourself.
+      FILE*file = std::fopen(file_name, "rb");
+      if(file == 0){
+        int x = errno; // store errno as soon as possible, doing it after constructor call can fail.
+        error::can_not_open_file err;
+        err.set_errno(x);
+        err.set_file_name(file_name);
+        throw err;
+      }
+      return std::unique_ptr<ByteSourceBase>(new detail::OwningStdIOByteSourceBase(file));
+    }
+
+    void init(std::unique_ptr<ByteSourceBase>byte_source){
+      file_line = 0;
+
+      buffer = std::unique_ptr<char[]>(new char[3*block_len]);
+      data_begin = 0;
+      data_end = byte_source->read(buffer.get(), 2*block_len);
+
+      // Ignore UTF-8 BOM
+      if(data_end >= 3 && buffer[0] == '\xEF' && buffer[1] == '\xBB' && buffer[2] == '\xBF')
+        data_begin = 3;
+
+      if(data_end == 2*block_len){
+        reader.init(std::move(byte_source));
+        reader.start_read(buffer.get() + 2*block_len, block_len);
+      }
+    }
+
+  public:
+    LineReader() = delete;
+    LineReader(const LineReader&) = delete;
+    LineReader&operator=(const LineReader&) = delete;
+
+    explicit LineReader(const char*file_name){
+      set_file_name(file_name);
+      init(open_file(file_name));
+    }
+
+    explicit LineReader(const std::string&file_name){
+      set_file_name(file_name.c_str());
+      init(open_file(file_name.c_str()));
+    }
+
+    LineReader(const char*file_name, std::unique_ptr<ByteSourceBase>byte_source){
+      set_file_name(file_name);
+      init(std::move(byte_source));
+    }
+
+    LineReader(const std::string&file_name, std::unique_ptr<ByteSourceBase>byte_source){
+      set_file_name(file_name.c_str());
+      init(std::move(byte_source));
+    }
+
+    LineReader(const char*file_name, const char*data_begin, const char*data_end){
+      set_file_name(file_name);
+      init(std::unique_ptr<ByteSourceBase>(new detail::NonOwningStringByteSource(data_begin, data_end-data_begin)));
+    }
+
+    LineReader(const std::string&file_name, const char*data_begin, const char*data_end){
+      set_file_name(file_name.c_str());
+      init(std::unique_ptr<ByteSourceBase>(new detail::NonOwningStringByteSource(data_begin, data_end-data_begin)));
+    }
+
+    LineReader(const char*file_name, FILE*file){
+      set_file_name(file_name);
+      init(std::unique_ptr<ByteSourceBase>(new detail::OwningStdIOByteSourceBase(file)));
+    }
+
+    LineReader(const std::string&file_name, FILE*file){
+      set_file_name(file_name.c_str());
+      init(std::unique_ptr<ByteSourceBase>(new detail::OwningStdIOByteSourceBase(file)));
+    }
+
+    LineReader(const char*file_name, std::istream&in){
+      set_file_name(file_name);
+      init(std::unique_ptr<ByteSourceBase>(new detail::NonOwningIStreamByteSource(in)));
+    }
+
+    LineReader(const std::string&file_name, std::istream&in){
+      set_file_name(file_name.c_str());
+      init(std::unique_ptr<ByteSourceBase>(new detail::NonOwningIStreamByteSource(in)));
+    }
+
+    void set_file_name(const std::string&file_name){
+      set_file_name(file_name.c_str());
+    }
+
+    void set_file_name(const char*file_name){
+      if(file_name != nullptr){
+        strncpy(this->file_name, file_name, sizeof(this->file_name));
+        this->file_name[sizeof(this->file_name)-1] = '\0';
+      }else{
+        this->file_name[0] = '\0';
+      }
+    }
+
+    const char*get_truncated_file_name()const{
+      return file_name;
+    }
+
+    void set_file_line(unsigned file_line){
+      this->file_line = file_line;
+    }
+
+    unsigned get_file_line()const{
+      return file_line;
+    }
+
+    char*next_line(){
+      if(data_begin == data_end)
+        return nullptr;
+
+      ++file_line;
+
+      assert(data_begin < data_end);
+      assert(data_end <= block_len*2);
+
+      if(data_begin >= block_len){
+        std::memcpy(buffer.get(), buffer.get()+block_len, block_len);
+        data_begin -= block_len;
+        data_end -= block_len;
+        if(reader.is_valid())
+        {
+          data_end += reader.finish_read();
+          std::memcpy(buffer.get()+block_len, buffer.get()+2*block_len, block_len);
+          reader.start_read(buffer.get() + 2*block_len, block_len);
+        }
+      }
+
+      int line_end = data_begin;
+      while(line_end != data_end && buffer[line_end] != '\n'){
+        ++line_end;
+      }
+
+      if(line_end - data_begin + 1 > block_len){
+        error::line_length_limit_exceeded err;
+        err.set_file_name(file_name);
+        err.set_file_line(file_line);
+        throw err;
+      }
+
+      if(line_end != data_end && buffer[line_end] == '\n'){
+        buffer[line_end] = '\0';
+      }else{
+        // some files are missing the newline at the end of the
+        // last line
+        ++data_end;
+        buffer[line_end] = '\0';
+      }
+
+      // handle windows \r\n-line breaks
+      if(line_end != data_begin && buffer[line_end-1] == '\r')
+        buffer[line_end-1] = '\0';
+
+      char*ret = buffer.get() + data_begin;
+      data_begin = line_end+1;
+      return ret;
+    }
+  };
+
+
+  ////////////////////////////////////////////////////////////////////////////
+  //                                   CSV                                  //
+  ////////////////////////////////////////////////////////////////////////////
+
+  namespace error{
+    const int max_column_name_length = 63;
+    struct with_column_name{
+      with_column_name(){
+        std::memset(column_name, 0, max_column_name_length+1);
+      }
+
+      void set_column_name(const char*column_name){
+        if(column_name != nullptr){
+          std::strncpy(this->column_name, column_name, max_column_name_length);
+          this->column_name[max_column_name_length] = '\0';
+        }else{
+          this->column_name[0] = '\0';
+        }
+      }
+
+      char column_name[max_column_name_length+1];
+    };
+
+
+    const int max_column_content_length = 63;
+
+    struct with_column_content{
+      with_column_content(){
+        std::memset(column_content, 0, max_column_content_length+1);
+      }
+
+      void set_column_content(const char*column_content){
+        if(column_content != nullptr){
+          std::strncpy(this->column_content, column_content, max_column_content_length);
+          this->column_content[max_column_content_length] = '\0';
+        }else{
+          this->column_content[0] = '\0';
+        }
+      }
+
+      char column_content[max_column_content_length+1];
+    };
+
+
+    struct extra_column_in_header :
+      base,
+      with_file_name,
+      with_column_name{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          R"(Extra column "%s" in header of file "%s".)"
+          , column_name, file_name);
+      }
+    };
+
+    struct missing_column_in_header :
+      base,
+      with_file_name,
+      with_column_name{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          R"(Missing column "%s" in header of file "%s".)"
+          , column_name, file_name);
+      }
+    };
+
+    struct duplicated_column_in_header :
+      base,
+      with_file_name,
+      with_column_name{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          R"(Duplicated column "%s" in header of file "%s".)"
+          , column_name, file_name);
+      }
+    };
+
+    struct header_missing :
+      base,
+      with_file_name{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          "Header missing in file \"%s\"."
+          , file_name);
+      }
+    };
+
+    struct too_few_columns :
+      base,
+      with_file_name,
+      with_file_line{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          "Too few columns in line %d in file \"%s\"."
+          , file_line, file_name);
+      }
+    };
+
+    struct too_many_columns :
+      base,
+      with_file_name,
+      with_file_line{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          "Too many columns in line %d in file \"%s\"."
+          , file_line, file_name);
+      }
+    };
+
+    struct escaped_string_not_closed :
+      base,
+      with_file_name,
+      with_file_line{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          "Escaped string was not closed in line %d in file \"%s\"."
+          , file_line, file_name);
+      }
+    };
+
+    struct integer_must_be_positive :
+      base,
+      with_file_name,
+      with_file_line,
+      with_column_name,
+      with_column_content{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          R"(The integer "%s" must be positive or 0 in column "%s" in file "%s" in line "%d".)"
+          , column_content, column_name, file_name, file_line);
+      }
+    };
+
+    struct no_digit :
+      base,
+      with_file_name,
+      with_file_line,
+      with_column_name,
+      with_column_content{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          R"(The integer "%s" contains an invalid digit in column "%s" in file "%s" in line "%d".)"
+          , column_content, column_name, file_name, file_line);
+      }
+    };
+
+    struct integer_overflow :
+      base,
+      with_file_name,
+      with_file_line,
+      with_column_name,
+      with_column_content{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          R"(The integer "%s" overflows in column "%s" in file "%s" in line "%d".)"
+          , column_content, column_name, file_name, file_line);
+      }
+    };
+
+    struct integer_underflow :
+      base,
+      with_file_name,
+      with_file_line,
+      with_column_name,
+      with_column_content{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          R"(The integer "%s" underflows in column "%s" in file "%s" in line "%d".)"
+          , column_content, column_name, file_name, file_line);
+      }
+    };
+
+    struct invalid_single_character :
+      base,
+      with_file_name,
+      with_file_line,
+      with_column_name,
+      with_column_content{
+      void format_error_message()const override{
+        std::snprintf(error_message_buffer, sizeof(error_message_buffer),
+          R"(The content "%s" of column "%s" in file "%s" in line "%d" is not a single character.)"
+          , column_content, column_name, file_name, file_line);
+      }
+    };
+  }
+
+  using ignore_column = unsigned int;
+  static const ignore_column ignore_no_column = 0;
+  static const ignore_column ignore_extra_column = 1;
+  static const ignore_column ignore_missing_column = 2;
+
+  template<char ... trim_char_list>
+  struct trim_chars{
+  private:
+    constexpr static bool is_trim_char(char){
+      return false;
+    }
+
+    template<class ...OtherTrimChars>
+    constexpr static bool is_trim_char(char c, char trim_char, OtherTrimChars...other_trim_chars){
+      return c == trim_char || is_trim_char(c, other_trim_chars...);
+    }
+
+  public:
+    static void trim(char*&str_begin, char*&str_end){
+      while(str_begin != str_end && is_trim_char(*str_begin, trim_char_list...))
+        ++str_begin;
+      while(str_begin != str_end && is_trim_char(*(str_end-1), trim_char_list...))
+        --str_end;
+      *str_end = '\0';
+    }
+  };
+
+
+  struct no_comment{
+    static bool is_comment(const char*){
+      return false;
+    }
+  };
+
+  template<char ... comment_start_char_list>
+  struct single_line_comment{
+  private:
+    constexpr static bool is_comment_start_char(char){
+      return false;
+    }
+
+    template<class ...OtherCommentStartChars>
+    constexpr static bool is_comment_start_char(char c, char comment_start_char, OtherCommentStartChars...other_comment_start_chars){
+      return c == comment_start_char || is_comment_start_char(c, other_comment_start_chars...);
+    }
+
+  public:
+
+    static bool is_comment(const char*line){
+      return is_comment_start_char(*line, comment_start_char_list...);
+    }
+  };
+
+  struct empty_line_comment{
+    static bool is_comment(const char*line){
+      if(*line == '\0')
+        return true;
+      while(*line == ' ' || *line == '\t'){
+        ++line;
+        if(*line == 0)
+          return true;
+      }
+      return false;
+    }
+  };
+
+  template<char ... comment_start_char_list>
+  struct single_and_empty_line_comment{
+    static bool is_comment(const char*line){
+      return single_line_comment<comment_start_char_list...>::is_comment(line) || empty_line_comment::is_comment(line);
+    }
+  };
+
+  template<char sep>
+  struct no_quote_escape{
+    static const char*find_next_column_end(const char*col_begin){
+      while(*col_begin != sep && *col_begin != '\0')
+        ++col_begin;
+      return col_begin;
+    }
+
+    static void unescape(char*&, char*&){
+
+    }
+  };
+
+  template<char sep, char quote>
+  struct double_quote_escape{
+    static const char*find_next_column_end(const char*col_begin){
+      while(*col_begin != sep && *col_begin != '\0')
+        if(*col_begin != quote)
+          ++col_begin;
+        else{
+          do{
+            ++col_begin;
+            while(*col_begin != quote){
+              if(*col_begin == '\0')
+                throw error::escaped_string_not_closed();
+              ++col_begin;
+            }
+            ++col_begin;
+          }while(*col_begin == quote);
+        }
+      return col_begin;
+    }
+
+    static void unescape(char*&col_begin, char*&col_end){
+      if(col_end - col_begin >= 2){
+        if(*col_begin == quote && *(col_end-1) == quote){
+          ++col_begin;
+          --col_end;
+          char*out = col_begin;
+          for(char*in = col_begin; in!=col_end; ++in){
+            if(*in == quote && (in+1) != col_end && *(in+1) == quote){
+              ++in;
+            }
+            *out = *in;
+            ++out;
+          }
+          col_end = out;
+          *col_end = '\0';
+        }
+      }
+
+    }
+  };
+
+  struct throw_on_overflow{
+    template<class T>
+    static void on_overflow(T&){
+      throw error::integer_overflow();
+    }
+
+    template<class T>
+    static void on_underflow(T&){
+      throw error::integer_underflow();
+    }
+  };
+
+  struct ignore_overflow{
+    template<class T>
+    static void on_overflow(T&){}
+
+    template<class T>
+    static void on_underflow(T&){}
+  };
+
+  struct set_to_max_on_overflow{
+    template<class T>
+    static void on_overflow(T&x){
+      // using (std::numeric_limits<T>::max) instead of std::numeric_limits<T>::max
+      // to make code including windows.h with its max macro happy
+      x = (std::numeric_limits<T>::max)();
+    }
+
+    template<class T>
+    static void on_underflow(T&x){
+      x = (std::numeric_limits<T>::min)();
+    }
+  };
+
+
+  namespace detail{
+    template<class quote_policy>
+    void chop_next_column(
+      char*&line, char*&col_begin, char*&col_end
+    ){
+      assert(line != nullptr);
+
+      col_begin = line;
+      // the col_begin + (... - col_begin) removes the constness
+      col_end = col_begin + (quote_policy::find_next_column_end(col_begin) - col_begin);
+
+      if(*col_end == '\0'){
+        line = nullptr;
+      }else{
+        *col_end = '\0';
+        line = col_end + 1;
+      }
+    }
+
+    template<class trim_policy, class quote_policy>
+    void parse_line(
+      char*line,
+      char**sorted_col,
+      const std::vector<int>&col_order
+    ){
+      for (int i : col_order) {
+        if(line == nullptr)
+          throw ::io::error::too_few_columns();
+        char*col_begin, *col_end;
+        chop_next_column<quote_policy>(line, col_begin, col_end);
+
+        if (i != -1) {
+          trim_policy::trim(col_begin, col_end);
+          quote_policy::unescape(col_begin, col_end);
+
+          sorted_col[i] = col_begin;
+        }
+      }
+      if(line != nullptr)
+        throw ::io::error::too_many_columns();
+    }
+
+    template<unsigned column_count, class trim_policy, class quote_policy>
+    void parse_header_line(
+      char*line,
+      std::vector<int>&col_order,
+      const std::string*col_name,
+      ignore_column ignore_policy
+    ){
+      col_order.clear();
+
+      bool found[column_count];
+      std::fill(found, found + column_count, false);
+      while(line){
+        char*col_begin,*col_end;
+        chop_next_column<quote_policy>(line, col_begin, col_end);
+
+        trim_policy::trim(col_begin, col_end);
+        quote_policy::unescape(col_begin, col_end);
+
+        for(unsigned i=0; i<column_count; ++i)
+          if(col_begin == col_name[i]){
+            if(found[i]){
+              error::duplicated_column_in_header err;
+              err.set_column_name(col_begin);
+              throw err;
+            }
+            found[i] = true;
+            col_order.push_back(i);
+            col_begin = 0;
+            break;
+          }
+        if(col_begin != 0){
+          if(ignore_policy & ::io::ignore_extra_column)
+            col_order.push_back(-1);
+          else{
+            error::extra_column_in_header err;
+            err.set_column_name(col_begin);
+            throw err;
+          }
+        }
+      }
+      if(!(ignore_policy & ::io::ignore_missing_column)){
+        for(unsigned i=0; i<column_count; ++i){
+          if(!found[i]){
+            error::missing_column_in_header err;
+            err.set_column_name(col_name[i].c_str());
+            throw err;
+          }
+        }
+      }
+    }
+
+    template<class overflow_policy>
+    void parse(char*col, char &x){
+      if(!*col)
+        throw error::invalid_single_character();
+      x = *col;
+      ++col;
+      if(*col)
+        throw error::invalid_single_character();
+    }
+
+    template<class overflow_policy>
+    void parse(char*col, std::string&x){
+      x = col;
+    }
+
+    template<class overflow_policy>
+    void parse(char*col, const char*&x){
+      x = col;
+    }
+
+    template<class overflow_policy>
+    void parse(char*col, char*&x){
+      x = col;
+    }
+
+    template<class overflow_policy, class T>
+    void parse_unsigned_integer(const char*col, T&x){
+      x = 0;
+      while(*col != '\0'){
+        if('0' <= *col && *col <= '9'){
+          T y = *col - '0';
+          if(x > ((std::numeric_limits<T>::max)()-y)/10){
+            overflow_policy::on_overflow(x);
+            return;
+          }
+          x = 10*x+y;
+        }else
+          throw error::no_digit();
+        ++col;
+      }
+    }
+
+    template<class overflow_policy>void parse(char*col, unsigned char &x)
+      {parse_unsigned_integer<overflow_policy>(col, x);}
+    template<class overflow_policy>void parse(char*col, unsigned short &x)
+      {parse_unsigned_integer<overflow_policy>(col, x);}
+    template<class overflow_policy>void parse(char*col, unsigned int &x)
+      {parse_unsigned_integer<overflow_policy>(col, x);}
+    template<class overflow_policy>void parse(char*col, unsigned long &x)
+      {parse_unsigned_integer<overflow_policy>(col, x);}
+    template<class overflow_policy>void parse(char*col, unsigned long long &x)
+      {parse_unsigned_integer<overflow_policy>(col, x);}
+
+    template<class overflow_policy, class T>
+    void parse_signed_integer(const char*col, T&x){
+      if(*col == '-'){
+        ++col;
+
+        x = 0;
+        while(*col != '\0'){
+          if('0' <= *col && *col <= '9'){
+            T y = *col - '0';
+            if(x < ((std::numeric_limits<T>::min)()+y)/10){
+              overflow_policy::on_underflow(x);
+              return;
+            }
+            x = 10*x-y;
+          }else
+            throw error::no_digit();
+          ++col;
+        }
+        return;
+      }else if(*col == '+')
+        ++col;
+      parse_unsigned_integer<overflow_policy>(col, x);
+    }
+
+    template<class overflow_policy>void parse(char*col, signed char &x)
+      {parse_signed_integer<overflow_policy>(col, x);}
+    template<class overflow_policy>void parse(char*col, signed short &x)
+      {parse_signed_integer<overflow_policy>(col, x);}
+    template<class overflow_policy>void parse(char*col, signed int &x)
+      {parse_signed_integer<overflow_policy>(col, x);}
+    template<class overflow_policy>void parse(char*col, signed long &x)
+      {parse_signed_integer<overflow_policy>(col, x);}
+    template<class overflow_policy>void parse(char*col, signed long long &x)
+      {parse_signed_integer<overflow_policy>(col, x);}
+
+    template<class T>
+    void parse_float(const char*col, T&x){
+      bool is_neg = false;
+      if(*col == '-'){
+        is_neg = true;
+        ++col;
+      }else if(*col == '+')
+        ++col;
+
+      x = 0;
+      while('0' <= *col && *col <= '9'){
+        int y = *col - '0';
+        x *= 10;
+        x += y;
+        ++col;
+      }
+
+      if(*col == '.'|| *col == ','){
+        ++col;
+        T pos = 1;
+        while('0' <= *col && *col <= '9'){
+          pos /= 10;
+          int y = *col - '0';
+          ++col;
+          x += y*pos;
+        }
+      }
+
+      if(*col == 'e' || *col == 'E'){
+        ++col;
+        int e;
+        parse_signed_integer<set_to_max_on_overflow>(col, e);
+
+        if(e != 0){
+          T base;
+          if(e < 0){
+            base = T(0.1);
+            e = -e;
+          }else{
+            base = T(10);
+          }
+
+          while(e != 1){
+            if((e & 1) == 0){
+              base = base*base;
+              e >>= 1;
+            }else{
+              x *= base;
+              --e;
+            }
+          }
+          x *= base;
+        }
+      }else{
+        if(*col != '\0')
+          throw error::no_digit();
+      }
+
+      if(is_neg)
+        x = -x;
+    }
+
+    template<class overflow_policy> void parse(char*col, float&x) { parse_float(col, x); }
+    template<class overflow_policy> void parse(char*col, double&x) { parse_float(col, x); }
+    template<class overflow_policy> void parse(char*col, long double&x) { parse_float(col, x); }
+
+    template<class overflow_policy, class T>
+    void parse(char*col, T&x){
+      // Mute unused variable compiler warning
+      (void)col;
+      (void)x;
+      // GCC evaluates "false" when reading the template and
+      // "sizeof(T)!=sizeof(T)" only when instantiating it. This is why
+      // this strange construct is used.
+      static_assert(sizeof(T)!=sizeof(T),
+        "Can not parse this type. Only builtin integrals, floats, char, char*, const char* and std::string are supported");
+    }
+
+  }
+
+  template<unsigned column_count,
+    class trim_policy = trim_chars<' ', '\t'>,
+    class quote_policy = no_quote_escape<','>,
+    class overflow_policy = throw_on_overflow,
+    class comment_policy = no_comment
+  >
+  class CSVReader{
+  private:
+    LineReader in;
+
+    char*row[column_count];
+    std::string column_names[column_count];
+
+    std::vector<int>col_order;
+
+    template<class ...ColNames>
+    void set_column_names(std::string s, ColNames...cols){
+      column_names[column_count-sizeof...(ColNames)-1] = std::move(s);
+      set_column_names(std::forward<ColNames>(cols)...);
+    }
+
+    void set_column_names(){}
+
+
+  public:
+    CSVReader() = delete;
+    CSVReader(const CSVReader&) = delete;
+    CSVReader&operator=(const CSVReader&);
+
+    template<class ...Args>
+    explicit CSVReader(Args&&...args):in(std::forward<Args>(args)...){
+      std::fill(row, row+column_count, nullptr);
+      col_order.resize(column_count);
+      for(unsigned i=0; i<column_count; ++i)
+        col_order[i] = i;
+      for(unsigned i=1; i<=column_count; ++i)
+        column_names[i-1] = "col"+std::to_string(i);
+    }
+
+    template<class ...ColNames>
+    void read_header(ignore_column ignore_policy, ColNames...cols){
+      static_assert(sizeof...(ColNames)>=column_count, "not enough column names specified");
+      static_assert(sizeof...(ColNames)<=column_count, "too many column names specified");
+      try{
+        set_column_names(std::forward<ColNames>(cols)...);
+
+        char*line;
+        do{
+          line = in.next_line();
+          if(!line)
+            throw error::header_missing();
+        }while(comment_policy::is_comment(line));
+
+        detail::parse_header_line
+          <column_count, trim_policy, quote_policy>
+          (line, col_order, column_names, ignore_policy);
+      }catch(error::with_file_name&err){
+        err.set_file_name(in.get_truncated_file_name());
+        throw;
+      }
+    }
+
+    template<class ...ColNames>
+    void set_header(ColNames...cols){
+      static_assert(sizeof...(ColNames)>=column_count,
+        "not enough column names specified");
+      static_assert(sizeof...(ColNames)<=column_count,
+        "too many column names specified");
+      set_column_names(std::forward<ColNames>(cols)...);
+      std::fill(row, row+column_count, nullptr);
+      col_order.resize(column_count);
+      for(unsigned i=0; i<column_count; ++i)
+        col_order[i] = i;
+    }
+
+    bool has_column(const std::string&name) const {
+      return col_order.end() != std::find(
+        col_order.begin(), col_order.end(),
+        std::find(std::begin(column_names), std::end(column_names), name)
+        - std::begin(column_names));
+    }
+
+    void set_file_name(const std::string&file_name){
+      in.set_file_name(file_name);
+    }
+
+    void set_file_name(const char*file_name){
+      in.set_file_name(file_name);
+    }
+
+    const char*get_truncated_file_name()const{
+      return in.get_truncated_file_name();
+    }
+
+    void set_file_line(unsigned file_line){
+      in.set_file_line(file_line);
+    }
+
+    unsigned get_file_line()const{
+      return in.get_file_line();
+    }
+
+  private:
+    void parse_helper(std::size_t){}
+
+    template<class T, class ...ColType>
+    void parse_helper(std::size_t r, T&t, ColType&...cols){
+      if(row[r]){
+        try{
+          try{
+            ::io::detail::parse<overflow_policy>(row[r], t);
+          }catch(error::with_column_content&err){
+            err.set_column_content(row[r]);
+            throw;
+          }
+        }catch(error::with_column_name&err){
+          err.set_column_name(column_names[r].c_str());
+          throw;
+        }
+      }
+      parse_helper(r+1, cols...);
+    }
+
+
+  public:
+    template<class ...ColType>
+    bool read_row(ColType& ...cols){
+      static_assert(sizeof...(ColType)>=column_count,
+        "not enough columns specified");
+      static_assert(sizeof...(ColType)<=column_count,
+        "too many columns specified");
+      try{
+        try{
+
+          char*line;
+          do{
+            line = in.next_line();
+            if(!line)
+              return false;
+          }while(comment_policy::is_comment(line));
+
+          detail::parse_line<trim_policy, quote_policy>
+            (line, row, col_order);
+
+          parse_helper(0, cols...);
+        }catch(error::with_file_name&err){
+          err.set_file_name(in.get_truncated_file_name());
+          throw;
+        }
+      }catch(error::with_file_line&err){
+        err.set_file_line(in.get_file_line());
+        throw;
+      }
+
+      return true;
+    }
+  };
+}
+#endif
+
diff --git a/src/s3select/include/encryption_internal.h b/src/s3select/include/encryption_internal.h
new file mode 100644
index 000000000..c8deee492
--- /dev/null
+++ b/src/s3select/include/encryption_internal.h
@@ -0,0 +1,114 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+//   http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#pragma once
+
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "parquet/properties.h"
+#include "parquet/types.h"
+
+using parquet::ParquetCipher;
+
+namespace parquet {
+namespace encryption {
+
+constexpr int kGcmTagLength = 16;
+constexpr int kNonceLength = 12;
+
+// Module types
+constexpr int8_t kFooter = 0;
+constexpr int8_t kColumnMetaData = 1;
+constexpr int8_t kDataPage = 2;
+constexpr int8_t kDictionaryPage = 3;
+constexpr int8_t kDataPageHeader = 4;
+constexpr int8_t kDictionaryPageHeader = 5;
+constexpr int8_t kColumnIndex = 6;
+constexpr int8_t kOffsetIndex = 7;
+
+/// Performs AES encryption operations with GCM or CTR ciphers.
+class AesEncryptor {
+ public:
+  static AesEncryptor* Make(ParquetCipher::type alg_id, int key_len, bool metadata,
+                            std::vector<AesEncryptor*>* all_encryptors);
+
+  ~AesEncryptor();
+
+  /// Size difference between plaintext and ciphertext, for this cipher.
+  int CiphertextSizeDelta();
+
+  /// Encrypts plaintext with the key and aad. Key length is passed only for validation.
+  /// If different from value in constructor, exception will be thrown.
+  int Encrypt(const uint8_t* plaintext, int plaintext_len, const uint8_t* key,
+              int key_len, const uint8_t* aad, int aad_len, uint8_t* ciphertext);
+
+  /// Encrypts plaintext footer, in order to compute footer signature (tag).
+  int SignedFooterEncrypt(const uint8_t* footer, int footer_len, const uint8_t* key,
+                          int key_len, const uint8_t* aad, int aad_len,
+                          const uint8_t* nonce, uint8_t* encrypted_footer);
+
+  void WipeOut();
+
+ private:
+  /// Can serve one key length only. Possible values: 16, 24, 32 bytes.
+  explicit AesEncryptor(ParquetCipher::type alg_id, int key_len, bool metadata);
+  // PIMPL Idiom
+  class AesEncryptorImpl;
+  std::unique_ptr<AesEncryptorImpl> impl_;
+};
+
+/// Performs AES decryption operations with GCM or CTR ciphers.
+class AesDecryptor {
+ public:
+  static AesDecryptor* Make(ParquetCipher::type alg_id, int key_len, bool metadata,
+                            std::vector<AesDecryptor*>* all_decryptors);
+
+  ~AesDecryptor();
+  void WipeOut();
+
+  /// Size difference between plaintext and ciphertext, for this cipher.
+  int CiphertextSizeDelta();
+
+  /// Decrypts ciphertext with the key and aad. Key length is passed only for
+  /// validation. If different from value in constructor, exception will be thrown.
+/// Performs AES decryption operations with GCM or CTR ciphers.
+class AesDecryptor {
+ public:
+  static AesDecryptor* Make(ParquetCipher::type alg_id, int key_len, bool metadata,
+                            std::vector<AesDecryptor*>* all_decryptors);
+
+  ~AesDecryptor();
+  void WipeOut();
+
+  /// Size difference between plaintext and ciphertext, for this cipher.
+  int CiphertextSizeDelta();
+
+  /// Decrypts ciphertext with the key and aad. Key length is passed only for
+  /// validation. If different from value in constructor, exception will be thrown.
+  int Decrypt(const uint8_t* ciphertext, int ciphertext_len, const uint8_t* key,
+              int key_len, const uint8_t* aad, int aad_len, uint8_t* plaintext);
+
+ private:
+  /// Can serve one key length only. Possible values: 16, 24, 32 bytes.
+  explicit AesDecryptor(ParquetCipher::type alg_id, int key_len, bool metadata);
+  // PIMPL Idiom
+  class AesDecryptorImpl;
+  std::unique_ptr<AesDecryptorImpl> impl_;
+};
+
+std::string CreateModuleAad(const std::string& file_aad, int8_t module_type,
+                            int16_t row_group_ordinal, int16_t column_ordinal,
+                            int16_t page_ordinal);
+
+std::string CreateFooterAad(const std::string& aad_prefix_bytes);
+
+// Update last two bytes of page (or page header) module AAD
+void QuickUpdatePageAad(const std::string& AAD, int16_t new_page_ordinal);
+
+// Wraps OpenSSL RAND_bytes function
+void RandBytes(unsigned char* buf, int num);
+
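+// A minimal sketch of how the AAD helpers compose (illustrative only; the
+// file_aad value is made up):
+//
+//   std::string file_aad = /* aad_prefix + aad_file_unique */ "...";
+//   std::string footer_aad = CreateFooterAad(file_aad);
+//   std::string page_aad =
+//       CreateModuleAad(file_aad, kDataPage, /*row_group_ordinal=*/0,
+//                       /*column_ordinal=*/2, /*page_ordinal=*/0);
+//   QuickUpdatePageAad(page_aad, /*new_page_ordinal=*/1);  // advance to next page
+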
+}  // namespace encryption
+}  // namespace parquet
diff --git a/src/s3select/include/internal_file_decryptor.h b/src/s3select/include/internal_file_decryptor.h
new file mode 100644
index 000000000..011c4acbe
--- /dev/null
+++ b/src/s3select/include/internal_file_decryptor.h
@@ -0,0 +1,121 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#pragma once
+
+#include <map>
+#include <memory>
+#include <string>
+#include <vector>
+
+#include "parquet/schema.h"
+
+namespace parquet {
+
+namespace encryption {
+class AesDecryptor;
+class AesEncryptor;
+}  // namespace encryption
+
+class FileDecryptionProperties;
+
+class PARQUET_EXPORT Decryptor {
+ public:
+  Decryptor(encryption::AesDecryptor* decryptor, const std::string& key,
+            const std::string& file_aad, const std::string& aad,
+            ::arrow::MemoryPool* pool);
+
+  const std::string& file_aad() const { return file_aad_; }
+  void UpdateAad(const std::string& aad) { aad_ = aad; }
+  ::arrow::MemoryPool* pool() { return pool_; }
+
+  int CiphertextSizeDelta();
+  int Decrypt(const uint8_t* ciphertext, int ciphertext_len, uint8_t* plaintext);
+
+ private:
+  encryption::AesDecryptor* aes_decryptor_;
+  std::string key_;
+  std::string file_aad_;
+  std::string aad_;
+  ::arrow::MemoryPool* pool_;
+};
+
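+// A minimal sketch of the intended call pattern (illustrative only; the
+// decryptor is assumed to come from InternalFileDecryptor below, and the
+// ciphertext buffer/length are made up):
+//
+//   decryptor->UpdateAad(page_aad);                      // module-specific AAD
+//   std::vector<uint8_t> out(ct_len);                    // >= plaintext size
+//   int n = decryptor->Decrypt(ct, ct_len, out.data());  // n: plaintext length
+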
+class InternalFileDecryptor {
+ public:
+  explicit InternalFileDecryptor(FileDecryptionProperties* properties,
+                                 const std::string& file_aad,
+                                 ParquetCipher::type algorithm,
+                                 const std::string& footer_key_metadata,
+                                 ::arrow::MemoryPool* pool);
+
+  std::string& file_aad() { return file_aad_; }
+
+  std::string GetFooterKey();
+
+  ParquetCipher::type algorithm() { return algorithm_; }
+
+  std::string& footer_key_metadata() { return footer_key_metadata_; }
+
+  FileDecryptionProperties* properties() { return properties_; }
+
+  void WipeOutDecryptionKeys();
+
+  ::arrow::MemoryPool* pool() { return pool_; }
+
+  std::shared_ptr<Decryptor> GetFooterDecryptor();
+  std::shared_ptr<Decryptor> GetFooterDecryptorForColumnMeta(const std::string& aad = "");
+  std::shared_ptr<Decryptor> GetFooterDecryptorForColumnData(const std::string& aad = "");
+  std::shared_ptr<Decryptor> GetColumnMetaDecryptor(
+      const std::string& column_path, const std::string& column_key_metadata,
+      const std::string& aad = "");
+  std::shared_ptr<Decryptor> GetColumnDataDecryptor(
+      const std::string& column_path, const std::string& column_key_metadata,
+      const std::string& aad = "");
+
+ private:
+  FileDecryptionProperties* properties_;
+  // Concatenation of aad_prefix (if exists) and aad_file_unique
+  std::string file_aad_;
+  std::map<std::string, std::shared_ptr<Decryptor>> column_data_map_;
+  std::map<std::string, std::shared_ptr<Decryptor>> column_metadata_map_;
+
+  std::shared_ptr<Decryptor> footer_metadata_decryptor_;
+  std::shared_ptr<Decryptor> footer_data_decryptor_;
+  ParquetCipher::type algorithm_;
+  std::string footer_key_metadata_;
+  std::vector<encryption::AesDecryptor*> all_decryptors_;
+
+  /// Key must be 16, 24 or 32 bytes in length. Thus there could be up to three
+  // types of meta_decryptors and data_decryptors.
+  std::unique_ptr<encryption::AesDecryptor> meta_decryptor_[3];
+  std::unique_ptr<encryption::AesDecryptor> data_decryptor_[3];
+
+  ::arrow::MemoryPool* pool_;
+
+  std::shared_ptr<Decryptor> GetFooterDecryptor(const std::string& aad, bool metadata);
+  std::shared_ptr<Decryptor> GetColumnDecryptor(const std::string& column_path,
+                                                const std::string& column_key_metadata,
+                                                const std::string& aad,
+                                                bool metadata = false);
+
+  encryption::AesDecryptor* GetMetaAesDecryptor(size_t key_size);
+  encryption::AesDecryptor* GetDataAesDecryptor(size_t key_size);
+
+  int MapKeyLenToDecryptorArrayIndex(int key_len);
+};
+
+}  // namespace parquet
diff --git a/src/s3select/include/s3select.h b/src/s3select/include/s3select.h
new file mode 100644
index 000000000..3ac111351
--- /dev/null
+++ b/src/s3select/include/s3select.h
@@ -0,0 +1,3153 @@
+#ifndef __S3SELECT__
+#define __S3SELECT__
+#define BOOST_SPIRIT_THREADSAFE
+#define CSV_IO_NO_THREAD
+
+#pragma once
+#define BOOST_BIND_GLOBAL_PLACEHOLDERS
+#include <boost/spirit/include/classic_core.hpp>
+#include <boost/algorithm/string.hpp>
+#include <iostream>
+#include <string>
+#include <list>
+#include <deque>
+#include "s3select_oper.h"
+#include "s3select_functions.h"
+#include "s3select_csv_parser.h"
+#include "s3select_json_parser.h"
+#include <boost/function.hpp>
+#include <boost/bind.hpp>
+#include <functional>
+
+#define _DEBUG_TERM {string token(a,b);std::cout << __FUNCTION__ << token << std::endl;}
+
+namespace s3selectEngine
+{
+
+/// AST builder
+
+class s3select_projections
+{
+
+private:
+  std::vector<base_statement*> m_projections;
+
+public:
+
+  std::vector<base_statement*>* get()
+  {
+    return &m_projections;
+  }
+
+};
+
+static s3select_reserved_word g_s3select_reserve_word;//read-only
+
+struct actionQ
+{
+// upon the parser accepting a token (let's say some number),
+// it pushes it into a dedicated queue; later those tokens are popped out to build some "higher" construct (let's say 1 + 2).
+// these containers are used only for the parsing phase and not for runtime.
+
+  std::vector<mulldiv_operation::muldiv_t> muldivQ;
+  std::vector<addsub_operation::addsub_op_t> addsubQ;
+  std::vector<arithmetic_operand::cmp_t> arithmetic_compareQ;
+  std::vector<logical_operand::oplog_t> logical_compareQ;
+  std::vector<base_statement*> exprQ;
+  std::vector<base_statement*> funcQ;
+  std::vector<base_statement*> whenThenQ;
+  std::vector<base_statement*> inPredicateQ;
+  base_statement* inMainArg;
+  std::vector<std::string> dataTypeQ;
+  std::vector<std::string> trimTypeQ;
+  std::vector<std::string> datePartQ;
+  projection_alias alias_map;
+  std::string from_clause;
+  std::vector<std::string> json_from_clause;
+  bool limit_op;
+  unsigned long limit;
+  std::string column_prefix;
+  std::string table_alias;
+  s3select_projections projections;
+
+  bool projection_or_predicate_state; //true->projection false->predicate(where-clause statement)
+  std::vector<base_statement*> predicate_columns;
+  std::vector<base_statement*> projections_columns;
+  base_statement* first_when_then_expr;
+
+  std::string json_array_name; // _1.a[ ] json_array_name = "a"; upon the parser scanning a correct json-path, json_array_name will contain the array name.
+  std::string json_object_name; // _1.b json_object_name = "b"; upon the parser scanning a correct json-path, json_object_name will contain the object name.
+  std::deque<size_t> json_array_index_number; // _1.a.c[ some integer number >=0 ]; upon the parser scanning a correct json-path, json_array_index_number will contain the array index,
+  // or, in the multidimensional case, a series of index numbers.
+
+  json_variable_access json_var_md;
+
+  std::vector<std::pair<json_variable_access*, size_t>> json_statement_variables_match_expression;//contains all statement variables and their search-expression for locating the correct values in the input document
+
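+  // A worked example of the queues above (illustrative only): while parsing
+  // "1 + 2 * 3", the builders push the literals onto exprQ, '*' onto muldivQ
+  // and '+' onto addsubQ; push_mulldiv_binop then pops {2,3} and '*' to form
+  // one mulldiv_operation node, and push_addsub_binop pops {1, (2*3)} and '+'
+  // to form the final addsub_operation node left on exprQ.
+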
+  actionQ(): inMainArg(0),from_clause("##"),limit_op(false),column_prefix("##"),table_alias("##"),projection_or_predicate_state(true),first_when_then_expr(nullptr){}
+
+  std::map<const void*, std::vector<const char*>*> x_map;
+
+  ~actionQ()
+  {
+    for(auto m : x_map)
+      delete m.second;
+  }
+
+  bool is_already_scanned(const void *th,const char *a)
+  {
+    //purpose: the caller gets an indication in case a specific builder scanned the same text(pointer) more than once
+    auto t = x_map.find(th);
+
+    if(t == x_map.end())
+    {
+      auto v = new std::vector<const char*>;
+      x_map.insert(std::pair<const void*, std::vector<const char*>*>(th,v));
+      v->push_back(a);
+    }
+    else
+    {
+      for(auto& c : *(t->second))
+      {
+        if( strcmp(c,a) == 0)
+          return true;
+      }
+      t->second->push_back(a);
+    }
+    return false;
+  }
+
+};
+
+class s3select;
+
+struct base_ast_builder
+{
+  void operator()(s3select* self, const char* a, const char* b) const;
+
+  virtual void builder(s3select* self, const char* a, const char* b) const = 0;
+
+  virtual ~base_ast_builder() = default;
+};
+
+struct push_from_clause : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_from_clause g_push_from_clause;
+
+struct push_json_from_clause : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_json_from_clause g_push_json_from_clause;
+
+struct push_limit_clause : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_limit_clause g_push_limit_clause;
+
+struct push_number : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_number g_push_number;
+
+struct push_float_number : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_float_number g_push_float_number;
+
+struct push_string : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_string g_push_string;
+
+struct push_variable : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_variable g_push_variable;
+
+struct push_json_variable : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_json_variable g_push_json_variable;
+
+/////////////////////////arithmetic unit /////////////////
+struct push_addsub : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_addsub g_push_addsub;
+
+struct push_mulop : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_mulop g_push_mulop;
+
+struct push_addsub_binop : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_addsub_binop g_push_addsub_binop;
+
+struct push_mulldiv_binop : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_mulldiv_binop g_push_mulldiv_binop;
+
+struct push_function_arg : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_function_arg g_push_function_arg;
+
+struct push_function_name :
public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_function_name g_push_function_name; + +struct push_function_expr : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_function_expr g_push_function_expr; + +struct push_cast_expr : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_cast_expr g_push_cast_expr; + +struct push_cast_decimal_expr : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_cast_decimal_expr g_push_cast_decimal_expr; + +struct push_decimal_operator : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_decimal_operator g_push_decimal_operator; + +struct push_data_type : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_data_type g_push_data_type; + +////////////////////// logical unit //////////////////////// + +struct push_compare_operator : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; + +}; +static push_compare_operator g_push_compare_operator; + +struct push_logical_operator : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; + +}; +static push_logical_operator g_push_logical_operator; + +struct push_arithmetic_predicate : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; + +}; +static push_arithmetic_predicate g_push_arithmetic_predicate; + +struct push_logical_predicate : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_logical_predicate g_push_logical_predicate; + +struct push_negation : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_negation g_push_negation; + +struct push_column_pos : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_column_pos g_push_column_pos; + +struct push_projection : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_projection g_push_projection; + +struct push_alias_projection : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_alias_projection g_push_alias_projection; + +struct push_between_filter : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_between_filter g_push_between_filter; + +struct push_not_between_filter : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_not_between_filter g_push_not_between_filter; + +struct push_in_predicate : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_in_predicate g_push_in_predicate; + +struct push_in_predicate_arguments : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_in_predicate_arguments g_push_in_predicate_arguments; + +struct push_in_predicate_first_arg : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_in_predicate_first_arg g_push_in_predicate_first_arg; + +struct 
push_like_predicate_escape : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_like_predicate_escape g_push_like_predicate_escape; + +struct push_like_predicate_no_escape : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_like_predicate_no_escape g_push_like_predicate_no_escape; + +struct push_is_null_predicate : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_is_null_predicate g_push_is_null_predicate; + +struct push_case_when_else : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_case_when_else g_push_case_when_else; + +struct push_when_condition_then : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_when_condition_then g_push_when_condition_then; + +struct push_when_value_then : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_when_value_then g_push_when_value_then; + +struct push_case_value_when_value_else : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_case_value_when_value_else g_push_case_value_when_value_else; + +struct push_substr_from : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_substr_from g_push_substr_from; + +struct push_substr_from_for : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_substr_from_for g_push_substr_from_for; + +struct push_trim_type : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_trim_type g_push_trim_type; + +struct push_trim_whitespace_both : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_trim_whitespace_both g_push_trim_whitespace_both; + +struct push_trim_expr_one_side_whitespace : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_trim_expr_one_side_whitespace g_push_trim_expr_one_side_whitespace; + +struct push_trim_expr_anychar_anyside : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_trim_expr_anychar_anyside g_push_trim_expr_anychar_anyside; + +struct push_datediff : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_datediff g_push_datediff; + +struct push_dateadd : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_dateadd g_push_dateadd; + +struct push_extract : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_extract g_push_extract; + +struct push_date_part : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_date_part g_push_date_part; + +struct push_time_to_string_constant : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static push_time_to_string_constant g_push_time_to_string_constant; + +struct push_time_to_string_dynamic : public base_ast_builder +{ + void builder(s3select* self, const char* a, const char* b) const; +}; +static 
push_time_to_string_dynamic g_push_time_to_string_dynamic;
+
+struct push_string_to_time_constant : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_string_to_time_constant g_push_string_to_time_constant;
+
+struct push_array_number : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_array_number g_push_array_number;
+
+struct push_json_array_name : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_json_array_name g_push_json_array_name;
+
+struct push_json_object : public base_ast_builder
+{
+  void builder(s3select* self, const char* a, const char* b) const;
+};
+static push_json_object g_push_json_object;
+
+struct s3select : public bsc::grammar<s3select>
+{
+private:
+
+  actionQ m_actionQ;
+  scratch_area m_sca;
+  s3select_functions m_s3select_functions;
+  std::string error_description;
+  s3select_allocator m_s3select_allocator;
+  bool aggr_flow = false;
+  bool m_json_query = false;
+  std::set<base_statement*> m_ast_nodes_to_delete;
+  base_function* m_to_timestamp_for_clean = nullptr;
+
+#define BOOST_BIND_ACTION( push_name ) boost::bind( &push_name::operator(), g_ ## push_name, const_cast<s3select*>(&self), _1, _2)
+
+public:
+
+  std::set<base_statement*>& get_ast_nodes_to_delete()
+  {
+    return m_ast_nodes_to_delete;
+  }
+
+  base_function* & get_to_timestamp_for_clean()
+  {
+    return m_to_timestamp_for_clean;
+  }
+
+  actionQ* getAction()
+  {
+    return &m_actionQ;
+  }
+
+  s3select_allocator* getAllocator()
+  {
+    return &m_s3select_allocator;
+  }
+
+  s3select_functions* getS3F()
+  {
+    return &m_s3select_functions;
+  }
+
+  int semantic()
+  {
+    for (const auto &e : get_projections_list())
+    {
+      e->resolve_node();
+      //while validating that there are no nested aggregation-function calls, it also validates that an aggregation call is legit.
+      if (e->is_nested_aggregate(aggr_flow))
+      {
+        error_description = "nested aggregation function is illegal i.e. sum(...sum ...)";
+        throw base_s3select_exception(error_description, base_s3select_exception::s3select_exp_en_t::FATAL);
+      }
+
+      e->push_for_cleanup(m_ast_nodes_to_delete);
+    }
+
+    if(get_filter())
+      get_filter()->push_for_cleanup(m_ast_nodes_to_delete);
+
+    if (aggr_flow == true)
+    {// at least one projection column contains an aggregation function
+      for (const auto &e : get_projections_list())
+      {
+        auto aggregate_expr = e->get_aggregate();
+
+        if (aggregate_expr)
+        {
+          //per each column, the subtree is marked to skip, except for the aggregation-function subtree.
+          //for example: substring( ... , sum() , count() ) :: the substring is marked to skip execution, while sum and count are not.
+          e->set_skip_non_aggregate(true);
+          e->mark_aggreagtion_subtree_to_execute();
+        }
+        else
+        {
+          //in case a projection column is not an aggregate, the projection column must *not* contain a reference to columns.
+          if(e->is_column_reference())
+          {
+            error_description = "illegal query; projection containing an aggregation function is not allowed together with a projection containing a column reference";
+            throw base_s3select_exception(error_description, base_s3select_exception::s3select_exp_en_t::FATAL);
+          }
+        }
+
+      }
+    }
+
+    m_json_query = (m_actionQ.json_from_clause.size() != 0);
+
+    return 0;
+  }
+
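+  // Examples of what semantic() rejects (illustrative comments only):
+  //   select sum(min(_1)) from s3object;       -- nested aggregate, FATAL
+  //   select sum(int(_1)), int(_2) from ...;   -- aggregate mixed with a bare
+  //                                               column reference, FATAL
+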
+  int parse_query(const char* input_query)
+  {
+    if(get_projections_list().empty() == false)
+    {
+      return 0; //already parsed
+    }
+
+    error_description.clear();
+    aggr_flow = false;
+
+    try
+    {
+      bsc::parse_info<> info = bsc::parse(input_query, *this, bsc::space_p);
+      auto query_parse_position = info.stop;
+
+      if (!info.full)
+      {
+        error_description = std::string("failure -->") + query_parse_position + std::string("<---");
+        return -1;
+      }
+
+      semantic();
+    }
+    catch (base_s3select_exception& e)
+    {
+      error_description.assign(e.what());
+      if (e.severity() == base_s3select_exception::s3select_exp_en_t::FATAL) //abort query execution
+      {
+        return -1;
+      }
+    }
+
+    return 0;
+  }
+
+  std::string get_error_description()
+  {
+    return error_description;
+  }
+
+  s3select()
+  {
+    m_s3select_functions.setAllocator(&m_s3select_allocator);
+    m_s3select_functions.set_AST_nodes_for_cleanup(&m_ast_nodes_to_delete);
+  }
+
+  bool is_semantic()//TBD traverse and validate semantics per all nodes
+  {
+    base_statement* cond = m_actionQ.exprQ.back();
+
+    return cond->semantic();
+  }
+
+  std::string get_from_clause() const
+  {
+    return m_actionQ.from_clause;
+  }
+
+  bool is_limit()
+  {
+    return m_actionQ.limit_op;
+  }
+
+  unsigned long get_limit()
+  {
+    return m_actionQ.limit;
+  }
+
+  void load_schema(std::vector<std::string>& scm)
+  {
+    int i = 0;
+    for (auto& c : scm)
+    {
+      m_sca.set_column_pos(c.c_str(), i++);
+    }
+  }
+
+  base_statement* get_filter()
+  {
+    if(m_actionQ.exprQ.empty())
+    {
+      return nullptr;
+    }
+
+    return m_actionQ.exprQ.back();
+  }
+
+  std::vector<base_statement*> get_projections_list()
+  {
+    return *m_actionQ.projections.get(); //TODO return COPY(?) or return evaluation results (list of class value{}) / return reference(?)
+  }
+
+  scratch_area* get_scratch_area()
+  {
+    return &m_sca;
+  }
+
+  projection_alias* get_aliases()
+  {
+    return &m_actionQ.alias_map;
+  }
+
+  std::vector<std::pair<json_variable_access*, size_t>>& get_json_variables_access()
+  {
+    return m_actionQ.json_statement_variables_match_expression;
+  }
+
+  bool is_aggregate_query() const
+  {
+    return aggr_flow == true;
+  }
+
+  bool is_json_query()
+  {
+    return m_json_query;
+  }
+
+  ~s3select()
+  {
+    for(auto it : m_ast_nodes_to_delete)
+    {
+      if (it->is_function())
+      {//when it is a function, call the implementation destructor
+        if(dynamic_cast<__function*>(it)->impl())
+          dynamic_cast<__function*>(it)->impl()->dtor();
+      }
+      //calling the destructor of the class-function itself, or the non-function destructor
+      it->dtor();
+    }
+
+    for(auto x: m_actionQ.json_statement_variables_match_expression)
+    {//the json_variable_access object is allocated by S3SELECT_NEW; this object contains an stl-vector that should be freed
+      x.first->~json_variable_access();
+    }
+    if(m_to_timestamp_for_clean)
+    {
+      m_to_timestamp_for_clean->dtor();
+    }
+  }
+
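+  // A minimal driver sketch using only the public API above (illustrative
+  // only; the query text is made up):
+  //
+  //   s3selectEngine::s3select query_ast;
+  //   if (query_ast.parse_query("select _1 from s3object where _2 > 5;") < 0)
+  //     std::cout << query_ast.get_error_description() << std::endl;
+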
+#define JSON_ROOT_OBJECT "s3object[*]"
+
+//the input is converted to lower case
+#define S3SELECT_KW( reserve_word ) bsc::as_lower_d[ reserve_word ]
+
+  template <typename ScannerT>
+  struct definition
+  {
+    explicit definition(s3select const& self)
+    {
+      ///// s3select syntax rules and actions for building AST
+
+      select_expr = select_expr_base_ >> bsc::lexeme_d[ *(bsc::str_p(" ")|bsc::str_p(";")) ];
+
+      select_expr_base_ = select_expr_base >> S3SELECT_KW("limit") >> (limit_number)[BOOST_BIND_ACTION(push_limit_clause)] | select_expr_base;
+
+      limit_number = (+bsc::digit_p);
+
+      select_expr_base = S3SELECT_KW("select") >> projections >> S3SELECT_KW("from") >> (from_expression)[BOOST_BIND_ACTION(push_from_clause)] >> !where_clause ;
+
+      projections = projection_expression >> *( ',' >> projection_expression) ;
+
+      projection_expression = (arithmetic_expression >> S3SELECT_KW("as") >> alias_name)[BOOST_BIND_ACTION(push_alias_projection)] |
+                              (arithmetic_expression)[BOOST_BIND_ACTION(push_projection)] |
+                              (arithmetic_predicate >> S3SELECT_KW("as") >> alias_name)[BOOST_BIND_ACTION(push_alias_projection)] |
+                              (arithmetic_predicate)[BOOST_BIND_ACTION(push_projection)] ;
+
+      alias_name = bsc::lexeme_d[(+bsc::alpha_p >> *bsc::digit_p)] ;
+
+      when_case_else_projection = (S3SELECT_KW("case") >> (+when_stmt) >> S3SELECT_KW("else") >> arithmetic_expression >> S3SELECT_KW("end")) [BOOST_BIND_ACTION(push_case_when_else)];
+
+      when_stmt = (S3SELECT_KW("when") >> condition_expression >> S3SELECT_KW("then") >> arithmetic_expression)[BOOST_BIND_ACTION(push_when_condition_then)];
+
+      when_case_value_when = (S3SELECT_KW("case") >> arithmetic_expression >>
+                              (+when_value_then) >> S3SELECT_KW("else") >> arithmetic_expression >> S3SELECT_KW("end")) [BOOST_BIND_ACTION(push_case_value_when_value_else)];
+
+      when_value_then = (S3SELECT_KW("when") >> arithmetic_expression >> S3SELECT_KW("then") >> arithmetic_expression)[BOOST_BIND_ACTION(push_when_value_then)];
+
+      from_expression = (s3_object >> variable_name ) | s3_object;
+
+      //the stdin and object_path are for debug purposes (not part of the specs)
+      s3_object = json_s3_object | S3SELECT_KW("stdin") | S3SELECT_KW("s3object") | object_path;
+
+      json_s3_object = ((S3SELECT_KW(JSON_ROOT_OBJECT)) >> *(bsc::str_p(".") >> json_path_element))[BOOST_BIND_ACTION(push_json_from_clause)];
+
+      json_path_element = bsc::lexeme_d[+( bsc::alnum_p | bsc::str_p("_")) ];
+
+      object_path = "/" >> *( fs_type >> "/") >> fs_type;
+
+      fs_type = bsc::lexeme_d[+( bsc::alnum_p | bsc::str_p(".") | bsc::str_p("_")) ];
+
+      where_clause = S3SELECT_KW("where") >> condition_expression;
+
+      condition_expression = arithmetic_predicate;
+
+      arithmetic_predicate = (S3SELECT_KW("not") >> logical_predicate)[BOOST_BIND_ACTION(push_negation)] | logical_predicate;
+
+      logical_predicate = (logical_and) >> *(or_op[BOOST_BIND_ACTION(push_logical_operator)] >> (logical_and)[BOOST_BIND_ACTION(push_logical_predicate)]);
+
+      logical_and = (cmp_operand) >> *(and_op[BOOST_BIND_ACTION(push_logical_operator)] >> (cmp_operand)[BOOST_BIND_ACTION(push_logical_predicate)]);
+
+      cmp_operand = special_predicates | (factor) >> *(arith_cmp[BOOST_BIND_ACTION(push_compare_operator)] >> (factor)[BOOST_BIND_ACTION(push_arithmetic_predicate)]);
+
+      special_predicates = (is_null) | (is_not_null) | (between_predicate) | (not_between) | (in_predicate) | (like_predicate);
+
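+      // Note on precedence encoded by the rules above (descriptive comment
+      // only): cmp_operand binds tighter than logical_and, which binds
+      // tighter than logical_predicate, so "a = 1 and b = 2 or c = 3"
+      // parses as ((a = 1 and b = 2) or (c = 3)).
+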
is_null = ((factor) >> S3SELECT_KW("is") >> S3SELECT_KW("null"))[BOOST_BIND_ACTION(push_is_null_predicate)]; + + is_not_null = ((factor) >> S3SELECT_KW("is") >> S3SELECT_KW("not") >> S3SELECT_KW("null"))[BOOST_BIND_ACTION(push_is_null_predicate)]; + + between_predicate = (arithmetic_expression >> S3SELECT_KW("between") >> arithmetic_expression >> S3SELECT_KW("and") >> arithmetic_expression)[BOOST_BIND_ACTION(push_between_filter)]; + + not_between = (arithmetic_expression >> S3SELECT_KW("not") >> S3SELECT_KW("between") >> arithmetic_expression >> S3SELECT_KW("and") >> arithmetic_expression)[BOOST_BIND_ACTION(push_not_between_filter)]; + + in_predicate = (arithmetic_expression >> S3SELECT_KW("in") >> '(' >> arithmetic_expression[BOOST_BIND_ACTION(push_in_predicate_first_arg)] >> *(',' >> arithmetic_expression[BOOST_BIND_ACTION(push_in_predicate_arguments)]) >> ')')[BOOST_BIND_ACTION(push_in_predicate)]; + + like_predicate = (like_predicate_escape) |(like_predicate_no_escape); + + like_predicate_no_escape = (arithmetic_expression >> S3SELECT_KW("like") >> arithmetic_expression)[BOOST_BIND_ACTION(push_like_predicate_no_escape)]; + + like_predicate_escape = (arithmetic_expression >> S3SELECT_KW("like") >> arithmetic_expression >> S3SELECT_KW("escape") >> arithmetic_expression)[BOOST_BIND_ACTION(push_like_predicate_escape)]; + + factor = arithmetic_expression | ( '(' >> arithmetic_predicate >> ')' ) ; + + arithmetic_expression = (addsub_operand >> *(addsubop_operator[BOOST_BIND_ACTION(push_addsub)] >> addsub_operand[BOOST_BIND_ACTION(push_addsub_binop)] )); + + addsub_operand = (mulldiv_operand >> *(muldiv_operator[BOOST_BIND_ACTION(push_mulop)] >> mulldiv_operand[BOOST_BIND_ACTION(push_mulldiv_binop)] ));// this non-terminal gives precedense to mull/div + + mulldiv_operand = arithmetic_argument | ('(' >> (arithmetic_expression) >> ')') ; + + list_of_function_arguments = (arithmetic_expression)[BOOST_BIND_ACTION(push_function_arg)] >> *(',' >> (arithmetic_expression)[BOOST_BIND_ACTION(push_function_arg)]); + + reserved_function_names = (S3SELECT_KW("when")|S3SELECT_KW("case")|S3SELECT_KW("then")|S3SELECT_KW("not")|S3SELECT_KW("limit")|S3SELECT_KW("where")|S3SELECT_KW("in")|S3SELECT_KW("between") | + S3SELECT_KW("like")|S3SELECT_KW("is") ); + + function = ( ((variable_name) >> '(' )[BOOST_BIND_ACTION(push_function_name)] >> !list_of_function_arguments >> ')')[BOOST_BIND_ACTION(push_function_expr)]; + + arithmetic_argument = (float_number)[BOOST_BIND_ACTION(push_float_number)] | (number)[BOOST_BIND_ACTION(push_number)] | (json_variable_name)[BOOST_BIND_ACTION(push_json_variable)] | + (column_pos)[BOOST_BIND_ACTION(push_column_pos)] | + (string)[BOOST_BIND_ACTION(push_string)] | (backtick_string) | (datediff) | (dateadd) | (extract) | (time_to_string_constant) | (time_to_string_dynamic) | + (cast) | (substr) | (trim) | (when_case_value_when) | (when_case_else_projection) | + (function) | (variable)[BOOST_BIND_ACTION(push_variable)]; //function is pushed by right-term + + cast = cast_as_data_type | cast_as_decimal_expr ; + + cast_as_data_type = (S3SELECT_KW("cast") >> '(' >> factor >> S3SELECT_KW("as") >> (data_type) >> ')') [BOOST_BIND_ACTION(push_cast_expr)]; + + cast_as_decimal_expr = (S3SELECT_KW("cast") >> '(' >> factor >> S3SELECT_KW("as") >> decimal_operator >> ')') [BOOST_BIND_ACTION(push_cast_decimal_expr)]; + + decimal_operator = (S3SELECT_KW("decimal") >> '(' >> (number)[BOOST_BIND_ACTION(push_number)] >> ',' >> (number)[BOOST_BIND_ACTION(push_number)] >> ')') + 
[BOOST_BIND_ACTION(push_decimal_operator)]; + + data_type = (S3SELECT_KW("int") | S3SELECT_KW("float") | S3SELECT_KW("string") | S3SELECT_KW("timestamp") | S3SELECT_KW("bool"))[BOOST_BIND_ACTION(push_data_type)]; + + substr = (substr_from) | (substr_from_for); + + substr_from = (S3SELECT_KW("substring") >> '(' >> (arithmetic_expression >> S3SELECT_KW("from") >> arithmetic_expression) >> ')') [BOOST_BIND_ACTION(push_substr_from)]; + + substr_from_for = (S3SELECT_KW("substring") >> '(' >> (arithmetic_expression >> S3SELECT_KW("from") >> arithmetic_expression >> S3SELECT_KW("for") >> arithmetic_expression) >> ')') [BOOST_BIND_ACTION(push_substr_from_for)]; + + trim = (trim_whitespace_both) | (trim_one_side_whitespace) | (trim_anychar_anyside); + + trim_one_side_whitespace = (S3SELECT_KW("trim") >> '(' >> (trim_type)[BOOST_BIND_ACTION(push_trim_type)] >> arithmetic_expression >> ')') [BOOST_BIND_ACTION(push_trim_expr_one_side_whitespace)]; + + trim_whitespace_both = (S3SELECT_KW("trim") >> '(' >> arithmetic_expression >> ')') [BOOST_BIND_ACTION(push_trim_whitespace_both)]; + + trim_anychar_anyside = (S3SELECT_KW("trim") >> '(' >> ((trim_remove_type)[BOOST_BIND_ACTION(push_trim_type)] >> arithmetic_expression >> S3SELECT_KW("from") >> arithmetic_expression) >> ')') [BOOST_BIND_ACTION(push_trim_expr_anychar_anyside)]; + + trim_type = ((S3SELECT_KW("leading") >> S3SELECT_KW("from")) | ( S3SELECT_KW("trailing") >> S3SELECT_KW("from")) | (S3SELECT_KW("both") >> S3SELECT_KW("from")) | S3SELECT_KW("from") ); + + trim_remove_type = (S3SELECT_KW("leading") | S3SELECT_KW("trailing") | S3SELECT_KW("both") ); + + datediff = (S3SELECT_KW("date_diff") >> '(' >> date_part >> ',' >> arithmetic_expression >> ',' >> arithmetic_expression >> ')') [BOOST_BIND_ACTION(push_datediff)]; + + dateadd = (S3SELECT_KW("date_add") >> '(' >> date_part >> ',' >> arithmetic_expression >> ',' >> arithmetic_expression >> ')') [BOOST_BIND_ACTION(push_dateadd)]; + + extract = (S3SELECT_KW("extract") >> '(' >> (date_part_extract)[BOOST_BIND_ACTION(push_date_part)] >> S3SELECT_KW("from") >> arithmetic_expression >> ')') [BOOST_BIND_ACTION(push_extract)]; + + date_part = (S3SELECT_KW("year") | S3SELECT_KW("month") | S3SELECT_KW("day") | S3SELECT_KW("hour") | S3SELECT_KW("minute") | S3SELECT_KW("second")) [BOOST_BIND_ACTION(push_date_part)]; + + date_part_extract = ((date_part) | S3SELECT_KW("week") | S3SELECT_KW("timezone_hour") | S3SELECT_KW("timezone_minute")); + + time_to_string_constant = (S3SELECT_KW("to_string") >> '(' >> arithmetic_expression >> ',' >> (string)[BOOST_BIND_ACTION(push_string)] >> ')') [BOOST_BIND_ACTION(push_time_to_string_constant)]; + + time_to_string_dynamic = (S3SELECT_KW("to_string") >> '(' >> arithmetic_expression >> ',' >> arithmetic_expression >> ')') [BOOST_BIND_ACTION(push_time_to_string_dynamic)]; + + number = bsc::int_p; + + float_number = bsc::real_p; + + string = (bsc::str_p("\"") >> *( bsc::anychar_p - bsc::str_p("\"") ) >> bsc::str_p("\"")) | (bsc::str_p("\'") >> *( bsc::anychar_p - bsc::str_p("\'") ) >> bsc::str_p("\'")); + + backtick_string = (bsc::str_p("`") >> *( bsc::anychar_p - bsc::str_p("`") ) >> bsc::str_p("`")) [BOOST_BIND_ACTION(push_string_to_time_constant)]; + + column_pos = (variable_name >> "." 
>> column_pos_name) | column_pos_name; //TODO what about space
+
+      column_pos_name = ('_'>>+(bsc::digit_p) ) | '*' ;
+
+      muldiv_operator = bsc::str_p("*") | bsc::str_p("/") | bsc::str_p("^") | bsc::str_p("%");// has precedence
+
+      addsubop_operator = bsc::str_p("+") | bsc::str_p("-");
+
+      arith_cmp = bsc::str_p("<>") | bsc::str_p(">=") | bsc::str_p("<=") | bsc::str_p("=") | bsc::str_p("<") | bsc::str_p(">") | bsc::str_p("!=");
+
+      and_op = S3SELECT_KW("and");
+
+      or_op = S3SELECT_KW("or");
+
+      variable_name = bsc::lexeme_d[(+bsc::alpha_p >> *( bsc::alpha_p | bsc::digit_p | '_') ) - reserved_function_names];
+
+      variable = (variable_name >> "." >> variable_name) | variable_name;
+
+      json_variable_name = bsc::str_p("_1") >> +("." >> (json_array | json_object) );
+
+      json_object = (variable_name)[BOOST_BIND_ACTION(push_json_object)];
+
+      json_array = (variable_name >> +(bsc::str_p("[") >> number[BOOST_BIND_ACTION(push_array_number)] >> bsc::str_p("]")) )[BOOST_BIND_ACTION(push_json_array_name)];
+    }
+
+    bsc::rule<ScannerT> cast, data_type, variable, json_variable_name, variable_name, select_expr, select_expr_base, select_expr_base_, s3_object, where_clause, limit_number;
+    bsc::rule<ScannerT> number, float_number, string, backtick_string, from_expression, cast_as_data_type, cast_as_decimal_expr, decimal_operator;
+    bsc::rule<ScannerT> cmp_operand, arith_cmp, condition_expression, arithmetic_predicate, logical_predicate, factor;
+    bsc::rule<ScannerT> trim, trim_whitespace_both, trim_one_side_whitespace, trim_anychar_anyside, trim_type, trim_remove_type, substr, substr_from, substr_from_for;
+    bsc::rule<ScannerT> datediff, dateadd, extract, date_part, date_part_extract, time_to_string_constant, time_to_string_dynamic;
+    bsc::rule<ScannerT> special_predicates, between_predicate, not_between, in_predicate, like_predicate, like_predicate_escape, like_predicate_no_escape, is_null, is_not_null;
+    bsc::rule<ScannerT> muldiv_operator, addsubop_operator, function, arithmetic_expression, addsub_operand, list_of_function_arguments, arithmetic_argument, mulldiv_operand, reserved_function_names;
+    bsc::rule<ScannerT> fs_type, object_path, json_s3_object, json_path_element, json_object, json_array;
+    bsc::rule<ScannerT> projections, projection_expression, alias_name, column_pos, column_pos_name;
+    bsc::rule<ScannerT> when_case_else_projection, when_case_value_when, when_stmt, when_value_then;
+    bsc::rule<ScannerT> logical_and, and_op, or_op;
+
+    bsc::rule<ScannerT> const& start() const
+    {
+      return select_expr ;
+    }
+  };
+};
+
+void base_ast_builder::operator()(s3select *self, const char *a, const char *b) const
+{
+  //the purpose of the following procedure is to bypass the boost::spirit rescan (calling the bind-action more than once for the same text),
+  //which causes wrong AST creation (and later false execution).
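+  //for example (rule names here are illustrative, not rules from this grammar):
+  //in an alternative such as (A >> B)[action] | (A >> C), classic spirit may
+  //match A, fire the action, backtrack and match again; the is_already_scanned()
+  //check below makes each builder idempotent per text span.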
+ if (self->getAction()->is_already_scanned((void *)(this), const_cast(a))) + return; + + builder(self, a, b); +} + +void push_from_clause::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b),table_name,alias_name; + + //should search for generic space + if(token.find(' ') != std::string::npos) + { + size_t pos = token.find(' '); + table_name = token.substr(0,pos); + + pos = token.rfind(' '); + alias_name = token.substr(pos+1,token.size()); + + self->getAction()->table_alias = alias_name; + + if(self->getAction()->column_prefix != "##" && self->getAction()->table_alias != self->getAction()->column_prefix) + { + throw base_s3select_exception(std::string("query can not contain more then a single table-alias"), base_s3select_exception::s3select_exp_en_t::FATAL); + } + + token = table_name; + } + + self->getAction()->from_clause = token; + + self->getAction()->exprQ.clear(); +} + +void push_json_from_clause::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b),table_name,alias_name; + + //TODO handle the star-operation ('*') in from-clause. build the parameters for json-reader search-api's. + std::vector variable_key_path; + const char* delimiter = "."; + auto pos = token.find(delimiter); + + if(pos != std::string::npos) + { + token = token.substr(strlen(JSON_ROOT_OBJECT)+1,token.size()); + pos = token.find(delimiter); + do + { + variable_key_path.push_back(token.substr(0,pos)); + if(pos != std::string::npos) + token = token.substr(pos+1,token.size()); + else + token = ""; + pos = token.find(delimiter); + }while(token.size()); + } + else + { + variable_key_path.push_back(JSON_ROOT_OBJECT); + } + + self->getAction()->json_from_clause = variable_key_path; +} + +void push_limit_clause::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + self->getAction()->limit_op = true; + try + { + self->getAction()->limit = std::stoul(token); + } + catch(std::invalid_argument& e) + { + throw base_s3select_exception(std::string("Invalid argument "), base_s3select_exception::s3select_exp_en_t::FATAL); + } + catch(std::out_of_range& e) + { + throw base_s3select_exception(std::string("Out of range "), base_s3select_exception::s3select_exp_en_t::FATAL); + } +} + +void push_number::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + variable* v = S3SELECT_NEW(self, variable, atoi(token.c_str())); + + self->getAction()->exprQ.push_back(v); +} + +void push_float_number::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + //the parser for float(real_p) is accepting also integers, thus "blocking" integer acceptence and all are float. 
+ bsc::parse_info<> info = bsc::parse(token.c_str(), bsc::int_p, bsc::space_p); + + if (!info.full) + { + char* perr; + double d = strtod(token.c_str(), &perr); + variable* v = S3SELECT_NEW(self, variable, d); + + self->getAction()->exprQ.push_back(v); + } + else + { + variable* v = S3SELECT_NEW(self, variable, atoi(token.c_str())); + + self->getAction()->exprQ.push_back(v); + } +} + +void push_string::builder(s3select* self, const char* a, const char* b) const +{ + a++; + b--; // remove double quotes + std::string token(a, b); + + variable* v = S3SELECT_NEW(self, variable, token, variable::var_t::COLUMN_VALUE); + + self->getAction()->exprQ.push_back(v); +} + +void push_variable::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + variable* v = nullptr; + + if (g_s3select_reserve_word.is_reserved_word(token)) + { + if (g_s3select_reserve_word.get_reserved_word(token) == s3select_reserved_word::reserve_word_en_t::S3S_NULL) + { + v = S3SELECT_NEW(self, variable, s3select_reserved_word::reserve_word_en_t::S3S_NULL); + } + else if (g_s3select_reserve_word.get_reserved_word(token) == s3select_reserved_word::reserve_word_en_t::S3S_NAN) + { + v = S3SELECT_NEW(self, variable, s3select_reserved_word::reserve_word_en_t::S3S_NAN); + } + else if (g_s3select_reserve_word.get_reserved_word(token) == s3select_reserved_word::reserve_word_en_t::S3S_FALSE) + { + v = S3SELECT_NEW(self, variable, s3select_reserved_word::reserve_word_en_t::S3S_FALSE); + } + else if (g_s3select_reserve_word.get_reserved_word(token) == s3select_reserved_word::reserve_word_en_t::S3S_TRUE) + { + v = S3SELECT_NEW(self, variable, s3select_reserved_word::reserve_word_en_t::S3S_TRUE); + } + else + { + v = S3SELECT_NEW(self, variable, s3select_reserved_word::reserve_word_en_t::NA); + } + + } + else + { + size_t pos = token.find('.'); + std::string alias_name; + if(pos != std::string::npos) + { + alias_name = token.substr(0,pos); + pos ++; + token = token.substr(pos,token.size()); + + if(self->getAction()->column_prefix != "##" && alias_name != self->getAction()->column_prefix) + { + throw base_s3select_exception(std::string("query can not contain more then a single table-alias"), base_s3select_exception::s3select_exp_en_t::FATAL); + } + + self->getAction()->column_prefix = alias_name; + } + v = S3SELECT_NEW(self, variable, token); + } + + self->getAction()->exprQ.push_back(v); +} + +void push_json_variable::builder(s3select* self, const char* a, const char* b) const +{//purpose: handle the use case of json-variable structure (_1.a.b.c) + + std::string token(a, b); + std::vector variable_key_path; + + //the following flow determine the index per json variable reside on statement. + //per each discovered json_variable, it search the json-variables-vector whether it already exists. + //in case it is exist, it uses its index (position in vector) + //in case it's not exist its pushes the variable into vector. + //the json-index is used upon updating the scratch area or searching for a specific json-variable value. 
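+  //for example (illustrative): for "select _1.a.b[1] ...", the token is
+  //"_1.a.b[1]"; the builders record the key-path {a,b} plus array index 1 in
+  //json_var_md, and the variable keeps json_index as its handle into
+  //json_statement_variables_match_expression.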
+ + size_t json_index=self->getAction()->json_statement_variables_match_expression.size(); + variable* v = nullptr; + json_variable_access* ja = S3SELECT_NEW(self, json_variable_access); + *ja = self->getAction()->json_var_md; + self->getAction()->json_statement_variables_match_expression.push_back(std::pair(ja,json_index)); + + v = S3SELECT_NEW(self, variable, token, variable::var_t::JSON_VARIABLE, json_index); + self->getAction()->exprQ.push_back(v); + + self->getAction()->json_var_md.clear(); +} + +void push_array_number::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + //DEBUG - TEMP std::cout << "push_array_number " << token << std::endl; + + self->getAction()->json_array_index_number.push_back(std::stoll(token.c_str())); +} + +void push_json_array_name::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + size_t found = token.find("["); + std::string array_name = token.substr(0,found); + + //DEBUG - TEMP std::cout << "push_json_array_name " << array_name << std::endl; + + //remove white-space + array_name.erase(std::remove_if(array_name.begin(), + array_name.end(), + [](unsigned char x){return std::isspace(x);}), + array_name.end()); + + std::vector json_path; + std::vector empty = {}; + json_path.push_back(array_name); + + self->getAction()->json_var_md.push_variable_state(json_path, -1);//pushing the array-name, {-1} means, search for object-name + + while(self->getAction()->json_array_index_number.size()) + { + self->getAction()->json_var_md.push_variable_state(empty, self->getAction()->json_array_index_number.front());//pushing empty and number>=0, means array-access + self->getAction()->json_array_index_number.pop_front(); + } +} + +void push_json_object::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + //DEBUG - TEMP std::cout << "push_json_object " << token << std::endl; + + self->getAction()->json_object_name = token; + std::vector json_path; + json_path.push_back(token); + + self->getAction()->json_var_md.push_variable_state(json_path, -1); +} + +void push_addsub::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + if (token == "+") + { + self->getAction()->addsubQ.push_back(addsub_operation::addsub_op_t::ADD); + } + else + { + self->getAction()->addsubQ.push_back(addsub_operation::addsub_op_t::SUB); + } +} + +void push_mulop::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + if (token == "*") + { + self->getAction()->muldivQ.push_back(mulldiv_operation::muldiv_t::MULL); + } + else if (token == "/") + { + self->getAction()->muldivQ.push_back(mulldiv_operation::muldiv_t::DIV); + } + else if(token == "^") + { + self->getAction()->muldivQ.push_back(mulldiv_operation::muldiv_t::POW); + } + else + { + self->getAction()->muldivQ.push_back(mulldiv_operation::muldiv_t::MOD); + } +} + +void push_addsub_binop::builder(s3select* self, [[maybe_unused]] const char* a,[[maybe_unused]] const char* b) const +{ + base_statement* l = nullptr, *r = nullptr; + + r = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + l = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + addsub_operation::addsub_op_t o = self->getAction()->addsubQ.back(); + self->getAction()->addsubQ.pop_back(); + addsub_operation* as = S3SELECT_NEW(self, addsub_operation, l, o, r); + self->getAction()->exprQ.push_back(as); +} + +void push_mulldiv_binop::builder(s3select* 
self, [[maybe_unused]] const char* a, [[maybe_unused]] const char* b) const +{ + base_statement* vl = nullptr, *vr = nullptr; + + vr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + vl = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + mulldiv_operation::muldiv_t o = self->getAction()->muldivQ.back(); + self->getAction()->muldivQ.pop_back(); + mulldiv_operation* f = S3SELECT_NEW(self, mulldiv_operation, vl, o, vr); + self->getAction()->exprQ.push_back(f); +} + +void push_function_arg::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + base_statement* be = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + base_statement* f = self->getAction()->funcQ.back(); + + if (dynamic_cast<__function*>(f)) + { + dynamic_cast<__function*>(f)->push_argument(be); + } +} + +void push_function_name::builder(s3select* self, const char* a, const char* b) const +{ + b--; + while (*b == '(' || *b == ' ') + { + b--; //point to function-name + } + + std::string fn; + fn.assign(a, b - a + 1); + + __function* func = S3SELECT_NEW(self, __function, fn.c_str(), self->getS3F()); + self->getAction()->funcQ.push_back(func); +} + +void push_function_expr::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + base_statement* func = self->getAction()->funcQ.back(); + self->getAction()->funcQ.pop_back(); + + self->getAction()->exprQ.push_back(func); +} + +void push_compare_operator::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + arithmetic_operand::cmp_t c = arithmetic_operand::cmp_t::NA; + + if (token == "=") + { + c = arithmetic_operand::cmp_t::EQ; + } + else if (token == "!=" || token == "<>") + { + c = arithmetic_operand::cmp_t::NE; + } + else if (token == ">=") + { + c = arithmetic_operand::cmp_t::GE; + } + else if (token == "<=") + { + c = arithmetic_operand::cmp_t::LE; + } + else if (token == ">") + { + c = arithmetic_operand::cmp_t::GT; + } + else if (token == "<") + { + c = arithmetic_operand::cmp_t::LT; + } + + self->getAction()->arithmetic_compareQ.push_back(c); +} + +void push_logical_operator::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + logical_operand::oplog_t l = logical_operand::oplog_t::NA; + + if (token == "and") + { + l = logical_operand::oplog_t::AND; + } + else if (token == "or") + { + l = logical_operand::oplog_t::OR; + } + + self->getAction()->logical_compareQ.push_back(l); +} + +void push_arithmetic_predicate::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + base_statement* vr, *vl; + arithmetic_operand::cmp_t c = self->getAction()->arithmetic_compareQ.back(); + self->getAction()->arithmetic_compareQ.pop_back(); + + if (!self->getAction()->exprQ.empty()) + { + vr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + } + else + { + throw base_s3select_exception(std::string("missing right operand for arithmetic-comparision expression"), base_s3select_exception::s3select_exp_en_t::FATAL); + } + + if (!self->getAction()->exprQ.empty()) + { + vl = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + } + else + { + throw base_s3select_exception(std::string("missing left operand for arithmetic-comparision expression"), base_s3select_exception::s3select_exp_en_t::FATAL); + } + + arithmetic_operand* t = S3SELECT_NEW(self, arithmetic_operand, vl, c, vr); + + 
self->getAction()->exprQ.push_back(t); +} + +void push_logical_predicate::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + base_statement* tl = nullptr, *tr = nullptr; + logical_operand::oplog_t oplog = self->getAction()->logical_compareQ.back(); + self->getAction()->logical_compareQ.pop_back(); + + if (self->getAction()->exprQ.empty() == false) + { + tr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + } + else + {//should reject by syntax parser + throw base_s3select_exception(std::string("missing right operand for logical expression"), base_s3select_exception::s3select_exp_en_t::FATAL); + } + + if (self->getAction()->exprQ.empty() == false) + { + tl = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + } + else + {//should reject by syntax parser + throw base_s3select_exception(std::string("missing left operand for logical expression"), base_s3select_exception::s3select_exp_en_t::FATAL); + } + + logical_operand* f = S3SELECT_NEW(self, logical_operand, tl, oplog, tr); + + self->getAction()->exprQ.push_back(f); +} + +void push_negation::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + base_statement* pred = nullptr; + + if (self->getAction()->exprQ.empty() == false) + { + pred = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + } + else + { + throw base_s3select_exception(std::string("failed to create AST for NOT operator"), base_s3select_exception::s3select_exp_en_t::FATAL); + } + + //upon NOT operator, the logical and arithmetical operators are "tagged" to negate result. + if (dynamic_cast(pred)) + { + logical_operand* f = S3SELECT_NEW(self, logical_operand, pred); + self->getAction()->exprQ.push_back(f); + } + else if (dynamic_cast<__function*>(pred) || dynamic_cast(pred) || dynamic_cast(pred)) + { + negate_function_operation* nf = S3SELECT_NEW(self, negate_function_operation, pred); + self->getAction()->exprQ.push_back(nf); + } + else if(dynamic_cast(pred)) + { + arithmetic_operand* f = S3SELECT_NEW(self, arithmetic_operand, pred); + self->getAction()->exprQ.push_back(f); + } + else + { + throw base_s3select_exception(std::string("failed to create AST for NOT operator"), base_s3select_exception::s3select_exp_en_t::FATAL); + } +} + +void push_column_pos::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + std::string alias_name; + variable* v; + + if (token == "*" || token == "* ") //TODO space should skip in boost::spirit + { + v = S3SELECT_NEW(self, variable, token, variable::var_t::STAR_OPERATION); + + } + else + { + size_t pos = token.find('.'); + if(pos != std::string::npos) + { + alias_name = token.substr(0,pos); + + pos ++; + token = token.substr(pos,token.size()); + + if(self->getAction()->column_prefix != "##" && self->getAction()->column_prefix != alias_name) + { + throw base_s3select_exception(std::string("query can not contain more then a single table-alias"), base_s3select_exception::s3select_exp_en_t::FATAL); + } + + self->getAction()->column_prefix = alias_name; + } + v = S3SELECT_NEW(self, variable, token, variable::var_t::POS); + } + + self->getAction()->exprQ.push_back(v); +} + +void push_projection::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + self->getAction()->projections.get()->push_back(self->getAction()->exprQ.back()); + self->getAction()->exprQ.pop_back(); +} + +void push_alias_projection::builder(s3select* self, 
const char* a, const char* b) const +{ + std::string token(a, b); + //extract alias name + const char* p = b; + while (*(--p) != ' ') + ; + std::string alias_name(p + 1, b); + base_statement* bs = self->getAction()->exprQ.back(); + + //mapping alias name to base-statement + bool res = self->getAction()->alias_map.insert_new_entry(alias_name, bs); + if (res == false) + { + throw base_s3select_exception(std::string("alias <") + alias_name + std::string("> is already been used in query"), base_s3select_exception::s3select_exp_en_t::FATAL); + } + + self->getAction()->projections.get()->push_back(bs); + self->getAction()->exprQ.pop_back(); +} + +void push_between_filter::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + std::string between_function("#between#"); + + __function* func = S3SELECT_NEW(self, __function, between_function.c_str(), self->getS3F()); + + base_statement* second_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(second_expr); + + base_statement* first_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(first_expr); + + base_statement* main_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(main_expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_not_between_filter::builder(s3select* self, const char* a, const char* b) const +{ + + static constexpr const std::string_view not_between_function("#not_between#"); + + __function* func = S3SELECT_NEW(self, __function, not_between_function.data(), self->getS3F()); + + base_statement* second_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(second_expr); + + base_statement* first_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(first_expr); + + base_statement* main_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(main_expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_in_predicate_first_arg::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + if(self->getAction()->exprQ.empty()) + { + throw base_s3select_exception("failed to create AST for in predicate", base_s3select_exception::s3select_exp_en_t::FATAL); + } + + self->getAction()->inPredicateQ.push_back( self->getAction()->exprQ.back() ); + self->getAction()->exprQ.pop_back(); + + if(self->getAction()->exprQ.empty()) + { + throw base_s3select_exception("failed to create AST for in predicate", base_s3select_exception::s3select_exp_en_t::FATAL); + } + + self->getAction()->inMainArg = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + +} + +void push_in_predicate_arguments::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + if(self->getAction()->exprQ.empty()) + { + throw base_s3select_exception("failed to create AST for in predicate", base_s3select_exception::s3select_exp_en_t::FATAL); + } + + self->getAction()->inPredicateQ.push_back( self->getAction()->exprQ.back() ); + + self->getAction()->exprQ.pop_back(); + +} + +void push_in_predicate::builder(s3select* self, const char* a, const char* b) const +{ + // expr in (e1,e2,e3 ...) 
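+  //note (descriptive): the arguments were stacked by push_in_predicate_first_arg /
+  //push_in_predicate_arguments; popping inPredicateQ from the back hands e3,e2,e1
+  //to the #in_predicate# function, and the main expression is appended last.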
+ std::string token(a, b); + + std::string in_function("#in_predicate#"); + + __function* func = S3SELECT_NEW(self, __function, in_function.c_str(), self->getS3F()); + + while(!self->getAction()->inPredicateQ.empty()) + { + base_statement* ei = self->getAction()->inPredicateQ.back(); + + self->getAction()->inPredicateQ.pop_back(); + + func->push_argument(ei); + + } + + func->push_argument( self->getAction()->inMainArg ); + + self->getAction()->exprQ.push_back(func); + + self->getAction()->inPredicateQ.clear(); + + self->getAction()->inMainArg = 0; +} + +void push_like_predicate_no_escape::builder(s3select* self, const char* a, const char* b) const +{ + + std::string token(a, b); + std::string in_function("#like_predicate#"); + + __function* func = S3SELECT_NEW(self, __function, in_function.c_str(), self->getS3F()); + + variable* v = S3SELECT_NEW(self, variable, "\\",variable::var_t::COLUMN_VALUE); + func->push_argument(v); + + // experimenting: a valgrind issue happens only on teuthology + //self->getS3F()->push_for_cleanup(v); + + base_statement* like_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(like_expr); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + func->push_argument(expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_like_predicate_escape::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + std::string in_function("#like_predicate#"); + + __function* func = S3SELECT_NEW(self, __function, in_function.c_str(), self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + func->push_argument(expr); + + base_statement* main_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(main_expr); + + base_statement* escape_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(escape_expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_is_null_predicate::builder(s3select* self, const char* a, const char* b) const +{ + //expression is null, is not null + std::string token(a, b); + bool is_null = true; + + for(size_t i=0;i<token.size();i++) + { + bsc::parse_info<> info = bsc::parse(token.c_str()+i, (bsc::str_p("is") >> bsc::str_p("not") >> bsc::str_p("null")) , bsc::space_p); + if (info.full) + is_null = false; + } + + std::string in_function("#is_null#"); + + if (is_null == false) + { + in_function = "#is_not_null#"; + } + + __function* func = S3SELECT_NEW(self, __function, in_function.c_str(), self->getS3F()); + + if (!self->getAction()->exprQ.empty()) + { + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(expr); + } + + self->getAction()->exprQ.push_back(func); +} + +void push_when_condition_then::builder(s3select* self, const char* a, const char* b) const +{ +//purpose: each new function node provides execution for (if {condition} then {expression}) + std::string token(a, b); + + // _fn_when_then + __function* func = S3SELECT_NEW(self, __function, "#when-then#", self->getS3F()); + + base_statement* then_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + base_statement* when_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + func->push_argument(then_expr); + func->push_argument(when_expr); + + self->getAction()->exprQ.push_back(func); + + // the first_when_then_expr marks the
first when-then expression; it is used later upon completing the full statement (case when ... then ... else ... end) + if(self->getAction()->first_when_then_expr == nullptr) + { + self->getAction()->first_when_then_expr = func; + } +} + +void push_case_when_else::builder(s3select* self, const char* a, const char* b) const +{ +//purpose: provide the execution for the complete statement, i.e. (case when {expression} then {expression} else {expression} end) + std::string token(a, b); + + base_statement* else_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + // _fn_case_when_else + __function* func = S3SELECT_NEW(self, __function, "#case-when-else#", self->getS3F()); + + func->push_argument(else_expr); + + base_statement* when_then_func = nullptr; + + // the loop ends upon reaching the first when-then + while(when_then_func != self->getAction()->first_when_then_expr) + { + // popping from the when-then queue and pushing to the function arguments list + when_then_func = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(when_then_func); + } + + self->getAction()->first_when_then_expr = nullptr; + //func is the complete statement, implemented by _fn_case_when_else + self->getAction()->exprQ.push_back(func); +} + +void push_case_value_when_value_else::builder(s3select* self, const char* a, const char* b) const +{ +//purpose: provide execution for the complete statement, i.e. case-value-when-value-else-value-end + std::string token(a, b); + + base_statement* else_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + // _fn_case_when_else + __function* func = S3SELECT_NEW(self, __function, "#case-when-else#", self->getS3F()); + + // push the else expression + func->push_argument(else_expr); + + // popping the case-value + base_statement* case_value = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + base_statement* when_then_func = nullptr; + + //popping all when-value-then expressions (_fn_when_value_then) and adding the case-value to each + while(self->getAction()->whenThenQ.empty() == false) + { + when_then_func = self->getAction()->whenThenQ.back(); + if (dynamic_cast<__function*>(when_then_func)) + { + // adding the case-value as argument + dynamic_cast<__function*>(when_then_func)->push_argument(case_value); + } + else + throw base_s3select_exception("failed to create AST for case-value-when construct", base_s3select_exception::s3select_exp_en_t::FATAL); + + self->getAction()->whenThenQ.pop_back(); + + func->push_argument(when_then_func); + } + //pushing the execution function for the complete statement + self->getAction()->exprQ.push_back(func); +} + +void push_when_value_then::builder(s3select* self, const char* a, const char* b) const +{ + //provide execution of when-value-then-value :: _fn_when_value_then + std::string token(a, b); + + __function* func = S3SELECT_NEW(self, __function, "#when-value-then#", self->getS3F()); + + base_statement* then_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + base_statement* when_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + func->push_argument(then_expr); + func->push_argument(when_expr); + //each when-value-then-value is pushed to a dedicated queue + self->getAction()->whenThenQ.push_back(func); +} + +void push_decimal_operator::builder(s3select* self, const char* a, const char* b) const +{//decimal(integer,integer) + std::string token(a, b); + + base_statement* lhs =
nullptr; + base_statement* rhs = nullptr; + + //right side (decimal operator) + if (self->getAction()->exprQ.empty() == false) + { + rhs = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + } + + //left side (decimal operator) + if (self->getAction()->exprQ.empty() == false) + { + lhs = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + } + + __function* func = S3SELECT_NEW(self, __function, "#decimal_operator#", self->getS3F()); + + func->push_argument(rhs); + func->push_argument(lhs); + + self->getAction()->exprQ.push_back(func); +} + +void push_cast_decimal_expr::builder(s3select* self, const char* a, const char* b) const +{ + //cast(expression as decimal(x,y)) + std::string token(a, b); + + base_statement* lhs = nullptr; + base_statement* rhs = nullptr; + + //right side (decimal operator) + if (self->getAction()->exprQ.empty() == false) + { + rhs = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + } + + //left side - expression + if (self->getAction()->exprQ.empty() == false) + { + lhs = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + } + + __function* func = S3SELECT_NEW(self, __function, "#cast_as_decimal#", self->getS3F()); + + func->push_argument(rhs); + func->push_argument(lhs); + + self->getAction()->exprQ.push_back(func); +} + +void push_cast_expr::builder(s3select* self, const char* a, const char* b) const +{ + //cast(expression as int/float/string/timestamp) --> new function "int/float/string/timestamp" ( args = expression ) + std::string token(a, b); + + std::string cast_function; + + cast_function = self->getAction()->dataTypeQ.back(); + self->getAction()->dataTypeQ.pop_back(); + + __function* func = S3SELECT_NEW(self, __function, cast_function.c_str(), self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_data_type::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + auto cast_operator = [&](const char *s){return strncasecmp(a,s,strlen(s))==0;}; + + if(cast_operator("int")) + { + self->getAction()->dataTypeQ.push_back("int"); + }else if(cast_operator("float")) + { + self->getAction()->dataTypeQ.push_back("float"); + }else if(cast_operator("string")) + { + self->getAction()->dataTypeQ.push_back("string"); + }else if(cast_operator("timestamp")) + { + self->getAction()->dataTypeQ.push_back("to_timestamp"); + }else if(cast_operator("bool")) + { + self->getAction()->dataTypeQ.push_back("to_bool"); + } +} + +void push_trim_whitespace_both::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + __function* func = S3SELECT_NEW(self, __function, "#trim#", self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_trim_expr_one_side_whitespace::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + std::string trim_function; + + trim_function = self->getAction()->trimTypeQ.back(); + self->getAction()->trimTypeQ.pop_back(); + + __function* func = S3SELECT_NEW(self, __function, trim_function.c_str(), self->getS3F()); + + base_statement* inp_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(inp_expr); + + 
self->getAction()->exprQ.push_back(func); +} + +void push_trim_expr_anychar_anyside::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + std::string trim_function; + + trim_function = self->getAction()->trimTypeQ.back(); + self->getAction()->trimTypeQ.pop_back(); + + __function* func = S3SELECT_NEW(self, __function, trim_function.c_str(), self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(expr); + + base_statement* inp_expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + func->push_argument(inp_expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_trim_type::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + auto trim_option = [&](const char *s){return strncmp(a,s,strlen(s))==0;}; + + if(trim_option("leading")) + { + self->getAction()->trimTypeQ.push_back("#leading#"); + }else if(trim_option("trailing")) + { + self->getAction()->trimTypeQ.push_back("#trailing#"); + }else + { + self->getAction()->trimTypeQ.push_back("#trim#"); + } +} + +void push_substr_from::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + __function* func = S3SELECT_NEW(self, __function, "substring", self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + base_statement* start_position = self->getAction()->exprQ.back(); + + self->getAction()->exprQ.pop_back(); + func->push_argument(start_position); + func->push_argument(expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_substr_from_for::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + __function* func = S3SELECT_NEW(self, __function, "substring", self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + base_statement* start_position = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + base_statement* end_position = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + func->push_argument(end_position); + func->push_argument(start_position); + func->push_argument(expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_datediff::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + std::string date_op; + + date_op = self->getAction()->datePartQ.back(); + self->getAction()->datePartQ.pop_back(); + + std::string date_function = "#datediff_" + date_op + "#"; + + __function* func = S3SELECT_NEW(self, __function, date_function.c_str(), self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + base_statement* start_position = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + func->push_argument(start_position); + func->push_argument(expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_dateadd::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + std::string date_op; + + date_op = self->getAction()->datePartQ.back(); + self->getAction()->datePartQ.pop_back(); + + std::string date_function = "#dateadd_" + date_op + "#"; + + __function* func = S3SELECT_NEW(self, __function, date_function.c_str(), self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + 
self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + base_statement* start_position = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + func->push_argument(start_position); + func->push_argument(expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_extract::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + std::string date_op; + + date_op = self->getAction()->datePartQ.back(); + self->getAction()->datePartQ.pop_back(); + + std::string date_function = "#extract_" + date_op + "#"; + + __function* func = S3SELECT_NEW(self, __function, date_function.c_str(), self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + func->push_argument(expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_date_part::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + self->getAction()->datePartQ.push_back(token); +} + +void push_time_to_string_constant::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + __function* func = S3SELECT_NEW(self, __function, "#to_string_constant#", self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + base_statement* frmt = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + func->push_argument(frmt); + func->push_argument(expr); + + self->getAction()->exprQ.push_back(func); + +} + +void push_time_to_string_dynamic::builder(s3select* self, const char* a, const char* b) const +{ + std::string token(a, b); + + __function* func = S3SELECT_NEW(self, __function, "#to_string_dynamic#", self->getS3F()); + + base_statement* expr = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + base_statement* frmt = self->getAction()->exprQ.back(); + self->getAction()->exprQ.pop_back(); + + func->push_argument(frmt); + func->push_argument(expr); + + self->getAction()->exprQ.push_back(func); +} + +void push_string_to_time_constant::builder(s3select* self, const char* a, const char* b) const +{ + //the token could be a string or a timestamp; we need to check which + //if it is in a timestamp format, we push the variable as a timestamp, otherwise as a string + //the purpose is to let backticks convert the string to a timestamp at parse time instead of processing time (Trino uses this approach) + + a++; //remove the first quote + b--; + std::string token(a, b); + + _fn_to_timestamp* to_timestamp = S3SELECT_NEW(self, _fn_to_timestamp);//TODO the _fn_to_timestamp should release the memory (cleanup) + bs_stmt_vec_t args; + + variable* var_string = S3SELECT_NEW(self, variable, token, variable::var_t::COLUMN_VALUE); + variable* timestamp = S3SELECT_NEW(self, variable, token, variable::var_t::COLUMN_VALUE); + + (self->get_to_timestamp_for_clean()) = to_timestamp; + var_string->push_for_cleanup(self->get_ast_nodes_to_delete()); + timestamp->push_for_cleanup(self->get_ast_nodes_to_delete()); + + args.push_back(var_string); + + try { + (*to_timestamp)(&args, timestamp); + } + catch(std::exception& e) + { + //it is not a timestamp, it is a string + self->getAction()->exprQ.push_back(var_string); + return; + } + + self->getAction()->exprQ.push_back(timestamp); +} + +struct s3select_csv_definitions //TODO +{ + char row_delimiter; + char column_delimiter; + char output_row_delimiter; + char output_column_delimiter; + char escape_char; + char output_escape_char; + char
output_quot_char; + char quot_char; + bool use_header_info; + bool ignore_header_info;//skip first line + bool quote_fields_always; + bool quote_fields_asneeded; + bool redundant_column; + bool comment_empty_lines; + std::vector<char> comment_chars; + std::vector<char> trim_chars; + + s3select_csv_definitions():row_delimiter('\n'), column_delimiter(','), output_row_delimiter('\n'), output_column_delimiter(','), escape_char('\\'), output_escape_char('\\'), output_quot_char('"'), quot_char('"'), use_header_info(false), ignore_header_info(false), quote_fields_always(false), quote_fields_asneeded(false), redundant_column(false), comment_empty_lines(false) {} + +}; + + +/////// handling different object types +class base_s3object +{ + +protected: + scratch_area* m_sa; + std::string m_obj_name; + bool m_aggr_flow = false; //TODO once per query + bool m_is_to_aggregate; + std::vector<base_statement*> m_projections; + base_statement* m_where_clause; + s3select* m_s3_select; + size_t m_error_count; + bool m_is_limit_on; + unsigned long m_limit; + unsigned long m_processed_rows; + size_t m_returned_bytes_size; + std::function<void(const char*)> fp_ext_debug_mesg;//dispatch debug messages into an external system + +public: + s3select_csv_definitions m_csv_defintion;//TODO add a method for modification + + enum class Status { + END_OF_STREAM, + INITIAL_STAT, + NORMAL_EXIT, + LIMIT_REACHED, + SQL_ERROR + }; + + Status m_sql_processing_status; + + Status get_sql_processing_status() + { + return m_sql_processing_status; + } + + bool is_sql_limit_reached() + { + return m_sql_processing_status == Status::LIMIT_REACHED; + } + + void set_base_defintions(s3select* m) + { + if(m_s3_select || !m) + {//do not define twice + //and do not define with a null pointer + return; + } + + m_s3_select=m; + m_sa=m_s3_select->get_scratch_area(); + m_error_count=0; + m_projections = m_s3_select->get_projections_list(); + m_where_clause = m_s3_select->get_filter(); + + if (m_where_clause) + { + m_where_clause->traverse_and_apply(m_sa, m_s3_select->get_aliases(), m_s3_select->is_json_query()); + } + + for (auto& p : m_projections) + { + p->traverse_and_apply(m_sa, m_s3_select->get_aliases(), m_s3_select->is_json_query()); + } + m_is_to_aggregate = true;//TODO not correct.
should be set upon end-of-stream + m_aggr_flow = m_s3_select->is_aggregate_query(); + + m_is_limit_on = m_s3_select->is_limit(); + if(m_is_limit_on) + { + m_limit = m_s3_select->get_limit(); + } + + m_processed_rows = 0; + } + + base_s3object():m_sa(nullptr),m_is_to_aggregate(false),m_where_clause(nullptr),m_s3_select(nullptr),m_error_count(0),m_returned_bytes_size(0),m_sql_processing_status(Status::INITIAL_STAT){} + + explicit base_s3object(s3select* m):base_s3object() + { + if(m) + { + set_base_defintions(m); + } + } + + virtual bool is_end_of_stream() {return false;} + virtual void row_fetch_data() {} + virtual void row_update_data() {} + virtual void columnar_fetch_where_clause_columns(){} + virtual void columnar_fetch_projection(){} + // for the case where the rows are not fetched, but "pushed" by the data-source parser (JSON) + virtual bool multiple_row_processing(){return true;} + + void set_external_debug_system(std::function<void(const char*)> fp_external) + { + fp_ext_debug_mesg = fp_external; + } + + size_t get_return_result_size() + { + return m_returned_bytes_size; + } + + void result_values_to_string(multi_values& projections_resuls, std::string& result) + { + size_t i = 0; + std::string output_delimiter(1,m_csv_defintion.output_column_delimiter); + std::string output_row_delimiter(1,m_csv_defintion.output_row_delimiter); + + for(auto& res : projections_resuls.values) + { + if(fp_ext_debug_mesg) + fp_ext_debug_mesg( res->to_string() ); + + if (m_csv_defintion.quote_fields_always) { + std::ostringstream quoted_result; + quoted_result << std::quoted(res->to_string(),m_csv_defintion.output_quot_char, m_csv_defintion.escape_char); + result.append(quoted_result.str()); + m_returned_bytes_size += quoted_result.str().size(); + }//TODO to add asneeded + else + { + result.append(res->to_string()); + m_returned_bytes_size += strlen(res->to_string()); + } + + if(!m_csv_defintion.redundant_column) { + if(++i < projections_resuls.values.size()) { + result.append(output_delimiter); + m_returned_bytes_size += output_delimiter.size(); + } + } + else { + result.append(output_delimiter); + m_returned_bytes_size += output_delimiter.size(); + } + } + if(!m_aggr_flow){ + result.append(output_row_delimiter); + m_returned_bytes_size += output_row_delimiter.size(); + } + } + + Status getMatchRow( std::string& result) + { + multi_values projections_resuls; + + if (m_is_limit_on && m_processed_rows == m_limit) + { + return m_sql_processing_status = Status::LIMIT_REACHED; + } + + if (m_aggr_flow == true) + { + do + { + row_fetch_data(); + columnar_fetch_where_clause_columns(); + if (is_end_of_stream()) + { + if (m_is_to_aggregate) + for (auto& i : m_projections) + { + i->set_last_call(); + i->set_skip_non_aggregate(false);//projection column is set to be runnable + + projections_resuls.push_value( &(i->eval()) ); + } + + result_values_to_string(projections_resuls,result); + return m_sql_processing_status = Status::END_OF_STREAM; + } + + m_processed_rows++; + if ((*m_projections.begin())->is_set_last_call()) + { + //should be validated during query execution; no updates once nodes are marked with set_last_call + throw base_s3select_exception("on aggregation query , can not stream row data post do-aggregate call", base_s3select_exception::s3select_exp_en_t::FATAL); + } + + for (auto& a : *m_s3_select->get_aliases()->get()) + { + a.second->invalidate_cache_result(); + } + + row_update_data(); + if (!m_where_clause || m_where_clause->eval().is_true()) + { + columnar_fetch_projection(); + for (auto i : m_projections) + { + i->eval(); +
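+ /* Editor's note: in this aggregate branch each i->eval() above only folds the
+    current row into the function's internal state (e.g. sum/count accumulators);
+    the projected values are materialized once, via set_last_call() together with
+    set_skip_non_aggregate(false), either at end-of-stream or below when the
+    LIMIT is reached. */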
} + } + + if(m_is_limit_on && m_processed_rows == m_limit) + { + for (auto& i : m_projections) + { + i->set_last_call(); + i->set_skip_non_aggregate(false);//projection column is set to be runnable + projections_resuls.push_value( &(i->eval()) ); + } + result_values_to_string(projections_resuls,result); + return m_sql_processing_status = Status::LIMIT_REACHED; + } + } + while (multiple_row_processing()); + } + else + { + //save the where-clause evaluation result (performance perspective) + bool where_clause_result = false; + do + { + row_fetch_data(); + columnar_fetch_where_clause_columns(); + if(is_end_of_stream()) + { + return m_sql_processing_status = Status::END_OF_STREAM; + } + + m_processed_rows++; + row_update_data(); + for (auto& a : *m_s3_select->get_aliases()->get()) + { + a.second->invalidate_cache_result(); + } + } + while (multiple_row_processing() && m_where_clause && !(where_clause_result = m_where_clause->eval().is_true()) && !(m_is_limit_on && m_processed_rows == m_limit)); + + // in the case of JSON it needs to evaluate the where-clause (for the first time) + if(!multiple_row_processing() && m_where_clause){ + where_clause_result = m_where_clause->eval().is_true(); + } + + if(m_where_clause && ! where_clause_result && m_is_limit_on && m_processed_rows == m_limit) + { + return m_sql_processing_status = Status::LIMIT_REACHED; + } + + bool found = multiple_row_processing(); + + if(!multiple_row_processing()) + { + found = !m_where_clause || where_clause_result; + } + + if(found) + { + columnar_fetch_projection(); + projections_resuls.clear(); + for (auto& i : m_projections) + { + projections_resuls.push_value( &(i->eval()) ); + } + result_values_to_string(projections_resuls,result); + } + + } + return is_end_of_stream() ? (m_sql_processing_status = Status::END_OF_STREAM) : (m_sql_processing_status = Status::NORMAL_EXIT); + + }//getMatchRow + + virtual ~base_s3object() = default; + +}; //base_s3object + +//TODO config / default-value +#define CSV_INPUT_TYPE_RESPONSE_SIZE_LIMIT (64 * 1024) +class csv_object : public base_s3object +{ + +public: + + class csv_defintions : public s3select_csv_definitions + {}; + + explicit csv_object(s3select* s3_query) : + base_s3object(s3_query), + m_skip_last_line(false), + m_extract_csv_header_info(false), + m_previous_line(false), + m_skip_first_line(false), + m_processed_bytes(0) {} + + csv_object(s3select* s3_query, csv_defintions csv) : + base_s3object(s3_query), + m_skip_last_line(false), + m_extract_csv_header_info(false), + m_previous_line(false), + m_skip_first_line(false), + m_processed_bytes(0) + { + m_csv_defintion = csv; + } + + csv_object(): + base_s3object(nullptr), + m_skip_last_line(false), + m_extract_csv_header_info(false), + m_previous_line(false), + m_skip_first_line(false), + m_processed_bytes(0) {} + + void set_csv_query(s3select* s3_query,csv_defintions csv) + { + if(m_s3_select != nullptr) + { + //return; + } + + set_base_defintions(s3_query); + m_csv_defintion = csv; + } + +private: + bool m_skip_last_line; + std::string m_error_description; + char* m_stream; + char* m_end_stream; + std::vector<char*> m_row_tokens; + CSVParser* csv_parser; + bool m_extract_csv_header_info; + std::vector<std::string> m_csv_schema{128}; + + //handling arbitrary chunks (rows cut in the middle) + bool m_previous_line; + bool m_skip_first_line; + std::string merge_line; + std::string m_last_line; + size_t m_processed_bytes; + int64_t m_number_of_tokens; + size_t m_skip_x_first_bytes=0; + + std::function<void(std::string&)> fp_s3select_result_format=nullptr; + std::function<void(std::string&)>
fp_s3select_header_format=nullptr; +public: + void set_result_formatters( std::function<void(std::string&)>& result_format, + std::function<void(std::string&)>& header_format) + { + fp_s3select_result_format = result_format; + fp_s3select_header_format = header_format; + } +private: + int getNextRow() + { + size_t num_of_tokens=0; + m_row_tokens.clear(); + + if (csv_parser->read_row(m_row_tokens)) + { + num_of_tokens = m_row_tokens.size(); + } + else + { + return -1; + } + + return num_of_tokens; + } + +public: + + std::string get_error_description() + { + return m_error_description; + } + + virtual ~csv_object() = default; + +public: + virtual bool is_end_of_stream() + { + return m_number_of_tokens < 0; + } + + virtual void row_fetch_data() + { + m_number_of_tokens = getNextRow(); + } + + virtual void row_update_data() + { + m_sa->update(m_row_tokens, m_number_of_tokens); + } + + + int extract_csv_header_info() + { + + if (m_csv_defintion.ignore_header_info == true) + { + csv_parser->next_line(); + } + else if(m_csv_defintion.use_header_info == true) + { + size_t num_of_tokens = getNextRow();//TODO validate number of tokens + + for(size_t i=0; i<num_of_tokens; i++) + { + m_csv_schema[i].assign(m_row_tokens[i]); + } + + m_sa->load_schema(m_csv_schema); + } + + m_extract_csv_header_info = true; + + return 0; + } + + + int run_s3select_on_stream(std::string& result, const char* csv_stream, size_t stream_length, size_t obj_size) + { + int status=0; + try{ + status = run_s3select_on_stream_internal(result,csv_stream,stream_length,obj_size); + } + catch(base_s3select_exception& e) + { + m_error_description = e.what(); + m_error_count ++; + if (e.severity() == base_s3select_exception::s3select_exp_en_t::FATAL || m_error_count>100)//abort query execution + { + return -1; + } + } + catch(chunkalloc_out_of_mem) + { + m_error_description = "out of memory"; + return -1; + } + catch(io::error::escaped_char_missing& err) + { + m_error_description = "escaped_char_missing failure while csv parsing"; + return -1; + } + catch(io::error::escaped_string_not_closed& err) + { + m_error_description = "escaped_string_not_closed failure while csv parsing"; + return -1; + } + catch(io::error::line_length_limit_exceeded& err) + { + m_error_description = "line_length_limit_exceeded failure while csv parsing"; + return -1; + } + catch(io::error::with_file_name& err) + { + m_error_description = "with_file_name failure while csv parsing"; + return -1; + } + catch(io::error::with_file_line& err) + { + m_error_description = "with_file_line failure while csv parsing"; + return -1; + } + + return status; + } + +private: + int run_s3select_on_stream_internal(std::string& result, const char* csv_stream, size_t stream_length, size_t obj_size) + { + //purpose: the CSV data is "streaming"; it may "cut" rows in the middle. in that case the "broken" line is stored +//for later; when the next chunk of data streams in, the stored line is merged with the current broken line and processed.
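+ /* Editor's illustration (hypothetical chunking): with row delimiter '\n', an
+    object "a,1\nb,2\nc,3\n" may arrive as
+        chunk #1: "a,1\nb,"   -> "b," is saved in m_last_line
+        chunk #2: "2\nc,3\n"  -> "b," + "2\n" is merged and processed first
+    so no row is ever evaluated in a torn state. */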
+ std::string tmp_buff; + + m_processed_bytes += stream_length; + + m_skip_first_line = false; + + if (m_previous_line) + { + //if a previous broken line exists, merge it with the current chunk + char* p_obj_chunk = (char*)csv_stream; + while (*p_obj_chunk != m_csv_defintion.row_delimiter && p_obj_chunk<(csv_stream+stream_length)) + { + p_obj_chunk++; + } + + tmp_buff.assign((char*)csv_stream, (char*)csv_stream + (p_obj_chunk - csv_stream)); + merge_line = m_last_line + tmp_buff + m_csv_defintion.row_delimiter; + m_previous_line = false; + m_skip_first_line = true; + m_skip_x_first_bytes = tmp_buff.size()+1; + + //processing the merged row (previous broken row) + run_s3select_on_object(result, merge_line.c_str(), merge_line.length(), false, false, false); + } + + if (stream_length && csv_stream[stream_length - 1] != m_csv_defintion.row_delimiter) + { + //in case of a "broken" last line + char* p_obj_chunk = (char*)&(csv_stream[stream_length - 1]); + while (*p_obj_chunk != m_csv_defintion.row_delimiter && p_obj_chunk>csv_stream) + { + p_obj_chunk--; //scan back to the end of the previous line in the chunk + } + + u_int32_t skip_last_bytes = (&(csv_stream[stream_length - 1]) - p_obj_chunk); + m_last_line.assign(p_obj_chunk + 1, p_obj_chunk + 1 + skip_last_bytes); //save it for the next chunk + + m_previous_line = true;//i.e. skip the last (broken) line + + //cut out the broken line + stream_length -= (m_last_line.length()); + } + + return run_s3select_on_object(result, csv_stream, stream_length, m_skip_first_line, m_previous_line, (m_processed_bytes >= obj_size)); + } + +public: + int run_s3select_on_object(std::string& result, const char* csv_stream, size_t stream_length, bool skip_first_line, bool skip_last_line, bool do_aggregate) + { + m_stream = (char*)csv_stream; + m_end_stream = (char*)csv_stream + stream_length; + m_is_to_aggregate = do_aggregate; + m_skip_last_line = skip_last_line; + + if(skip_first_line) + { + //the stream may start in the middle of a row (maybe in the middle of a quote). + //at this point the stream should skip the first (broken) row. + //the csv_parser should be initialized with the adjusted stream position. + m_stream += m_skip_x_first_bytes; + m_skip_x_first_bytes=0; + } + + CSVParser _csv_parser("csv", m_stream, m_end_stream); + csv_parser = &_csv_parser; + csv_parser->set_csv_def( m_csv_defintion.row_delimiter, + m_csv_defintion.column_delimiter, + m_csv_defintion.quot_char, + m_csv_defintion.escape_char, + m_csv_defintion.comment_empty_lines, + m_csv_defintion.comment_chars, + m_csv_defintion.trim_chars); + + + if(m_extract_csv_header_info == false) + { + extract_csv_header_info(); + } + do + { + m_sql_processing_status = Status::INITIAL_STAT; + try + { + getMatchRow(result); + } + catch (base_s3select_exception& e) + { + m_error_description = e.what(); + m_error_count ++; + if (e.severity() == base_s3select_exception::s3select_exp_en_t::FATAL || m_error_count>100 || (m_stream>=m_end_stream))//abort query execution + { + return -1; + } + } + + if(fp_s3select_result_format && fp_s3select_header_format) + { + if (result.size() > CSV_INPUT_TYPE_RESPONSE_SIZE_LIMIT) + {//there are systems that might reject the response due to its size.
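+ /* Editor's note: CSV_INPUT_TYPE_RESPONSE_SIZE_LIMIT (64KiB) bounds how much
+    formatted output accumulates before the two callbacks flush the result and
+    emit a fresh header for the next batch. */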
+ fp_s3select_result_format(result); + fp_s3select_header_format(result); + } + } + + if (m_sql_processing_status == Status::END_OF_STREAM) + { + break; + } + else if (m_sql_processing_status == Status::LIMIT_REACHED) // limit reached + { + break;//user should request the sql_processing_status + } + + } while (true); + + if(fp_s3select_result_format && fp_s3select_header_format) + { //note: it may produce an empty response (more than once) + //upon an empty result, it should return *only* upon the last call. + fp_s3select_result_format(result); + fp_s3select_header_format(result); + } + + return 0; + } +}; + +#ifdef _ARROW_EXIST +class parquet_object : public base_s3object +{ + +private: + std::string m_error_description; + parquet_file_parser* object_reader; + parquet_file_parser::column_pos_t m_where_clause_columns; + parquet_file_parser::column_pos_t m_projections_columns; + std::vector<parquet_file_parser::parquet_value_t> m_predicate_values; + std::vector<parquet_file_parser::parquet_value_t> m_projections_values; + bool not_to_increase_first_time; + +public: + + parquet_object(std::string parquet_file_name, s3select *s3_query,s3selectEngine::rgw_s3select_api* rgw) : base_s3object(s3_query),object_reader(nullptr) + { + try{ + + object_reader = new parquet_file_parser(parquet_file_name,rgw); //TODO uniq ptr + } catch(std::exception &e) + { + throw base_s3select_exception(std::string("failure while processing parquet meta-data ") + std::string(e.what()) ,base_s3select_exception::s3select_exp_en_t::FATAL); + } + + parquet_query_setting(nullptr); + } + + parquet_object() : base_s3object(nullptr),object_reader(nullptr) + {} + + void parquet_query_setting(s3select *s3_query) + { + if(s3_query) + { + set_base_defintions(s3_query); + } + load_meta_data_into_scratch_area(); + for(auto x : m_s3_select->get_projections_list()) + {//traverse the AST and extract all columns residing in the projection statement. + x->extract_columns(m_projections_columns,object_reader->get_num_of_columns()); + } + //traverse the AST and extract all columns residing in the where clause.
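+ /* Editor's note: keeping the where-clause columns and the projection columns in
+    two separate position-sets lets columnar_fetch_where_clause_columns() read only
+    the predicate columns for every row, while columnar_fetch_projection() touches
+    the projection columns just for rows that pass the filter. */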
+ if(m_s3_select->get_filter()) + m_s3_select->get_filter()->extract_columns(m_where_clause_columns,object_reader->get_num_of_columns()); + + not_to_increase_first_time = true; + } + + ~parquet_object() + { + if(object_reader != nullptr) + { + delete object_reader; + } + + } + + std::string get_error_description() + { + return m_error_description; + } + + bool is_set() + { + return m_s3_select != nullptr; + } + + void set_parquet_object(std::string parquet_file_name, s3select *s3_query,s3selectEngine::rgw_s3select_api* rgw) //TODO duplicate code + { + try{ + + object_reader = new parquet_file_parser(parquet_file_name,rgw); //TODO uniq ptr + } catch(std::exception &e) + { + throw base_s3select_exception(std::string("failure while processing parquet meta-data ") + std::string(e.what()) ,base_s3select_exception::s3select_exp_en_t::FATAL); + } + + parquet_query_setting(s3_query); + } + + + int run_s3select_on_object(std::string &result, + std::function<int(std::string&)> fp_s3select_result_format, + std::function<int(std::string&)> fp_s3select_header_format) + { + m_sql_processing_status = Status::INITIAL_STAT; + do + { + try + { + getMatchRow(result); + } + catch (base_s3select_exception &e) + { + m_error_description = e.what(); + m_error_count++; + if (e.severity() == base_s3select_exception::s3select_exp_en_t::FATAL || m_error_count > 100) //abort query execution + { + return -1; + } + } + catch (std::exception &e) + { + m_error_description = e.what(); + m_error_count++; + if (m_error_count > 100) //abort query execution + { + return -1; + } + } + +#define S3SELECT_RESPONSE_SIZE_LIMIT (4 * 1024 * 1024) + if (result.size() > S3SELECT_RESPONSE_SIZE_LIMIT) + {//AWS-cli limits the response size; the following callbacks send the response upon reaching a threshold + fp_s3select_result_format(result); + + if (!is_end_of_stream() && (get_sql_processing_status() != Status::LIMIT_REACHED)) + { + fp_s3select_header_format(result); + } + } + else + { + if (is_end_of_stream() || (get_sql_processing_status() == Status::LIMIT_REACHED)) + { + fp_s3select_result_format(result); + } + } + + //TODO is_end_of_stream() required? + if (get_sql_processing_status() == Status::END_OF_STREAM || is_end_of_stream() || get_sql_processing_status() == Status::LIMIT_REACHED) + { + break; + } + + } while (1); + + return 0; + } + + void load_meta_data_into_scratch_area() + { + int i=0; + for(auto x : object_reader->get_schema()) + { + m_s3_select->get_scratch_area()->set_column_pos(x.first.c_str(),i++); + } + } + + virtual bool is_end_of_stream() + { + return object_reader->end_of_stream(); + } + + virtual void columnar_fetch_where_clause_columns() + { + if(!not_to_increase_first_time)//for rownum=0 + object_reader->increase_rownum(); + else + not_to_increase_first_time = false; + + auto status = object_reader->get_column_values_by_positions(m_where_clause_columns, m_predicate_values); + if(status<0)//TODO exception? + return; + m_sa->update(m_predicate_values, m_where_clause_columns); + } + + virtual void columnar_fetch_projection() + { + auto status = object_reader->get_column_values_by_positions(m_projections_columns, m_projections_values); + if(status<0)//TODO exception?
+ return; + m_sa->update(m_projections_values, m_projections_columns); + } + +}; +#endif //_ARROW_EXIST + +class json_object : public base_s3object +{ +private: + + JsonParserHandler JsonHandler; + size_t m_processed_bytes; + bool m_end_of_stream; + std::string* m_s3select_result = nullptr; + size_t m_row_count; + bool star_operation_ind; + std::string m_error_description; + bool m_init_json_processor_ind; + +public: + + void init_json_processor(s3select* query) + { + if(m_init_json_processor_ind) + return; + + m_init_json_processor_ind = true; + std::function<int(void)> f_sql = [this](void){auto res = sql_execution_on_row_cb();return res;}; + std::function<int(s3selectEngine::value&,int)> + f_push_to_scratch = [this](s3selectEngine::value& value,int json_var_idx){return push_into_scratch_area_cb(value,json_var_idx);}; + std::function<int(s3selectEngine::scratch_area::json_key_value_t&)> + f_push_key_value_into_scratch_area_per_star_operation = [this](s3selectEngine::scratch_area::json_key_value_t& key_value) + {return push_key_value_into_scratch_area_per_star_operation(key_value);}; + + //setting the container for all json-variables, to be extracted by the json reader + JsonHandler.set_statement_json_variables(query->get_json_variables_access()); + + + //calling to getMatchRow. processing a single row per each call. + JsonHandler.set_s3select_processing_callback(f_sql); + //upon an exact match between the input-json-key-path and the sql-statement-variable-path, the callback pushes to the scratch area + JsonHandler.set_exact_match_callback(f_push_to_scratch); + //upon star-operation (in the statement) the callback pushes the key-path and value into the scratch-area + JsonHandler.set_push_per_star_operation_callback(f_push_key_value_into_scratch_area_per_star_operation); + + //the json-from-clause is unique and should exist. otherwise it's a failure. + if(query->getAction()->json_from_clause.empty()) + { + JsonHandler.m_fatal_initialization_ind = true; + JsonHandler.m_fatal_initialization_description = "the SQL statement is not aligned with the correct JSON statement syntax; the from-clause is missing."; + return; + } + + //setting the from clause path + if(query->getAction()->json_from_clause[0] == JSON_ROOT_OBJECT) + { + query->getAction()->json_from_clause.pop_back(); + } + JsonHandler.set_prefix_match(query->getAction()->json_from_clause); + + for (auto& p : m_projections) + { + if(p->is_statement_contain_star_operation()) + { + star_operation_ind=true; + break; + } + } + + if(star_operation_ind) + { + JsonHandler.set_star_operation(); + //upon star-operation the key-path is extracted with the value; each key-value is displayed in a separate row. + //the returned results end with a line containing the row-number.
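+ /* Editor's illustration: with the star-operation every matched key/value pair
+    becomes its own output row (hence column delimiter == row delimiter below),
+    and sql_execution_on_row_cb() terminates each input row with a counter line
+    of the form "#=== <row-number> ===#". */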
+ m_csv_defintion.output_column_delimiter = m_csv_defintion.output_row_delimiter; + } + + m_sa->set_parquet_type();//TODO json type + } + + json_object(s3select* query):base_s3object(query),m_processed_bytes(0),m_end_of_stream(false),m_row_count(0),star_operation_ind(false),m_init_json_processor_ind(false) + { + init_json_processor(query); + } + + void set_sql_result(std::string& sql_result) + { + m_s3select_result = &sql_result; + } + + json_object(): base_s3object(nullptr), m_processed_bytes(0),m_end_of_stream(false),m_row_count(0),star_operation_ind(false),m_init_json_processor_ind(false) {} + +private: + + virtual bool is_end_of_stream() + { + return m_end_of_stream == true; + } + + virtual bool multiple_row_processing() + { + return false; + } + + int sql_execution_on_row_cb() + { + //execute statement on row + //create response (TODO callback) + + size_t result_len = m_s3select_result->size(); + int status=0; + try{ + getMatchRow(*m_s3select_result); + } + catch(s3selectEngine::base_s3select_exception& e) + { + sql_error_handling(e,*m_s3select_result); + status = -1; + } + + if(is_sql_limit_reached()) + { + status = JSON_PROCESSING_LIMIT_REACHED;//returning a number since sql_execution_on_row_cb is a callback; the caller cannot access the object + } + + m_sa->clear_data(); + if(star_operation_ind && (m_s3select_result->size() != result_len)) + {//as explained above, the star-operation is displayed differently + std::string end_of_row; + end_of_row = "#=== " + std::to_string(m_row_count++) + " ===#\n"; + m_s3select_result->append(end_of_row); + } + return status; + } + + int push_into_scratch_area_cb(s3selectEngine::value& key_value, int json_var_idx) + { + //upon an exact-filter match, push the value to the scratch area with the json-idx; it should match a variable + //push (key path, json-var-idx, value); a json-var-idx should be attached per each exact filter + m_sa->update_json_varible(key_value,json_var_idx); + return 0; + } + + int push_key_value_into_scratch_area_per_star_operation(s3selectEngine::scratch_area::json_key_value_t& key_value) + { + m_sa->get_star_operation_cont()->push_back( key_value ); + return 0; + } + + void sql_error_handling(s3selectEngine::base_s3select_exception& e,std::string& result) + { + //the JsonHandler makes the call to SQL processing; upon a failure to process the SQL statement, + //the error-handling takes care of the error flow. + m_error_description = e.what(); + m_error_count++; + m_s3select_result->append(std::to_string(m_error_count)); + *m_s3select_result += " : "; + m_s3select_result->append(m_error_description); + *m_s3select_result += m_csv_defintion.output_row_delimiter; + } + +public: + + int run_s3select_on_stream(std::string& result, const char* json_stream, size_t stream_length, size_t obj_size) + { + int status=0; + m_processed_bytes += stream_length; + set_sql_result(result); + + if(JsonHandler.is_fatal_initialization()) + { + throw base_s3select_exception(JsonHandler.m_fatal_initialization_description, base_s3select_exception::s3select_exp_en_t::FATAL); + } + + if(!stream_length || !json_stream)//TODO m_processed_bytes(?)
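+ /* Editor's sketch (hypothetical caller; next_chunk() and object_size are assumed
+    names): the engine accepts arbitrary buffer slices and is flushed with an
+    empty final call, which is the branch handled just below:
+
+        json_object jo(&query);
+        while ((n = next_chunk(buf, sizeof(buf))) > 0)
+          jo.run_s3select_on_stream(result, buf, n, object_size);
+        jo.run_s3select_on_stream(result, nullptr, 0, object_size); // flush
+    */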
+ {//last processing cycle + JsonHandler.process_json_buffer(0, 0, true);//TODO end-of-stream = end-of-row + m_end_of_stream = true; + sql_execution_on_row_cb(); + return 0; + } + + try{ + //the handler is processing any buffer size and return results per each buffer + status = JsonHandler.process_json_buffer((char*)json_stream, stream_length); + } + catch(std::exception &e) + { + std::string error_description = std::string("exception while processing :") + e.what(); + throw base_s3select_exception(error_description,base_s3select_exception::s3select_exp_en_t::FATAL); + } + + if(status<0) + { + std::string error_description = std::string("failure upon JSON processing"); + throw base_s3select_exception(error_description,base_s3select_exception::s3select_exp_en_t::FATAL); + return -1; + } + + return status; + } + + void set_json_query(s3select* s3_query) + { + set_base_defintions(s3_query); + init_json_processor(s3_query); + } + + std::string get_error_description() + { + return m_error_description; + } + + ~json_object() = default; +}; + +}; // namespace s3selectEngine + +#endif diff --git a/src/s3select/include/s3select_csv_parser.h b/src/s3select/include/s3select_csv_parser.h new file mode 100644 index 000000000..dab2e4efa --- /dev/null +++ b/src/s3select/include/s3select_csv_parser.h @@ -0,0 +1,418 @@ +#include "csvparser/csv.h" + +namespace io{ + + namespace error{ + struct escaped_char_missing : + base, + with_file_name, + with_file_line{ + void format_error_message()const override{ + std::snprintf(error_message_buffer, sizeof(error_message_buffer), + "Escaped character missing in line %d in file \"%s\"." + , file_line, file_name); + } + }; + } + + namespace detail{ + static void unescape(char*&col_begin, char*&col_end, char& quote, char& escape_char) + { + if(col_end - col_begin >= 2) + { + while(*col_begin == quote && *(col_begin + 1) == quote) + { + ++col_begin; + ++col_begin; + } + char*out = col_begin; + char* in = col_begin; + bool init = true; + + while(in != col_end) + { + if(*in != quote && *in != escape_char) + { + if(init) + { + init = false; + } + else + { + *out = *in; + } + ++in; + ++out; + } + else + { + if(*in == escape_char) + { + ++in; + if(init) + { + ++col_begin; + ++out; + init = false; + } + else + { + *out = *in; + } + ++in; + ++out; + } + else + { + ++in; + while(*in != quote) + { + if(init) + { + ++col_begin; + ++out; + init = false; + } + else + { + *out = *in; + } + ++in; + ++out; + } + ++in; + } + } + } + *out = '\0'; + col_end = out; + } + } + + static void trim(char*&str_begin, char*&str_end, std::vector& trim_chars) + { + while(str_begin != str_end && std::find(trim_chars.begin(), trim_chars.end(), *str_begin) != trim_chars.end()) + ++str_begin; + while(str_begin != str_end && std::find(trim_chars.begin(), trim_chars.end(), *(str_end-1)) != trim_chars.end()) + --str_end; + *str_end = '\0'; + } + + static const char*find_next_column_end(const char*col_begin, char& sep, char& quote, char& escape_char) + { + while(*col_begin != sep && *col_begin != '\0') + { + if(*col_begin != quote && *col_begin != escape_char) + ++col_begin; + else + { + if(*col_begin == escape_char) + { + if(*(col_begin+1) == '\0') + throw error::escaped_char_missing(); + col_begin += 2; + } + else + { + do + { + ++col_begin; + while(*col_begin != quote) + { + if(*col_begin == '\0') + throw error::escaped_string_not_closed(); + ++col_begin; + } + ++col_begin; + }while(*col_begin == quote); + } + } + } + return col_begin; + } + + void chop_next_column(char*&line, char*&col_begin, 
char*&col_end, char& col_delimiter, char& quote, char& escape_char) + { + assert(line != nullptr); + + col_begin = line; + // the col_begin + (... - col_begin) removes the constness + col_end = col_begin + (find_next_column_end(col_begin, col_delimiter, quote, escape_char) - col_begin); + + if(*col_end == '\0') + { + line = nullptr; + } + else + { + *col_end = '\0'; + line = col_end + 1; + } + } + + void parse_line(char*line, std::vector<char*>& sorted_col, char& col_delimiter, char& quote, char& escape_char, std::vector<char>& trim_chars) + { + while (line != nullptr) + { + char*col_begin, *col_end; + chop_next_column(line, col_begin, col_end, col_delimiter, quote, escape_char); + if (!trim_chars.empty()) + trim(col_begin, col_end, trim_chars); + if (!(quote == '\0' && escape_char == '\0')) + unescape(col_begin, col_end, quote, escape_char); + sorted_col.push_back(col_begin); + } + } + + + bool empty_comment_line(char* line) + { + if(*line == '\0') + return true; + while(*line == ' ' || *line == '\t') + { + ++line; + if(*line == '\0') + return true; + } + return false; + } + + bool single_line_comment(char start_char, std::vector<char>& comment_chars) + { + if(std::find(comment_chars.begin(), comment_chars.end(), start_char) != comment_chars.end()) + return true; + else + return false; + } + + bool is_comment(char*&line, bool& comment_empty_line, std::vector<char>& comment_chars) + { + if(!comment_empty_line && comment_chars.empty()) + return false; + else if(comment_empty_line && comment_chars.empty()) + return empty_comment_line(line); + else if(!comment_empty_line && !comment_chars.empty()) + return single_line_comment(*line, comment_chars); + else + return empty_comment_line(line) || single_line_comment(*line, comment_chars); + } + + } +} + + +class CSVParser +{ + private: + char row_delimiter; + char col_delimiter; + char quote; + char escape_char; + bool comment_empty_line; + std::vector<char> comment_characters; + std::vector<char> trim_characters; + + static const int block_len = 1<<20; + std::unique_ptr<char[]>buffer; // must be constructed before (and thus destructed after) the reader!
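+ /* Editor's note: the 3*block_len buffer implements csv.h's double-buffering
+    scheme: bytes [0, 2*block_len) hold the current parsing window while the
+    (possibly asynchronous) reader fills [2*block_len, 3*block_len); next_line()
+    shifts the window down by one block once the cursor passes block_len. */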
#ifdef CSV_IO_NO_THREAD + io::detail::SynchronousReader reader; + #else + io::detail::AsynchronousReader reader; + #endif + int data_begin; + int data_end; + + char file_name[io::error::max_file_name_length+1]; + unsigned file_line; + + void init(std::unique_ptr<io::detail::ByteSourceBase>byte_source) + { + file_line = 0; + + buffer = std::unique_ptr<char[]>(new char[3*block_len]); + data_begin = 0; + data_end = byte_source->read(buffer.get(), 2*block_len); + + // Ignore UTF-8 BOM + if(data_end >= 3 && buffer[0] == '\xEF' && buffer[1] == '\xBB' && buffer[2] == '\xBF') + data_begin = 3; + + if(data_end == 2*block_len){ + reader.init(std::move(byte_source)); + reader.start_read(buffer.get() + 2*block_len, block_len); + } + } + + public: + CSVParser() = delete; + CSVParser(const CSVParser&) = delete; + CSVParser&operator=(const CSVParser&); + + CSVParser(const char*file_name, const char*data_begin, const char*data_end) + { + set_file_name(file_name); + init(std::unique_ptr<io::detail::ByteSourceBase>(new io::detail::NonOwningStringByteSource(data_begin, data_end-data_begin))); + } + + CSVParser(const std::string&file_name, const char*data_begin, const char*data_end) + { + set_file_name(file_name.c_str()); + init(std::unique_ptr<io::detail::ByteSourceBase>(new io::detail::NonOwningStringByteSource(data_begin, data_end-data_begin))); + } + + void set_file_name(const std::string&file_name) + { + set_file_name(file_name.c_str()); + } + + void set_file_name(const char*file_name) + { + if(file_name != nullptr) + { + strncpy(this->file_name, file_name, sizeof(this->file_name)); + this->file_name[sizeof(this->file_name)-1] = '\0'; + } + else + { + this->file_name[0] = '\0'; + } + } + + const char*get_truncated_file_name()const + { + return file_name; + } + + void set_file_line(unsigned file_line) + { + this->file_line = file_line; + } + + unsigned get_file_line()const + { + return file_line; + } + + void set_csv_def(char& row_delimit, char& col_delimit, char& quote_char, char& escp_char, bool& cmnt_empty_line, std::vector<char>& comment_chars , std::vector<char>& trim_chars) + { + row_delimiter = row_delimit; + col_delimiter = col_delimit; + quote = quote_char; + escape_char = escp_char; + comment_empty_line = cmnt_empty_line; + comment_characters.assign(comment_chars.begin(), comment_chars.end()); + trim_characters.assign(trim_chars.begin(), trim_chars.end()); + } + + char*next_line() + { + if(data_begin == data_end) + return nullptr; + + ++file_line; + + assert(data_begin < data_end); + assert(data_end <= block_len*2); + + if(data_begin >= block_len) + { + std::memcpy(buffer.get(), buffer.get()+block_len, block_len); + data_begin -= block_len; + data_end -= block_len; + if(reader.is_valid()) + { + data_end += reader.finish_read(); + std::memcpy(buffer.get()+block_len, buffer.get()+2*block_len, block_len); + reader.start_read(buffer.get() + 2*block_len, block_len); + } + } + + int line_end = data_begin; + while(line_end != data_end && buffer[line_end] != row_delimiter) + { + if(buffer[line_end] == quote || buffer[line_end] == escape_char) + { + if(buffer[line_end] == escape_char) + { + ++line_end; + if(line_end == data_end) + { + throw io::error::escaped_char_missing(); + } + else if(buffer[line_end] == '\r' && buffer[line_end + 1] == '\n') // handle windows \r\n-line breaks + { + ++line_end; + } + } + else + { + ++line_end; + while(buffer[line_end] != quote) + { + if(line_end == data_end) + throw io::error::escaped_string_not_closed(); + ++line_end; + } + } + } + ++line_end; + } + + if(line_end - data_begin + 1 > block_len) + { + io::error::line_length_limit_exceeded err; +
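+ /* Editor's note: csv.h errors carry their context through the with_file_name /
+    with_file_line mix-ins; filling them in here lets read_row() re-throw with
+    both the (truncated) stream name and the 1-based line number attached. */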
err.set_file_name(file_name); + err.set_file_line(file_line); + throw err; + } + + if(line_end != data_end && buffer[line_end] == row_delimiter) + { + buffer[line_end] = '\0'; + } + else + { + // some files are missing the newline at the end of the + // last line + ++data_end; + buffer[line_end] = '\0'; + } + + // handle windows \r\n-line breaks + if(row_delimiter == '\n') + { + if(line_end != data_begin && buffer[line_end-1] == '\r') + buffer[line_end-1] = '\0'; + } + + char*ret = buffer.get() + data_begin; + data_begin = line_end+1; + return ret; + } + + bool read_row(std::vector<char*>& cols) + { + try{ + try{ + char*line; + do{ + line = next_line(); + if(!line) + return false; + }while(io::detail::is_comment(line, comment_empty_line, comment_characters)); + + io::detail::parse_line(line, cols, col_delimiter, quote, escape_char, trim_characters); + + }catch(io::error::with_file_name&err){ + err.set_file_name(get_truncated_file_name()); + throw; + } + }catch(io::error::with_file_line&err){ + err.set_file_line(get_file_line()); + throw; + } + + return true; + } +}; diff --git a/src/s3select/include/s3select_functions.h b/src/s3select/include/s3select_functions.h new file mode 100644 index 000000000..8c507fca1 --- /dev/null +++ b/src/s3select/include/s3select_functions.h @@ -0,0 +1,2703 @@ +#ifndef __S3SELECT_FUNCTIONS__ +#define __S3SELECT_FUNCTIONS__ + + +#include "s3select_oper.h" +#include <boost/algorithm/string.hpp> +#include <boost/regex.hpp> +#include <algorithm> +#include <iomanip> + +using namespace std::string_literals; + +#define BOOST_BIND_ACTION_PARAM( push_name ,param ) boost::bind( &push_name::operator(), g_ ## push_name , _1 ,_2, param) +namespace s3selectEngine +{ + +constexpr double sec_scale(int n) +{ + return pow(10, n); +} + +struct push_char +{ + void operator()(const char* a, const char* b, uint32_t* n) const + { + *n = *a; + } + +}; +static push_char g_push_char; + +struct push_2dig +{ + void operator()(const char* a, const char* b, uint32_t* n) const + { + *n = (static_cast<char>(*a) - 48) * 10 + (static_cast<char>(*(a+1)) - 48) ; + } + +}; +static push_2dig g_push_2dig; + +struct push_4dig +{ + void operator()(const char* a, const char* b, uint32_t* n) const + { + *n = (static_cast<char>(*a) - 48) * 1000 + (static_cast<char>(*(a+1)) - 48) * 100 + (static_cast<char>(*(a+2)) - 48) * 10 + (static_cast<char>(*(a+3)) - 48); + } + +}; +static push_4dig g_push_4dig; + +struct push_1fdig +{ + void operator()(const char* a, const char* b, uint32_t* n) const + { + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + const double scale = sec_scale(9-1); //nano-sec + #else + const double scale = sec_scale(6-1); //micro-sec + #endif + + *n = ((static_cast<char>(*a) - 48)) * scale; + } + +}; +static push_1fdig g_push_1fdig; + +struct push_2fdig +{ + void operator()(const char* a, const char* b, uint32_t* n) const + { + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + const double scale = sec_scale(9-2); //nano-sec + #else + const double scale = sec_scale(6-2); //micro-sec + #endif + + *n = ((static_cast<char>(*a) - 48) * 10 + (static_cast<char>(*(a+1)) - 48)) * scale; + } + +}; +static push_2fdig g_push_2fdig; + +struct push_3fdig +{ + void operator()(const char* a, const char* b, uint32_t* n) const + { + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + const double scale = sec_scale(9-3); //nano-sec + #else + const double scale = sec_scale(6-3); //micro-sec + #endif + + *n = ((static_cast<char>(*a) - 48) * 100 + (static_cast<char>(*(a+1)) - 48) * 10 + (static_cast<char>(*(a+2)) - 48)) * scale; + } + +}; +static push_3fdig g_push_3fdig; + +struct push_4fdig +{ + void operator()(const char* a, const char* b, uint32_t* n) const + { + #if
BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + const double scale = sec_scale(9-4); //nano-sec + #else + const double scale = sec_scale(6-4); //micro-sec + #endif + + *n = ((static_cast<char>(*a) - 48) * 1000 + (static_cast<char>(*(a+1)) - 48) * 100 + (static_cast<char>(*(a+2)) - 48) * 10 + (static_cast<char>(*(a+3)) - 48)) * scale; + } + +}; +static push_4fdig g_push_4fdig; + +struct push_5fdig +{ + void operator()(const char* a, const char* b, uint32_t* n) const + { + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + const double scale = sec_scale(9-5); //nano-sec + #else + const double scale = sec_scale(6-5); //micro-sec + #endif + + *n = ((static_cast<char>(*a) - 48) * 10000 + (static_cast<char>(*(a+1)) - 48) * 1000 + (static_cast<char>(*(a+2)) - 48) * 100 + (static_cast<char>(*(a+3)) - 48) * 10 + (static_cast<char>(*(a+4)) - 48)) * scale; + } + +}; +static push_5fdig g_push_5fdig; + +struct push_6fdig +{ + void operator()(const char* a, const char* b, uint32_t* n) const + { + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + const double scale = sec_scale(9-6); //nano-sec + #else + const double scale = sec_scale(6-6); //micro-sec + #endif + + *n = ((static_cast<char>(*a) - 48) * 100000 + (static_cast<char>(*(a+1)) - 48) * 10000 + (static_cast<char>(*(a+2)) - 48) * 1000 + (static_cast<char>(*(a+3)) - 48) * 100 + (static_cast<char>(*(a+4)) - 48) * 10 + (static_cast<char>(*(a+5)) - 48)) * scale; + } + +}; +static push_6fdig g_push_6fdig; + +#if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + struct push_7fdig + { + void operator()(const char* a, const char* b, uint32_t* n) const + { + const double scale = sec_scale(9-7); //nano-sec + *n = ((static_cast<char>(*a) - 48) * 1000000 + (static_cast<char>(*(a+1)) - 48) * 100000 + (static_cast<char>(*(a+2)) - 48) * 10000 + (static_cast<char>(*(a+3)) - 48) * 1000 + (static_cast<char>(*(a+4)) - 48) * 100 + (static_cast<char>(*(a+5)) - 48) * 10 + (static_cast<char>(*(a+6)) - 48)) * scale; + } + + }; + static push_7fdig g_push_7fdig; + + struct push_8fdig + { + void operator()(const char* a, const char* b, uint32_t* n) const + { + const double scale = sec_scale(9-8); //nano-sec + *n = ((static_cast<char>(*a) - 48) * 10000000 + (static_cast<char>(*(a+1)) - 48) * 1000000 + (static_cast<char>(*(a+2)) - 48) * 100000 + (static_cast<char>(*(a+3)) - 48) * 10000 + (static_cast<char>(*(a+4)) - 48) * 1000 + (static_cast<char>(*(a+5)) - 48) * 100 + (static_cast<char>(*(a+6)) - 48) * 10 + (static_cast<char>(*(a+7)) - 48)) * scale; + } + + }; + static push_8fdig g_push_8fdig; + + struct push_9fdig + { + void operator()(const char* a, const char* b, uint32_t* n) const + { + const double scale = sec_scale(9-9); //nano-sec + *n = ((static_cast<char>(*a) - 48) * 100000000 + (static_cast<char>(*(a+1)) - 48) * 10000000 + (static_cast<char>(*(a+2)) - 48) * 1000000 + (static_cast<char>(*(a+3)) - 48) * 100000 + (static_cast<char>(*(a+4)) - 48) * 10000 + (static_cast<char>(*(a+5)) - 48) * 1000 + (static_cast<char>(*(a+6)) - 48) * 100 + (static_cast<char>(*(a+7)) - 48) * 10 + (static_cast<char>(*(a+8)) - 48)) * scale; + } + + }; + static push_9fdig g_push_9fdig; +#endif + +enum class s3select_func_En_t {ADD, + SUM, + AVG, + MIN, + MAX, + COUNT, + TO_INT, + TO_FLOAT, + TO_TIMESTAMP, + TO_STRING_CONSTANT, + TO_STRING_DYNAMIC, + TO_BOOL, + SUBSTR, + EXTRACT_YEAR, + EXTRACT_MONTH, + EXTRACT_DAY, + EXTRACT_HOUR, + EXTRACT_MINUTE, + EXTRACT_SECOND, + EXTRACT_WEEK, + EXTRACT_TIMEZONE_HOUR, + EXTRACT_TIMEZONE_MINUTE, + DATE_ADD_YEAR, + DATE_ADD_MONTH, + DATE_ADD_DAY, + DATE_ADD_HOUR, + DATE_ADD_MINUTE, + DATE_ADD_SECOND, + DATE_DIFF_YEAR, + DATE_DIFF_MONTH, + DATE_DIFF_DAY, + DATE_DIFF_HOUR, + DATE_DIFF_MINUTE, + DATE_DIFF_SECOND, + UTCNOW, + LENGTH, + LOWER, + UPPER, + NULLIF, + BETWEEN, + NOT_BETWEEN, + IS_NULL, + IS_NOT_NULL, + IN, +
LIKE, + VERSION, + CASE_WHEN_ELSE, + WHEN_THEN, + WHEN_VALUE_THEN, + COALESCE, + STRING, + TRIM, + LEADING, + TRAILING, + DECIMAL_OPERATOR, + CAST_TO_DECIMAL, + ENGINE_VERSION + }; + + +class s3select_functions +{ + +private: + + using FunctionLibrary = std::map<std::string, s3select_func_En_t>; + s3select_allocator* m_s3select_allocator; + std::set<base_statement*>* m_ast_nodes_for_cleanup; + + const FunctionLibrary m_functions_library = + { + {"add", s3select_func_En_t::ADD}, + {"sum", s3select_func_En_t::SUM}, + {"avg", s3select_func_En_t::AVG}, + {"count", s3select_func_En_t::COUNT}, + {"min", s3select_func_En_t::MIN}, + {"max", s3select_func_En_t::MAX}, + {"int", s3select_func_En_t::TO_INT}, + {"float", s3select_func_En_t::TO_FLOAT}, + {"substring", s3select_func_En_t::SUBSTR}, + {"to_timestamp", s3select_func_En_t::TO_TIMESTAMP}, + {"#to_string_constant#",s3select_func_En_t::TO_STRING_CONSTANT}, + {"#to_string_dynamic#",s3select_func_En_t::TO_STRING_DYNAMIC}, + {"to_bool", s3select_func_En_t::TO_BOOL}, + {"#extract_year#", s3select_func_En_t::EXTRACT_YEAR}, + {"#extract_month#", s3select_func_En_t::EXTRACT_MONTH}, + {"#extract_day#", s3select_func_En_t::EXTRACT_DAY}, + {"#extract_hour#", s3select_func_En_t::EXTRACT_HOUR}, + {"#extract_minute#", s3select_func_En_t::EXTRACT_MINUTE}, + {"#extract_second#", s3select_func_En_t::EXTRACT_SECOND}, + {"#extract_week#", s3select_func_En_t::EXTRACT_WEEK}, + {"#extract_timezone_hour#", s3select_func_En_t::EXTRACT_TIMEZONE_HOUR}, + {"#extract_timezone_minute#", s3select_func_En_t::EXTRACT_TIMEZONE_MINUTE}, + {"#dateadd_year#", s3select_func_En_t::DATE_ADD_YEAR}, + {"#dateadd_month#", s3select_func_En_t::DATE_ADD_MONTH}, + {"#dateadd_day#", s3select_func_En_t::DATE_ADD_DAY}, + {"#dateadd_hour#", s3select_func_En_t::DATE_ADD_HOUR}, + {"#dateadd_minute#", s3select_func_En_t::DATE_ADD_MINUTE}, + {"#dateadd_second#", s3select_func_En_t::DATE_ADD_SECOND}, + {"#datediff_year#", s3select_func_En_t::DATE_DIFF_YEAR}, + {"#datediff_month#", s3select_func_En_t::DATE_DIFF_MONTH}, + {"#datediff_day#", s3select_func_En_t::DATE_DIFF_DAY}, + {"#datediff_hour#", s3select_func_En_t::DATE_DIFF_HOUR}, + {"#datediff_minute#", s3select_func_En_t::DATE_DIFF_MINUTE}, + {"#datediff_second#", s3select_func_En_t::DATE_DIFF_SECOND}, + {"utcnow", s3select_func_En_t::UTCNOW}, + {"character_length", s3select_func_En_t::LENGTH}, + {"char_length", s3select_func_En_t::LENGTH}, + {"lower", s3select_func_En_t::LOWER}, + {"upper", s3select_func_En_t::UPPER}, + {"nullif", s3select_func_En_t::NULLIF}, + {"#between#", s3select_func_En_t::BETWEEN}, + {"#not_between#", s3select_func_En_t::NOT_BETWEEN}, + {"#is_null#", s3select_func_En_t::IS_NULL}, + {"#is_not_null#", s3select_func_En_t::IS_NOT_NULL}, + {"#in_predicate#", s3select_func_En_t::IN}, + {"#like_predicate#", s3select_func_En_t::LIKE}, + {"version", s3select_func_En_t::VERSION}, + {"#when-then#", s3select_func_En_t::WHEN_THEN}, + {"#when-value-then#", s3select_func_En_t::WHEN_VALUE_THEN}, + {"#case-when-else#", s3select_func_En_t::CASE_WHEN_ELSE}, + {"coalesce", s3select_func_En_t::COALESCE}, + {"string", s3select_func_En_t::STRING}, + {"#trim#", s3select_func_En_t::TRIM}, + {"#leading#", s3select_func_En_t::LEADING}, + {"#trailing#", s3select_func_En_t::TRAILING}, + {"#decimal_operator#", s3select_func_En_t::DECIMAL_OPERATOR}, + {"#cast_as_decimal#", s3select_func_En_t::CAST_TO_DECIMAL}, + {"engine_version", s3select_func_En_t::ENGINE_VERSION} + + }; + +public: + + base_function* create(std::string_view fn_name,const bs_stmt_vec_t&); + +
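+ // Usage sketch (illustrative, inferred from the function table above rather than from upstream docs): + // a statement such as "select count(0), max(int(_1)) from s3object;" resolves via create("count",...) and create("max",...) after lowercasing; + // names wrapped in '#' (e.g. "#between#", "#like_predicate#") are generated internally by the parser and cannot be invoked by name from a user query. +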
s3select_functions():m_s3select_allocator(nullptr),m_ast_nodes_for_cleanup(nullptr) + { + } + + + void setAllocator(s3select_allocator* alloc) + { + m_s3select_allocator = alloc; + } + + void set_AST_nodes_for_cleanup(std::set<base_statement*>* ast_for_cleanup) + { + m_ast_nodes_for_cleanup = ast_for_cleanup; + } + + s3select_allocator* getAllocator() + { + return m_s3select_allocator; + } + + void clean(); + +}; + +class __function : public base_statement +{ + +private: + bs_stmt_vec_t arguments; + std::basic_string<char, std::char_traits<char>, ChunkAllocator<char, 256>> name; + base_function* m_func_impl; + s3select_functions* m_s3select_functions; + variable m_result; + bool m_is_aggregate_function; + + void _resolve_name() + { + if (m_func_impl) + { + return; + } + + auto string_to_lower = [](std::basic_string<char, std::char_traits<char>, ChunkAllocator<char, 256>> s) + { + std::transform(s.begin(),s.end(),s.begin(),[](unsigned char c){ return std::tolower(c); }); + return s; + }; + + //the function name is converted into lowercase to enable case-insensitive lookup + base_function* f = m_s3select_functions->create(string_to_lower(name),arguments); + if (!f) + { + throw base_s3select_exception("function not found", base_s3select_exception::s3select_exp_en_t::FATAL); //should abort query + } + m_func_impl = f; + m_is_aggregate_function = m_func_impl->is_aggregate(); + f->set_function_name(name.c_str()); + } + +public: + + base_function* impl() + { + return m_func_impl; + } + + void traverse_and_apply(scratch_area* sa, projection_alias* pa,bool json_statement) override + { + m_scratch = sa; + m_aliases = pa; + m_json_statement = json_statement; + for (base_statement* ba : arguments) + { + ba->traverse_and_apply(sa, pa, json_statement); + } + } + + void set_last_call() override + {//it covers the use-case where an aggregation function is an argument of a non-aggregate function. + is_last_call = true; + for (auto& ba : arguments) + { + ba->set_last_call(); + } + } + + void set_skip_non_aggregate(bool skip_non_aggregate_op) override + {//it covers the use-case where an aggregation function is an argument of a non-aggregate function. + m_skip_non_aggregate_op = skip_non_aggregate_op; + for (auto& ba : arguments) + { + ba->set_skip_non_aggregate(m_skip_non_aggregate_op); + } + } + + bool is_aggregate() const override + { + return m_is_aggregate_function; + } + + bool semantic() override + { + return true; + } + + __function(const char* fname, s3select_functions* s3f) : name(fname), m_func_impl(nullptr), m_s3select_functions(s3f),m_is_aggregate_function(false){set_operator_name(fname);} + + value& eval() override + { + return eval_internal(); + } + + value& eval_internal() override + { + + _resolve_name();//node is "resolved" (function is created) upon first call/first row. + + if (is_last_call == false) + {//all rows prior to the last row + if(m_skip_non_aggregate_op == false || is_aggregate() == true) + { + (*m_func_impl)(&arguments, &m_result); + } + else if(m_skip_non_aggregate_op == true) + { + for(auto& p : arguments) + {//evaluating the arguments (not the function itself, which is a non-aggregate function) + //i.e. in the following use case substring( , sum(),count() ) ; only sum() and count() are evaluated. + p->eval(); + } + } + } + else + {//on the last row, the aggregate function is finalized, + //and the non-aggregate function is evaluated with the result of the aggregate function.
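+ // Worked example (mirroring the substring(,sum(),count()) note above): for substring(_1, sum(int(_2)), 3), every row before the last updates only the embedded sum(); + // on the last row the finalized sum is read below via get_aggregate_result() and substring() itself executes exactly once. +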
+ if(is_aggregate()) + (*m_func_impl).get_aggregate_result(&m_result); + else + (*m_func_impl)(&arguments, &m_result); + } + + return m_result.get_value(); + } + + void resolve_node() override + { + _resolve_name(); + + for (auto& arg : arguments) + { + arg->resolve_node(); + } + } + + std::string print(int ident) override + { + return std::string(); + } + + void push_argument(base_statement* arg) + { + arguments.push_back(arg); + } + + + bs_stmt_vec_t& get_arguments() + { + return arguments; + } + + virtual ~__function() = default; +}; + +/* + s3-select function definitions +*/ +struct _fn_add : public base_function +{ + + value var_result; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,2); + + auto iter = args->begin(); + base_statement* x = *iter; + iter++; + base_statement* y = *iter; + + var_result = x->eval() + y->eval(); + + *result = var_result; + + return true; + } +}; + +struct _fn_sum : public base_function +{ + + value sum; + + _fn_sum() + { + aggregate = true; + sum.setnull(); + } + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + base_statement* x = *iter; + + try + { + if(sum.is_null()) + { + sum = 0; + } + sum = sum + x->eval(); + } + catch (base_s3select_exception& e) + { + if (e.severity() == base_s3select_exception::s3select_exp_en_t::FATAL) + { + throw; + } + } + + return true; + } + + void get_aggregate_result(variable* result) override + { + *result = sum ; + } +}; + +struct _fn_count : public base_function +{ + + int64_t count; + + _fn_count():count(0) + { + aggregate=true; + } + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + if (args->size()) + {// in case an argument exists, count only non-null values.
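+ // SQL semantics implemented here (standard COUNT behavior): count() with no argument counts every row, whereas count(_1) counts only the rows where _1 evaluates to a non-null value. +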
+ auto iter = args->begin(); + base_statement* x = *iter; + + if(!x->eval().is_null()) + { + count += 1; + } + } + else + {//in case of non-arguments // count() + count += 1; + } + + return true; + } + + void get_aggregate_result(variable* result) override + { + result->set_value(count); + } + +}; + +struct _fn_avg : public base_function +{ + + value sum; + value count{0.0}; + + _fn_avg() : sum(0) { aggregate = true; } + + bool operator()(bs_stmt_vec_t* args, variable *result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + base_statement *x = *iter; + + try + { + sum = sum + x->eval(); + count++; + } + catch (base_s3select_exception &e) + { + throw base_s3select_exception(e.what()); + } + + return true; + } + + void get_aggregate_result(variable *result) override + { + if(count == static_cast(0)) { + value v_null; + v_null.setnull(); + *result=v_null; + } else { + *result = sum/count ; + } + } +}; + +struct _fn_min : public base_function +{ + + value min; + + _fn_min() + { + aggregate=true; + min.setnull(); + } + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + base_statement* x = *iter; + + if(min.is_null() || min > x->eval()) + { + min=x->eval(); + } + + return true; + } + + void get_aggregate_result(variable* result) override + { + *result = min; + } + +}; + +struct _fn_max : public base_function +{ + + value max; + + _fn_max() + { + aggregate=true; + max.setnull(); + } + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + base_statement* x = *iter; + + if(max.is_null() || max < x->eval()) + { + max=x->eval(); + } + + return true; + } + + void get_aggregate_result(variable* result) override + { + *result = max; + } + +}; + +struct _fn_to_int : public base_function +{ + value var_result; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + value v = (*args->begin())->eval(); + + switch (v.type) { + + case value::value_En_t::STRING: + { + char* pend; + errno = 0; + int64_t i= strtol(v.str(), &pend, 10); + if (errno == ERANGE) { + throw base_s3select_exception("converted value would fall out of the range of the result type!"); + } + if (pend == v.str()) { + // no number found + throw base_s3select_exception("text cannot be converted to a number"); + } + if (*pend) { + throw base_s3select_exception("extra characters after the number"); + } + + var_result = i; + } + break; + + case value::value_En_t::FLOAT: + var_result = static_cast(v.dbl()); + break; + + default: + var_result = v.i64(); + break; + } + + *result = var_result; + return true; + } + +}; + +struct _fn_to_float : public base_function +{ + value var_result; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + value v = (*args->begin())->eval(); + + switch (v.type) { + + case value::value_En_t::STRING: + { + char* pend; + double d = strtod(v.str(), &pend); + if (errno == ERANGE) { + throw base_s3select_exception("converted value would fall out of the range of the result type!"); + } + if (pend == v.str()) { + // no number found + throw base_s3select_exception("text cannot be converted to a number"); + } + if (*pend) { + throw base_s3select_exception("extra characters after the number"); + } + + var_result = d; + } + break; + + case value::value_En_t::FLOAT: + var_result = v.dbl(); + break; + + default: + var_result = v.i64(); + break; + } + + *result 
= var_result; + return true; + } + +}; + +struct _fn_to_timestamp : public base_function +{ + bsc::rule<> date_separator = bsc::ch_p("-"); + bsc::rule<> time_separator = bsc::ch_p(":"); + bsc::rule<> nano_sec_separator = bsc::ch_p("."); + bsc::rule<> delimiter = bsc::ch_p("T"); + bsc::rule<> zero_timezone = bsc::ch_p("Z"); + bsc::rule<> timezone_sign = bsc::ch_p("-") | bsc::ch_p("+"); + + uint32_t yr = 1700, mo = 1, dy = 1; + bsc::rule<> dig4 = bsc::lexeme_d[bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p]; + bsc::rule<> dig2 = bsc::lexeme_d[bsc::digit_p >> bsc::digit_p]; + + bsc::rule<> d_yyyy_dig = ((dig4[BOOST_BIND_ACTION_PARAM(push_4dig, &yr)]) >> *(delimiter)); + bsc::rule<> d_yyyymmdd_dig = ((dig4[BOOST_BIND_ACTION_PARAM(push_4dig, &yr)]) >> *(date_separator) + >> (dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &mo)]) >> *(date_separator) + >> (dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &dy)]) >> *(delimiter)); + + uint32_t hr = 0, mn = 0, sc = 0, frac_sec = 0, tz_hr = 0, tz_mn = 0, sign = 0, tm_zone = '0'; + + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + bsc::rule<> fdig9 = bsc::lexeme_d[bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p]; + bsc::rule<> fdig8 = bsc::lexeme_d[bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p]; + bsc::rule<> fdig7 = bsc::lexeme_d[bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p]; + #endif + + bsc::rule<> fdig6 = bsc::lexeme_d[bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p]; + bsc::rule<> fdig5 = bsc::lexeme_d[bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p]; + bsc::rule<> fdig4 = bsc::lexeme_d[bsc::digit_p >> bsc::digit_p >> bsc::digit_p >> bsc::digit_p]; + bsc::rule<> fdig3 = bsc::lexeme_d[bsc::digit_p >> bsc::digit_p >> bsc::digit_p]; + bsc::rule<> fdig2 = bsc::lexeme_d[bsc::digit_p >> bsc::digit_p]; + bsc::rule<> fdig1 = bsc::lexeme_d[bsc::digit_p]; + + bsc::rule<> d_timezone_dig = ((timezone_sign[BOOST_BIND_ACTION_PARAM(push_char, &sign)]) >> (dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &tz_hr)]) >> *(time_separator) + >> (dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &tz_mn)])) | (zero_timezone[BOOST_BIND_ACTION_PARAM(push_char, &tm_zone)]); + + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + bsc::rule<> fraction_sec = (fdig9[BOOST_BIND_ACTION_PARAM(push_9fdig, &frac_sec)]) | + (fdig8[BOOST_BIND_ACTION_PARAM(push_8fdig, &frac_sec)]) | + (fdig7[BOOST_BIND_ACTION_PARAM(push_7fdig, &frac_sec)]) | + (fdig6[BOOST_BIND_ACTION_PARAM(push_6fdig, &frac_sec)]) | + (fdig5[BOOST_BIND_ACTION_PARAM(push_5fdig, &frac_sec)]) | + (fdig4[BOOST_BIND_ACTION_PARAM(push_4fdig, &frac_sec)]) | + (fdig3[BOOST_BIND_ACTION_PARAM(push_3fdig, &frac_sec)]) | + (fdig2[BOOST_BIND_ACTION_PARAM(push_2fdig, &frac_sec)]) | + (fdig1[BOOST_BIND_ACTION_PARAM(push_1fdig, &frac_sec)]); + #else + bsc::rule<> fraction_sec = (fdig6[BOOST_BIND_ACTION_PARAM(push_6fdig, &frac_sec)]) | + (fdig5[BOOST_BIND_ACTION_PARAM(push_5fdig, &frac_sec)]) | + (fdig4[BOOST_BIND_ACTION_PARAM(push_4fdig, &frac_sec)]) | + (fdig3[BOOST_BIND_ACTION_PARAM(push_3fdig, &frac_sec)]) | + (fdig2[BOOST_BIND_ACTION_PARAM(push_2fdig, &frac_sec)]) | + (fdig1[BOOST_BIND_ACTION_PARAM(push_1fdig, &frac_sec)]); + #endif + + bsc::rule<> d_time_dig = ((dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &hr)]) >> *(time_separator) + >> 
(dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &mn)]) >> *(time_separator) + >> (dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &sc)]) >> *(nano_sec_separator) + >> (fraction_sec) >> (d_timezone_dig)) | + ((dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &hr)]) >> *(time_separator) + >> (dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &mn)]) >> *(time_separator) + >> (dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &sc)]) >> (d_timezone_dig)) | + ((dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &hr)]) >> *(time_separator) + >> (dig2[BOOST_BIND_ACTION_PARAM(push_2dig, &mn)]) >> (d_timezone_dig)); + + bsc::rule<> d_date_time = ((d_yyyymmdd_dig) >> (d_time_dig)) | (d_yyyymmdd_dig) | (d_yyyy_dig); + + timestamp_t tmstmp; + value v_str; + int tz_hour, tz_min; + + bool datetime_validation() + { + if (yr >= 1400 && yr <= 9999 && mo >= 1 && mo <= 12 && dy >= 1 && hr < 24 && mn < 60 && sc < 60 && tz_hour <= 14 && tz_hour >= -12 && tz_mn < 60) + { + if ( (tz_hour == -12 || tz_hour == 14) && tz_mn > 0) + return false; + + switch (mo) + { + case 1: + case 3: + case 5: + case 7: + case 8: + case 10: + case 12: + if(dy <= 31) + { + return true; + } + break; + case 4: + case 6: + case 9: + case 11: + if(dy <= 30) + { + return true; + } + break; + case 2: + if(dy >= 28) + { + if(!(yr % 4) == 0 && dy > 28) + { + return false; + } + else if(!(yr % 100) == 0 && dy <= 29) + { + return true; + } + else if(!(yr % 400) == 0 && dy > 28) + { + return false; + } + else + { + return true; + } + } + else + { + return true; + } + break; + default: + return false; + break; + } + } + return false; + } + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + + hr = 0; + mn = 0; + sc = 0; + frac_sec = 0; + tz_hr = 0; + tz_mn = 0; + tm_zone = '0'; + + auto iter = args->begin(); + int args_size = args->size(); + + if (args_size != 1) + { + throw base_s3select_exception("to_timestamp should have one parameter"); + } + + base_statement* str = *iter; + + v_str = str->eval(); + + if (v_str.type != value::value_En_t::STRING) + { + throw base_s3select_exception("to_timestamp first argument must be string"); //can skip current row + } + + bsc::parse_info<> info_dig = bsc::parse(v_str.str(), d_date_time); + + tz_hour = tz_hr; + tz_min = tz_mn; + if ((char)sign == '-') + { + tz_hour *= -1; + tz_min *= -1; + } + + if(datetime_validation()==false or !info_dig.full) + { + throw base_s3select_exception("input date-time is illegal"); + } + + boost::posix_time::ptime new_ptime; + + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + new_ptime = boost::posix_time::ptime(boost::gregorian::date(yr, mo, dy), + boost::posix_time::hours(hr) + + boost::posix_time::minutes(mn) + + boost::posix_time::seconds(sc) + + boost::posix_time::nanoseconds(frac_sec)); + #else + new_ptime = boost::posix_time::ptime(boost::gregorian::date(yr, mo, dy), + boost::posix_time::hours(hr) + + boost::posix_time::minutes(mn) + + boost::posix_time::seconds(sc) + + boost::posix_time::microseconds(frac_sec)); + #endif + + tmstmp = std::make_tuple(new_ptime, boost::posix_time::time_duration(tz_hour, tz_min, 0), (char)tm_zone == 'Z'); + + result->set_value(&tmstmp); + + return true; + } + +}; + +struct _fn_to_string_constant : public base_timestamp_to_string +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + if (!initialized) + { + prepare_to_string_vector(print_vector, para); + initialized = true; + } + + std::string result_ = execute_to_string(print_vector, para); + + result->set_value(result_.c_str()); + return true; + } +}; + +struct 
_fn_to_string_dynamic : public base_timestamp_to_string +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + print_vector.clear(); + para.clear(); + + prepare_to_string_vector(print_vector, para); + + std::string result_ = execute_to_string(print_vector, para); + + result->set_value(result_.c_str()); + return true; + } +}; + +struct _fn_extract_year_from_timestamp : public base_date_extract +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + result->set_value( (int64_t)new_ptime.date().year()); + return true; + } +}; + +struct _fn_extract_month_from_timestamp : public base_date_extract +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + result->set_value( (int64_t)new_ptime.date().month()); + return true; + } +}; + +struct _fn_extract_day_from_timestamp : public base_date_extract +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + result->set_value( (int64_t)new_ptime.date().day()); + return true; + } +}; + +struct _fn_extract_hour_from_timestamp : public base_date_extract +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + result->set_value( (int64_t)new_ptime.time_of_day().hours()); + return true; + } +}; + +struct _fn_extract_minute_from_timestamp : public base_date_extract +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + result->set_value( (int64_t)new_ptime.time_of_day().minutes()); + return true; + } +}; + +struct _fn_extract_second_from_timestamp : public base_date_extract +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + result->set_value( (int64_t)new_ptime.time_of_day().seconds()); + return true; + } +}; + +struct _fn_extract_week_from_timestamp : public base_date_extract +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + result->set_value( (int64_t)new_ptime.date().week_number()); + return true; + } +}; + +struct _fn_extract_tz_hour_from_timestamp : public base_date_extract +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + result->set_value((int64_t)td.hours()); + return true; + } +}; + +struct _fn_extract_tz_minute_from_timestamp : public base_date_extract +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + result->set_value((int64_t)td.minutes()); + return true; + } +}; + +struct _fn_diff_year_timestamp : public base_date_diff +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + int year1 = ptime1.date().year(); + int year2 = ptime2.date().year(); + boost::posix_time::time_duration time1 = boost::posix_time::time_duration( + ptime1.time_of_day().hours(), ptime1.time_of_day().minutes(), + ptime1.time_of_day().seconds()); + boost::posix_time::time_duration time2 = boost::posix_time::time_duration( + ptime2.time_of_day().hours(), ptime2.time_of_day().minutes(), + ptime2.time_of_day().seconds()); + + if (year2 > year1 && ((ptime2.date().day_of_year() < ptime1.date().day_of_year()) || + (ptime2.date().day_of_year() == ptime1.date().day_of_year() && time2 < time1))) + { + year2 -= 1; + } + else if (year2 < year1 && ((ptime2.date().day_of_year() > ptime1.date().day_of_year()) || + (ptime2.date().day_of_year() == 
ptime1.date().day_of_year() && time2 > time1))) + { + year2 += 1; + } + + int64_t yr = year2 - year1; + result->set_value( yr ); + return true; + } +}; + +struct _fn_diff_month_timestamp : public base_date_diff +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + int year1 = ptime1.date().year(); + int year2 = ptime2.date().year(); + int mon1 = ptime1.date().month(); + int mon2 = ptime2.date().month(); + boost::posix_time::time_duration time1 = boost::posix_time::time_duration( + ptime1.time_of_day().hours(), ptime1.time_of_day().minutes(), + ptime1.time_of_day().seconds()); + boost::posix_time::time_duration time2 = boost::posix_time::time_duration( + ptime2.time_of_day().hours(), ptime2.time_of_day().minutes(), + ptime2.time_of_day().seconds()); + + if (year2 > year1) + { + if (ptime2.date().day() < ptime1.date().day() || (ptime2.date().day() == ptime1.date().day() && time2 < time1)) + { + mon2 -= 1; + } + + if (ptime2.date().month() < ptime1.date().month()) + { + mon2 += 12; + year2 -= 1; + } + } + else if (year2 < year1) + { + if (ptime2.date().day() > ptime1.date().day() || (ptime2.date().day() == ptime1.date().day() && time2 > time1)) + { + mon1 -= 1; + } + + if (ptime2.date().month() > ptime1.date().month()) + { + mon1 += 12; + year1 -= 1; + } + } + + int64_t mon_diff = (year2 - year1) * 12 + mon2 - mon1; + + result->set_value(mon_diff); + return true; + } +}; + +struct _fn_diff_day_timestamp : public base_date_diff +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + boost::posix_time::time_duration td_res = ptime2 - ptime1; + int total_seconds = (((td_res.hours() * 60) + td_res.minutes()) * 60) + td_res.seconds(); + int64_t days = total_seconds / (24 * 3600); + + result->set_value(days); + return true; + } +}; + +struct _fn_diff_hour_timestamp : public base_date_diff +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + boost::posix_time::time_duration td_res = ptime2 - ptime1; + result->set_value((int64_t)td_res.hours()); + return true; + } +}; + +struct _fn_diff_minute_timestamp : public base_date_diff +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + boost::posix_time::time_duration td_res = ptime2 - ptime1; + result->set_value((int64_t)((td_res.hours() * 60) + td_res.minutes())); + return true; + } +}; + +struct _fn_diff_second_timestamp : public base_date_diff +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + boost::posix_time::time_duration td_res = ptime2 - ptime1; + result->set_value((int64_t)((((td_res.hours() * 60) + td_res.minutes()) * 60) + td_res.seconds())); + return true; + } +}; + +struct _fn_add_year_to_timestamp : public base_date_add +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + new_ptime += boost::gregorian::years( val_quantity.i64() ); + new_tmstmp = std::make_tuple(new_ptime, td, flag); + result->set_value( &new_tmstmp ); + return true; + } +}; + +struct _fn_add_month_to_timestamp : public base_date_add +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + int yr, mn, dy, quant; + quant = val_quantity.i64(); + dy = new_ptime.date().day(); + + int temp = quant % 12; + mn = new_ptime.date().month() + temp; + temp = quant / 12; + yr = new_ptime.date().year() + temp; + + if (mn > 12) + { + yr += 1; + temp = 
mn % 12; + if (temp == 0) + { + temp = 12; + } + mn = temp; + } + else if (mn < 1) + { + yr -= 1; + if (mn == 0) + { + mn = 12; + } + else + { + mn = 12 + mn; + } + } + + if ((mn == 4 || mn == 6 || mn == 9 || mn == 11) && dy > 30) + { + dy = 30; + } + else if (mn == 2 && dy > 28) + { + if (!(yr % 4) == 0 || ((yr % 100) == 0 && !(yr % 400) == 0)) + { + dy = 28; + } + else + { + dy = 29; + } + } + + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + new_ptime = boost::posix_time::ptime(boost::gregorian::date(yr, mn, dy), + boost::posix_time::hours(new_ptime.time_of_day().hours()) + + boost::posix_time::minutes(new_ptime.time_of_day().minutes()) + + boost::posix_time::seconds(new_ptime.time_of_day().seconds()) + + boost::posix_time::nanoseconds(new_ptime.time_of_day().fractional_seconds())); + #else + new_ptime = boost::posix_time::ptime(boost::gregorian::date(yr, mn, dy), + boost::posix_time::hours(new_ptime.time_of_day().hours()) + + boost::posix_time::minutes(new_ptime.time_of_day().minutes()) + + boost::posix_time::seconds(new_ptime.time_of_day().seconds()) + + boost::posix_time::microseconds(new_ptime.time_of_day().fractional_seconds())); + #endif + + new_tmstmp = std::make_tuple(new_ptime, td, flag); + result->set_value( &new_tmstmp ); + return true; + } +}; + +struct _fn_add_day_to_timestamp : public base_date_add +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + new_ptime += boost::gregorian::days( val_quantity.i64() ); + new_tmstmp = std::make_tuple(new_ptime, td, flag); + result->set_value( &new_tmstmp ); + return true; + } +}; + +struct _fn_add_hour_to_timestamp : public base_date_add +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + new_ptime += boost::posix_time::hours( val_quantity.i64() ); + new_tmstmp = std::make_tuple(new_ptime, td, flag); + result->set_value( &new_tmstmp ); + return true; + } +}; + +struct _fn_add_minute_to_timestamp : public base_date_add +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + new_ptime += boost::posix_time::minutes( val_quantity.i64() ); + new_tmstmp = std::make_tuple(new_ptime, td, flag); + result->set_value( &new_tmstmp ); + return true; + } +}; + +struct _fn_add_second_to_timestamp : public base_date_add +{ + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + param_validation(args); + + new_ptime += boost::posix_time::seconds( val_quantity.i64() ); + new_tmstmp = std::make_tuple(new_ptime, td, flag); + result->set_value( &new_tmstmp ); + return true; + } +}; + +struct _fn_utcnow : public base_function +{ + timestamp_t now_timestamp; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + int args_size = args->size(); + + if (args_size != 0) + { + throw base_s3select_exception("utcnow does not expect any parameters"); + } + + boost::posix_time::ptime now_ptime = boost::posix_time::ptime( boost::posix_time::second_clock::universal_time()); + now_timestamp = std::make_tuple(now_ptime, boost::posix_time::time_duration(0, 0, 0), false); + result->set_value( &now_timestamp ); + + return true; + } +}; + +struct _fn_between : public base_function +{ + + value res; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + int args_size = args->size(); + + + if (args_size != 3) + { + throw base_s3select_exception("between operates on 3 expressions");//TODO FATAL + } + + auto iter = args->begin(); + + base_statement* second_expr = *iter; + iter++; + 
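+ // Operand-order note (inferred from the pops here): for "x between a and b" the arguments come off as b, a, x -- second_expr is the upper bound, first_expr below is the lower bound, and main_expr is the tested expression; + // e.g. select * from s3object where int(_1) between 1 and 5; +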
base_statement* first_expr = *iter; + iter++; + base_statement* main_expr = *iter; + + value second_expr_val = second_expr->eval(); + value first_expr_val = first_expr->eval(); + value main_expr_val = main_expr->eval(); + + if ((second_expr_val.type == first_expr_val.type && first_expr_val.type == main_expr_val.type) || (second_expr_val.is_number() && first_expr_val.is_number() && main_expr_val.is_number())) + { + if((main_expr_val >= first_expr_val) && (main_expr_val <= second_expr_val)) { + result->set_value(true); + } else { + result->set_value(false); + } + } + return true; + } +}; + +struct _fn_not_between : public base_function +{ + + value res; + _fn_between between_op; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + between_op(args,result); + + if (result->get_value().is_true() == 0) { + result->set_value(true); + } else { + result->set_value(false); + } + return true; + } +}; + +static char s3select_ver[10]="41.a"; + +struct _fn_version : public base_function +{ + value val; //TODO use git to generate sha1 + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + val = &s3select_ver[0]; + *result = val; + return true; + } +}; + +struct _fn_isnull : public base_function +{ + + value res; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + base_statement* expr = *iter; + value expr_val = expr->eval(); + if ( expr_val.is_null()) { + result->set_value(true); + } else { + result->set_value(false); + } + return true; + } +}; + +struct _fn_is_not_null : public base_function +{ + value res; + _fn_isnull isnull_op; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + + isnull_op(args,result); + + if (result->get_value().is_true() == 0) + result->set_value(true); + else + result->set_value(false); + + return true; + } +}; + +struct _fn_in : public base_function +{ + + value res; + + bool operator()(bs_stmt_vec_t *args, variable *result) override + { + check_args_size(args,1); + + int args_size = static_cast<int>(args->size()-1); + base_statement *main_expr = (*args)[args_size]; + value main_expr_val = main_expr->eval(); + args_size--; + while (args_size>=0) + { + base_statement *expr = (*args)[args_size]; + value expr_val = expr->eval(); + args_size--; + if ((expr_val.type == main_expr_val.type) || (expr_val.is_number() && main_expr_val.is_number())) + { + if (expr_val == main_expr_val) + { + result->set_value(true); + return true; + } + } + } + result->set_value(false); + return true; + } +}; + +struct _fn_like : public base_like +{ + explicit _fn_like(base_statement* esc, base_statement* like_expr) + { + auto is_constant = [&](base_statement* bs) { + if (dynamic_cast<variable*>(bs) && dynamic_cast<variable*>(bs)->m_var_type == variable::var_t::COLUMN_VALUE) { + return true; + } else { + return false; + } + }; + + if (is_constant(esc) && is_constant(like_expr)) { + constant_state = true; + } + + if(constant_state == true) + { + param_validation(esc, like_expr); + std::vector<char> like_as_regex = transform(like_expr_val.str(), *escape_expr_val.str()); + compile(like_as_regex); + } + } + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,3); + + auto iter = args->begin(); + + base_statement* escape_expr = *iter; + iter++; + base_statement* like_expr = *iter; + iter++; + base_statement* main_expr = *iter; + + if (constant_state == false) + { + param_validation(escape_expr, like_expr); + std::vector<char> like_as_regex = transform(like_expr_val.str(),
*escape_expr_val.str()); + compile(like_as_regex); + } + + value main_expr_val = main_expr->eval(); + if (main_expr_val.type != value::value_En_t::STRING) + { + throw base_s3select_exception("main expression must be string"); + } + + match(main_expr_val, result); + return true; + } +}; + +struct _fn_substr : public base_function +{ + + char buff[4096];// this buffer persists for the query lifetime; it holds the per-row result (only for this specific function call). + //it prevents intensive use of malloc/free (fragmentation). + //should validate result length. + //TODO may be replaced by std::string (dynamic), or by a global allocator in query scope. + value v_str; + value v_from; + value v_to; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + auto iter = args->begin(); + int args_size = args->size(); + + + if (args_size<2) + { + throw base_s3select_exception("substr accepts 2 or 3 arguments"); + } + + base_statement* str = *iter; + iter++; + base_statement* from = *iter; + base_statement* to; + + if (args_size == 3) + { + iter++; + to = *iter; + v_to = to->eval(); + if (!v_to.is_number()) + { + throw base_s3select_exception("substr third argument must be number"); //can skip row + } + } + + v_str = str->eval(); + + if(v_str.type != value::value_En_t::STRING) + { + throw base_s3select_exception("substr first argument must be string"); //can skip current row + } + + int str_length = strlen(v_str.str()); + + v_from = from->eval(); + if(!v_from.is_number()) + { + throw base_s3select_exception("substr second argument must be number"); //can skip current row + } + + int64_t f; + int64_t t; + + if (v_from.type == value::value_En_t::FLOAT) + { + f=v_from.dbl(); + } + else + { + f=v_from.i64(); + } + + if (f <= 0 && args_size == 2) + { + f = 1; + } + + if (f>str_length) + { + result->set_value(""); + return true; + } + + if (str_length>=(int)sizeof(buff)) //'>=' leaves room for the NUL terminator + { + throw base_s3select_exception("string too long for internal buffer"); //can skip row + } + + if (args_size == 3) + { + if (v_to.type == value::value_En_t::FLOAT) + { + t = v_to.dbl(); + } + else + { + t = v_to.i64(); + } + + if (f <= 0) + { + t = t + f - 1; + f = 1; + } + + if (t<0) + { + t = 0; + } + + if (t > str_length) + { + t = str_length; + } + + if( (str_length-(f-1)-t) <0) + {//in case the requested length overruns the string, reduce it to the exact remainder.
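+ // Worked example (illustrative): substring('12345', 3, 99) gives f=3; t is first clamped to str_length=5, then 5-(3-1)-5 < 0 triggers this branch, t becomes 3, and the copy below returns "345". +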
+ t = str_length-(f-1); + } + + strncpy(buff, v_str.str()+f-1, t); + buff[t] = 0; //strncpy does not NUL-terminate; terminate explicitly so stale buffer content cannot leak into the result + } + else + { + strcpy(buff, v_str.str()+f-1); + } + + result->set_value(buff); + + return true; + } +}; + +struct _fn_charlength : public base_function { + + value v_str; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + base_statement* str = *iter; + v_str = str->eval(); + if(v_str.type != value::value_En_t::STRING) { + throw base_s3select_exception("content is not string!"); + } else { + int64_t str_length = strlen(v_str.str()); + result->set_value(str_length); + return true; + } + } +}; + +struct _fn_lower : public base_function { + + std::string buff; + value v_str; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + base_statement* str = *iter; + v_str = str->eval(); + if(v_str.type != value::value_En_t::STRING) { + throw base_s3select_exception("content is not string"); + } else { + buff = v_str.str(); + boost::algorithm::to_lower(buff); + result->set_value(buff.c_str()); + return true; + } + } +}; + +struct _fn_upper : public base_function { + + std::string buff; + value v_str; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + base_statement* str = *iter; + v_str = str->eval(); + if(v_str.type != value::value_En_t::STRING) { + throw base_s3select_exception("content is not string"); + } else { + buff = v_str.str(); + boost::algorithm::to_upper(buff); + result->set_value(buff.c_str()); + return true; + } + } +}; + +struct _fn_nullif : public base_function { + + value x; + value y; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + auto iter = args->begin(); + + int args_size = args->size(); + if (args_size != 2) + { + throw base_s3select_exception("nullif accepts only 2 arguments"); + } + base_statement *first = *iter; + x = first->eval(); + iter++; + base_statement *second = *iter; + y = second->eval(); + if (x.is_null() && y.is_null()) + { + result->set_null(); + return true; + } + if (x.is_null()) + { + result->set_null(); + return true; + } + if (!(x.is_number() && y.is_number())) { + if (x.type != y.type) { + *result = x; + return true; + } + } + if (x != y) { + *result = x; + } else { + result->set_null(); + } + return true; + } + }; + +struct _fn_when_then : public base_function { + + value when_value; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,2); + + auto iter = args->begin(); + + base_statement* then_expr = *iter; + iter ++; + + base_statement* when_expr = *iter; + + when_value = when_expr->eval(); + + if (when_value.is_true()) + { + *result = then_expr->eval(); + return true; + } + + result->set_null(); + + return true; + } +}; + +struct _fn_when_value_then : public base_function { + + value when_value; + value case_value; + value then_value; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,3); + + auto iter = args->begin(); + + base_statement* then_expr = *iter; + iter++; + + base_statement* when_expr = *iter; + iter++; + + base_statement* case_expr = *iter; + + when_value = when_expr->eval(); + case_value = case_expr->eval(); + then_value = then_expr->eval(); + + if (case_value == when_value) + { + *result = then_value; + return true; + } + + result->set_null(); + return true; + } +}; + +struct _fn_case_when_else : public base_function { + +
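+ // Evaluation note (restating the loop below): the ELSE expression sits at index 0 and the when-then results above it; + // the scan returns the first non-null when_then_value it meets and evaluates the ELSE branch only when every WHEN yields null. +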
value when_then_value; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + base_statement* else_expr = *(args->begin()); + + size_t args_size = args->size() -1; + + for(int ivec=args_size;ivec>0;ivec--) + { + when_then_value = (*args)[ivec]->eval(); + + if(!when_then_value.is_null()) + { + *result = when_then_value; + return true; + } + + } + + *result = else_expr->eval(); + return true; + } +}; + +struct _fn_coalesce : public base_function +{ + + value res; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter_begin = args->begin(); + int args_size = args->size(); + while (args_size >= 1) + { + base_statement* expr = *iter_begin; + value expr_val = expr->eval(); + iter_begin++; + if ( !(expr_val.is_null())) { + *result = expr_val; + return true; + } + args_size--; + } + result->set_null(); + return true; + } +}; + +struct _fn_string : public base_function +{ + + value res; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + + base_statement* expr = *iter; + value expr_val = expr->eval(); + result->set_value((expr_val.to_string())); + return true; + } +}; + +struct _fn_to_bool : public base_function +{ + + value func_arg; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + int64_t i=0; + func_arg = (*args->begin())->eval(); + + if (func_arg.type == value::value_En_t::FLOAT) + { + i = func_arg.dbl(); + } + else if (func_arg.type == value::value_En_t::DECIMAL || func_arg.type == value::value_En_t::BOOL) + { + i = func_arg.i64(); + } + else + { + i = 0; + } + if (i == 0) + { + result->set_value(false); + } + else + { + result->set_value(true); + } + return true; + } +}; + +struct _fn_trim : public base_function { + + std::string input_string; + value v_remove; + value v_input; + + _fn_trim() + { + v_remove = " "; + } + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + int args_size = args->size(); + base_statement* str = *iter; + v_input = str->eval(); + if(v_input.type != value::value_En_t::STRING) { + throw base_s3select_exception("content is not string"); + } + input_string = v_input.str(); + if (args_size == 2) { + iter++; + base_statement* next = *iter; + v_remove = next->eval(); + } + boost::trim_right_if(input_string,boost::is_any_of(v_remove.str())); + boost::trim_left_if(input_string,boost::is_any_of(v_remove.str())); + result->set_value(input_string.c_str()); + return true; + } +}; + +struct _fn_leading : public base_function { + + std::string input_string; + value v_remove; + value v_input; + + _fn_leading() + { + v_remove = " "; + } + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + int args_size = args->size(); + base_statement* str = *iter; + v_input = str->eval(); + if(v_input.type != value::value_En_t::STRING) { + throw base_s3select_exception("content is not string"); + } + input_string = v_input.str(); + if (args_size == 2) { + iter++; + base_statement* next = *iter; + v_remove = next->eval(); + } + boost::trim_left_if(input_string,boost::is_any_of(v_remove.str())); + result->set_value(input_string.c_str()); + return true; + } +}; + +struct _fn_trailing : public base_function { + + std::string input_string; + value v_remove; + value v_input; + + _fn_trailing() + { + v_remove = " 
"; + } + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + check_args_size(args,1); + + auto iter = args->begin(); + int args_size = args->size(); + base_statement* str = *iter; + v_input = str->eval(); + if(v_input.type != value::value_En_t::STRING) { + throw base_s3select_exception("content is not string"); + } + input_string = v_input.str(); + if (args_size == 2) { + iter++; + base_statement* next = *iter; + v_remove = next->eval(); + } + boost::trim_right_if(input_string,boost::is_any_of(v_remove.str())); + result->set_value(input_string.c_str()); + return true; + } +}; + +struct _fn_cast_to_decimal : public base_function { + + int32_t precision=-1; + int32_t scale=-1; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + //cast(expr as decimal(x,y)) + check_args_size(args,2); + + base_statement* expr = (*args)[1]; + //expr_val should be float or integer + //dynamic value for the decimal operator to get the precision and scale + + _fn_to_float to_float; + bs_stmt_vec_t args_vec; + args_vec.push_back(expr); + to_float(&args_vec,result); + + if (precision == -1 || scale == -1){ + base_statement* decimal_expr = (*args)[0]; + decimal_expr->eval().get_precision_scale(&precision,&scale); + } + + result->set_precision_scale(&precision,&scale); + + return true; + } +}; + +struct _fn_decimal_operator : public base_function { + + int32_t precision=-1; + int32_t scale=-1; + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + //decimal(x,y) operator + check_args_size(args,2); + + auto iter = args->begin(); + base_statement* expr_precision = *iter; + value expr_precision_val = expr_precision->eval(); + + iter++; + base_statement* expr_scale = *iter; + value expr_scale_val = expr_scale->eval(); + + precision = expr_precision_val.i64(); + scale = expr_scale_val.i64(); + + result->set_precision_scale(&precision,&scale); + + return true; + } +}; + +struct _fn_engine_version : public base_function { + + const char* version_description =R"(PR #137 : +the change handle the use cases where the JSON input starts with an anonymous array/object this may cause wrong search result per the user request(SQL statement) + +handle the use-case where the user requests a json-key-path that may point to a non-discrete value. i.e. array or an object. +editorial changes. + +fix for CSV flow, in the case of a "broken row" (upon processing stream of data) + +null results upon aggregation functions on an empty group (no match for where clause). 
+)"; + + + _fn_engine_version() + { + aggregate = true; + } + + bool operator()(bs_stmt_vec_t* args, variable* result) override + { + result->set_value(version_description); + return true; + } +}; + +base_function* s3select_functions::create(std::string_view fn_name,const bs_stmt_vec_t &arguments) +{ + const FunctionLibrary::const_iterator iter = m_functions_library.find(fn_name.data()); + + if (iter == m_functions_library.end()) + { + std::string msg; + msg = std::string{fn_name} + " " + " function not found"; + throw base_s3select_exception(msg, base_s3select_exception::s3select_exp_en_t::FATAL); + } + + switch (iter->second) + { + case s3select_func_En_t::ADD: + return S3SELECT_NEW(this,_fn_add); + break; + + case s3select_func_En_t::SUM: + return S3SELECT_NEW(this,_fn_sum); + break; + + case s3select_func_En_t::COUNT: + return S3SELECT_NEW(this,_fn_count); + break; + + case s3select_func_En_t::MIN: + return S3SELECT_NEW(this,_fn_min); + break; + + case s3select_func_En_t::MAX: + return S3SELECT_NEW(this,_fn_max); + break; + + case s3select_func_En_t::TO_INT: + return S3SELECT_NEW(this,_fn_to_int); + break; + + case s3select_func_En_t::TO_FLOAT: + return S3SELECT_NEW(this,_fn_to_float); + break; + + case s3select_func_En_t::SUBSTR: + return S3SELECT_NEW(this,_fn_substr); + break; + + case s3select_func_En_t::TO_TIMESTAMP: + return S3SELECT_NEW(this,_fn_to_timestamp); + break; + + case s3select_func_En_t::TO_STRING_CONSTANT: + return S3SELECT_NEW(this,_fn_to_string_constant); + break; + + case s3select_func_En_t::TO_STRING_DYNAMIC: + return S3SELECT_NEW(this,_fn_to_string_dynamic); + break; + + case s3select_func_En_t::TO_BOOL: + return S3SELECT_NEW(this,_fn_to_bool); + break; + + case s3select_func_En_t::EXTRACT_YEAR: + return S3SELECT_NEW(this,_fn_extract_year_from_timestamp); + break; + + case s3select_func_En_t::EXTRACT_MONTH: + return S3SELECT_NEW(this,_fn_extract_month_from_timestamp); + break; + + case s3select_func_En_t::EXTRACT_DAY: + return S3SELECT_NEW(this,_fn_extract_day_from_timestamp); + break; + + case s3select_func_En_t::EXTRACT_HOUR: + return S3SELECT_NEW(this,_fn_extract_hour_from_timestamp); + break; + + case s3select_func_En_t::EXTRACT_MINUTE: + return S3SELECT_NEW(this,_fn_extract_minute_from_timestamp); + break; + + case s3select_func_En_t::EXTRACT_SECOND: + return S3SELECT_NEW(this,_fn_extract_second_from_timestamp); + break; + + case s3select_func_En_t::EXTRACT_WEEK: + return S3SELECT_NEW(this,_fn_extract_week_from_timestamp); + break; + + case s3select_func_En_t::EXTRACT_TIMEZONE_HOUR: + return S3SELECT_NEW(this,_fn_extract_tz_hour_from_timestamp); + break; + + case s3select_func_En_t::EXTRACT_TIMEZONE_MINUTE: + return S3SELECT_NEW(this,_fn_extract_tz_minute_from_timestamp); + break; + + case s3select_func_En_t::DATE_ADD_YEAR: + return S3SELECT_NEW(this,_fn_add_year_to_timestamp); + break; + + case s3select_func_En_t::DATE_ADD_MONTH: + return S3SELECT_NEW(this,_fn_add_month_to_timestamp); + break; + + case s3select_func_En_t::DATE_ADD_DAY: + return S3SELECT_NEW(this,_fn_add_day_to_timestamp); + break; + + case s3select_func_En_t::DATE_ADD_HOUR: + return S3SELECT_NEW(this,_fn_add_hour_to_timestamp); + break; + + case s3select_func_En_t::DATE_ADD_MINUTE: + return S3SELECT_NEW(this,_fn_add_minute_to_timestamp); + break; + + case s3select_func_En_t::DATE_ADD_SECOND: + return S3SELECT_NEW(this,_fn_add_second_to_timestamp); + break; + + case s3select_func_En_t::DATE_DIFF_YEAR: + return S3SELECT_NEW(this,_fn_diff_year_timestamp); + break; + + case 
s3select_func_En_t::DATE_DIFF_MONTH: + return S3SELECT_NEW(this,_fn_diff_month_timestamp); + break; + + case s3select_func_En_t::DATE_DIFF_DAY: + return S3SELECT_NEW(this,_fn_diff_day_timestamp); + break; + + case s3select_func_En_t::DATE_DIFF_HOUR: + return S3SELECT_NEW(this,_fn_diff_hour_timestamp); + break; + + case s3select_func_En_t::DATE_DIFF_MINUTE: + return S3SELECT_NEW(this,_fn_diff_minute_timestamp); + break; + + case s3select_func_En_t::DATE_DIFF_SECOND: + return S3SELECT_NEW(this,_fn_diff_second_timestamp); + break; + + case s3select_func_En_t::UTCNOW: + return S3SELECT_NEW(this,_fn_utcnow); + break; + + case s3select_func_En_t::AVG: + return S3SELECT_NEW(this,_fn_avg); + break; + + case s3select_func_En_t::LOWER: + return S3SELECT_NEW(this,_fn_lower); + break; + + case s3select_func_En_t::UPPER: + return S3SELECT_NEW(this,_fn_upper); + break; + + case s3select_func_En_t::LENGTH: + return S3SELECT_NEW(this,_fn_charlength); + break; + + case s3select_func_En_t::BETWEEN: + return S3SELECT_NEW(this,_fn_between); + break; + + case s3select_func_En_t::NOT_BETWEEN: + return S3SELECT_NEW(this,_fn_not_between); + break; + + case s3select_func_En_t::IS_NULL: + return S3SELECT_NEW(this,_fn_isnull); + break; + + case s3select_func_En_t::IS_NOT_NULL: + return S3SELECT_NEW(this,_fn_is_not_null); + break; + + case s3select_func_En_t::IN: + return S3SELECT_NEW(this,_fn_in); + break; + + case s3select_func_En_t::VERSION: + return S3SELECT_NEW(this,_fn_version); + break; + + case s3select_func_En_t::NULLIF: + return S3SELECT_NEW(this,_fn_nullif); + break; + + case s3select_func_En_t::LIKE: + return S3SELECT_NEW(this,_fn_like,arguments[0],arguments[1]); + break; + + case s3select_func_En_t::COALESCE: + return S3SELECT_NEW(this,_fn_coalesce); + break; + + case s3select_func_En_t::WHEN_THEN: + return S3SELECT_NEW(this,_fn_when_then); + break; + + case s3select_func_En_t::WHEN_VALUE_THEN: + return S3SELECT_NEW(this,_fn_when_value_then); + break; + + case s3select_func_En_t::CASE_WHEN_ELSE: + return S3SELECT_NEW(this,_fn_case_when_else); + break; + + case s3select_func_En_t::STRING: + return S3SELECT_NEW(this,_fn_string); + break; + + case s3select_func_En_t::TRIM: + return S3SELECT_NEW(this,_fn_trim); + break; + + case s3select_func_En_t::LEADING: + return S3SELECT_NEW(this,_fn_leading); + break; + + case s3select_func_En_t::TRAILING: + return S3SELECT_NEW(this,_fn_trailing); + break; + + case s3select_func_En_t::DECIMAL_OPERATOR: + return S3SELECT_NEW(this,_fn_decimal_operator); + break; + + case s3select_func_En_t::CAST_TO_DECIMAL: + return S3SELECT_NEW(this,_fn_cast_to_decimal); + break; + + case s3select_func_En_t::ENGINE_VERSION: + return S3SELECT_NEW(this,_fn_engine_version); + break; + + default: + throw base_s3select_exception("internal error while resolving function-name"); + break; + } +} + +bool base_statement::is_function() const +{ + if (dynamic_cast<__function*>(const_cast(this))) + { + return true; + } + else + { + return false; + } +} + +const base_statement* base_statement::get_aggregate() const +{ + //search for aggregation function in AST + const base_statement* res = 0; + + if (is_aggregate()) + { + return this; + } + + if (left() && (res=left()->get_aggregate())!=0) + { + return res; + } + + if (right() && (res=right()->get_aggregate())!=0) + { + return res; + } + + if (is_function()) + { + for (auto i : dynamic_cast<__function*>(const_cast(this))->get_arguments()) + { + const base_statement* b=i->get_aggregate(); + if (b) + { + return b; + } + } + } + return 0; +} + +bool 
base_statement::is_column_reference() const +{ + if(is_column()) + return true; + + if(left()) + return left()->is_column_reference(); + + if(right()) + return right()->is_column_reference(); + + if(is_function()) + { + for(auto a : dynamic_cast<__function*>(const_cast<base_statement*>(this))->get_arguments()) + { + if(a->is_column_reference()) + return true; + } + } + + return false; +} + +bool base_statement::is_nested_aggregate(bool &aggr_flow) const +{ + if (is_aggregate()) + { + aggr_flow=true; + for (auto& i : dynamic_cast<__function*>(const_cast<base_statement*>(this))->get_arguments()) + { + if (i->get_aggregate() != nullptr) + { + return true; + } + } + } + + if(left() && left()->is_nested_aggregate(aggr_flow)) + return true; + + if(right() && right()->is_nested_aggregate(aggr_flow)) + return true; + + if (is_function()) + { + for (auto& i : dynamic_cast<__function*>(const_cast<base_statement*>(this))->get_arguments()) + { + if (i->get_aggregate() != nullptr) + { + return i->is_nested_aggregate(aggr_flow); + } + } + } + + return false; +} + +bool base_statement::is_statement_contain_star_operation() const +{ + if(is_star_operation()) + return true; + + if(left()) + return left()->is_statement_contain_star_operation(); + + if(right()) + return right()->is_statement_contain_star_operation(); + + if(is_function()) + { + for(auto a : dynamic_cast<__function*>(const_cast<base_statement*>(this))->get_arguments()) + { + if(a->is_star_operation()) + return true; + } + } + + return false; +} + +bool base_statement::mark_aggreagtion_subtree_to_execute() +{//purpose: set the aggregation subtree as runnable. + //the function searches for an aggregation function and marks its subtree {skip = false} + if (is_aggregate()) + set_skip_non_aggregate(false); + + if (left()) + left()->mark_aggreagtion_subtree_to_execute(); + + if(right()) + right()->mark_aggreagtion_subtree_to_execute(); + + if (is_function()) + { + for (auto& i : dynamic_cast<__function*>(this)->get_arguments()) + { + i->mark_aggreagtion_subtree_to_execute(); + } + } + + return true; +} + +void base_statement::push_for_cleanup(std::set<base_statement*>& ast_nodes_to_delete)//semantic loop on each projection +{ +//all AST nodes are allocated by placement-new from the main buffer, and releasing that buffer calls no destructors. +//the purpose of this routine is to traverse the AST and map all nodes for cleanup. +//the cleanup method will trigger all destructors.
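+ // Cleanup-contract sketch (an assumption drawn from the comment above, not a statement of the full upstream design): every node registers itself in a std::set<base_statement*>, + // and the owner (e.g. s3select_functions::clean()) later invokes each destructor explicitly, since releasing the placement-new arena alone would run none of them. +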
+ + ast_nodes_to_delete.insert(this); + + if (left()) + left()->push_for_cleanup(ast_nodes_to_delete); + + if(right()) + right()->push_for_cleanup(ast_nodes_to_delete); + + if (is_function()) + { + for (auto& i : dynamic_cast<__function*>(this)->get_arguments()) + { + i->push_for_cleanup(ast_nodes_to_delete); + } + } +} + +#ifdef _ARROW_EXIST +void base_statement::extract_columns(parquet_file_parser::column_pos_t &cols,const uint16_t max_columns) +{// purpose: to extract all column-ids from the query + if(is_column()) //column reference or column position + {variable* v = dynamic_cast<variable*>(this); + if(dynamic_cast<variable*>(this)->m_var_type == variable::var_t::VARIABLE_NAME) + {//column reference + + if (v->getScratchArea()->get_column_pos(v->get_name().c_str())>=0) + {//column belongs to schema + cols.insert( v->getScratchArea()->get_column_pos(v->get_name().c_str() )); + }else { + if(v->getAlias()->search_alias(v->get_name())) + {//column is an alias --> extract the columns belonging to the alias + //TODO cyclic alias to resolve + v->getAlias()->search_alias(v->get_name())->extract_columns(cols,max_columns); + }else { + //column is not an alias --> error + std::stringstream ss; + ss << "column " + v->get_name() + " is not part of schema nor an alias"; + throw base_s3select_exception(ss.str(),base_s3select_exception::s3select_exp_en_t::FATAL); + } + } + }else if(v->m_var_type == variable::var_t::STAR_OPERATION) + { + for(uint16_t i=0;i<max_columns;i++) + {//star operation: every column participates + cols.insert(i); + } + } + else + {//column position + if(v->get_column_pos()>=max_columns) + { + std::stringstream ss; + ss << "column " + std::to_string( v->get_column_pos()+1 ) + " exceeds max number of columns"; + throw base_s3select_exception(ss.str(),base_s3select_exception::s3select_exp_en_t::FATAL); + } + cols.insert(v->get_column_pos());//push column positions + } + }else if(is_function()) + { + __function* f = (dynamic_cast<__function*>(this)); + bs_stmt_vec_t args = f->get_arguments(); + for (auto prm : args) + {//traverse function args + prm->extract_columns(cols,max_columns); + } + + } + + //keep traversing down the AST + if(left()) + left()->extract_columns(cols,max_columns); + + if(right()) + right()->extract_columns(cols,max_columns); +} +#endif //_ARROW_EXIST + +} //namespace s3selectEngine + +#endif diff --git a/src/s3select/include/s3select_json_parser.h b/src/s3select/include/s3select_json_parser.h new file mode 100644 index 000000000..aa06163f5 --- /dev/null +++ b/src/s3select/include/s3select_json_parser.h @@ -0,0 +1,829 @@ +#ifndef S3SELECT_JSON_PARSER_H +#define S3SELECT_JSON_PARSER_H + +//TODO add __FILE__ __LINE__ message +#define RAPIDJSON_ASSERT(x) s3select_json_parse_error(x) +bool s3select_json_parse_error(bool b); +bool s3select_json_parse_error(const char* error); + +#include "rapidjson/reader.h" +#include "rapidjson/writer.h" +#include "rapidjson/filereadstream.h" +#include "rapidjson/filewritestream.h" +#include "rapidjson/error/en.h" +#include "rapidjson/document.h" +#include +#include +#include +#include +#include +#include +#include +#include "s3select_oper.h"//class value +#include + +#define JSON_PROCESSING_LIMIT_REACHED 2 + +//TODO missing s3selectEngine namespace + +bool s3select_json_parse_error(bool b) +{ + if(!b) + { + const char* error_str = "failure while processing JSON document"; + throw s3selectEngine::base_s3select_exception(error_str, s3selectEngine::base_s3select_exception::s3select_exp_en_t::FATAL); + } + return false; +} + +bool s3select_json_parse_error(const char* error) +{ + if(!error) + { + const char* error_str = "failure while processing JSON document"; + throw
+bool s3select_json_parse_error(const char* error)
+{
+  if(!error)
+  {
+    const char* error_str = "failure while processing JSON document";
+    throw s3selectEngine::base_s3select_exception(error_str, s3selectEngine::base_s3select_exception::s3select_exp_en_t::FATAL);
+  }
+  return false;
+}
+
+static auto iequal_predicate = [](std::string& it1, std::string& it2)
+  {
+    return boost::iequals(it1,it2);
+  };
+
+
+class ChunksStreamer : public rapidjson::MemoryStream {
+
+  //purpose: add a method `resetBuffer` that enables parsing chunk after chunk;
+  //per each new chunk it resets the internal data members
+  public:
+
+  std::string internal_buffer;
+  const Ch* next_src_;
+  size_t next_size_;
+
+  ChunksStreamer():rapidjson::MemoryStream(0,0){next_src_=0;next_size_=0;}
+
+  ChunksStreamer(const Ch *src, size_t size) : rapidjson::MemoryStream(src,size){next_src_=0;next_size_=0;}
+
+  //override the Peek method
+  Ch Peek() //const
+  {
+    if(RAPIDJSON_UNLIKELY(src_ == end_))
+    {
+      if(next_src_)//a next chunk exists
+      {//upon reaching the end of the current buffer, switch to the next one
+        src_ = next_src_;
+        begin_ = src_;
+        size_ = next_size_;
+        end_ = src_ + size_;
+
+        next_src_ = 0;
+        next_size_ = 0;
+        return *src_;
+      }
+      else return 0;
+    }
+    return *src_;
+  }
+
+  //override the Take method
+  Ch Take()
+  {
+    if(RAPIDJSON_UNLIKELY(src_ == end_))
+    {
+      if(next_src_)//a next chunk exists
+      {//upon reaching the end of the current buffer, switch to the next one
+        src_ = next_src_;
+        begin_ = src_;
+        size_ = next_size_;
+        end_ = src_ + size_;
+
+        next_src_ = 0;
+        next_size_ = 0;
+        return *src_;
+      }
+      else return 0;
+    }
+    return *src_++;
+  }
+
+  void resetBuffer(char* buff, size_t size)
+  {
+    if(!src_)
+    {//first time calling
+      begin_ = buff;
+      src_ = buff;
+      size_ = size;
+      end_ = src_ + size_;
+      return;
+    }
+
+    if(!next_src_)
+    {//save the next chunk; it will be used once the parser reaches the end of the current buffer
+      next_src_ = buff;
+      next_size_ = size;
+    }
+    else
+    {// should not happen
+      std::cout << "can not replace pointers!!!" << std::endl;//TODO exception
+      return;
+    }
+  }
+
+  void saveRemainingBytes()
+  {//this routine is called per each new chunk
+   //save the remaining bytes before they are overridden by the next chunk.
+    size_t copy_left_sz = getBytesLeft(); //should be very small
+    internal_buffer.assign(src_,copy_left_sz);
+
+    src_ = internal_buffer.data();
+    begin_ = src_;
+    size_ = copy_left_sz;
+    end_ = src_ + copy_left_sz;
+  }
+
+  size_t getBytesLeft() { return end_ - src_; }
+
+};
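
A minimal sketch of the intended feeding protocol for ChunksStreamer (hypothetical driver; assumes this header compiles standalone):

    #include "s3select_json_parser.h"
    #include <cstdio>
    #include <cstring>

    int main() {
      char part1[] = "{\"a\": 1, \"b\"";
      char part2[] = ": 2}";
      ChunksStreamer s;
      s.resetBuffer(part1, strlen(part1)); // first call installs the active buffer
      s.resetBuffer(part2, strlen(part2)); // second call is queued in next_src_
      while (s.Peek())                     // Peek()/Take() switch buffers transparently
        putchar(s.Take());                 // prints the two chunks as one stream
      return 0;
    }
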
+enum class row_state
+{
+  NA,
+  OBJECT_START_ROW,
+  ARRAY_START_ROW
+};
+
+class json_variable_access {
+//purpose: a state-machine for JSON variables.
+//upon the syntax-parser accepting a variable (projection / where-clause) it creates this object.
+//this object gets events (key, start-array ... etc.) as the JSON reader scans the input;
+//these events advance the states until the last one is reached, which results in pushing the value into the scratch-area.
+
+private:
+
+// to set the following.
+std::vector<std::string>* from_clause;
+std::vector<std::string>* key_path;
+//m_current_depth : traces the depth of the reader, including "anonymous" containers (JSON may begin with an array that has no name attached to it)
+int* m_current_depth;
+//m_current_depth_non_anonymous : traces the depth of the reader, NOT including "anonymous" arrays/objects.
+//upon the user requesting _1.a[12].b, the key-name {a} may reside in some array with no name;
+//the state machine that searches for a[12].b does NOT contain states for that "anonymous" array,
+//thus the state-machine would fail to trace the user request for that specific key-path
+int* m_current_depth_non_anonymous;
+std::function<int(s3selectEngine::value&,int)>* m_exact_match_cb;
+// a state number : (_1).a.b.c[ 17 ].d.e (a.b)=1 (c[)=2 (17)=3 (.d.e)=4
+size_t current_state;//contains the current state of the state machine for the searching-expression (each JSON variable in the SQL statement has a searching expression)
+int nested_array_level;//in the case of an array within an array it contains the nesting level
+int m_json_index;
+s3selectEngine::value v_null;
+size_t m_from_clause_size;
+
+struct variable_state_md {
+  std::vector<std::string> required_path;//set by the syntax-parser. in the case of an array it is empty
+  int required_array_entry_no;//set by the syntax-parser; in the case of an object-key it is -1.
+  int actual_array_entry_no;//upon scanning the JSON input, this value is increased by 1 per new element
+  int required_depth_size;// depth of state, aggregated (includes the previous ones). it is the sum of key-elements and array-operators.
+  int required_key_depth_size;// same as the above, not including the array-operators.
+  int last_array_start;//marks the nested-array-level (array within array)
+};
+
+std::vector<variable_state_md> variable_states;//this vector is populated during the syntax phase.
+
+public:
+
+json_variable_access():from_clause(nullptr),key_path(nullptr),m_current_depth(nullptr),m_current_depth_non_anonymous(nullptr),m_exact_match_cb(nullptr),current_state(-1),nested_array_level(0),m_json_index(-1),v_null(nullptr),m_from_clause_size(0)
+{}
+
+void init(
+    std::vector<std::string>* reader_from_clause,
+    std::vector<std::string>* reader_key_path,
+    int* reader_current_depth,
+    int* reader_m_current_depth_non_anonymous,
+    std::function<int(s3selectEngine::value&,int)>* exact_match_cb,
+    int json_index)
+{//this routine should be called before scanning the JSON input
+  from_clause = reader_from_clause;
+  key_path = reader_key_path;
+  m_exact_match_cb = exact_match_cb;
+  //m_current_depth and m_current_depth_non_anonymous point to the JSON reader variables.
+  m_current_depth = reader_current_depth;
+  m_current_depth_non_anonymous = reader_m_current_depth_non_anonymous;
+  current_state = 0;
+  m_json_index = json_index;
+  m_from_clause_size = from_clause->size();
+
+  //loop on variable_states compute required_depth_size
+}
+
+void clear()
+{
+  variable_states.clear();
+}
+
+void debug_info()
+{
+  auto f = [](std::vector<std::string> x){std::string res;for(auto i : x){res.append(i);res.append(".");};return res;};
+
+  std::cout << "m_current_depth=" << *m_current_depth << " required_depth_size= " << reader_position_state().required_depth_size << " ";
+  std::cout << "variable_states[ current_state ].last_array_start=" << reader_position_state().last_array_start;
+  std::cout << " current_state=" << current_state << " key_path=" << f(*key_path) << std::endl;
+}
+#define DBG {std::cout << "event=" << __FUNCTION__ << std::endl; debug_info();}
+#undef DBG
+#define DBG
+
+void compile_state_machine()
+{
+  size_t aggregated_required_depth_size = 0;
+  size_t aggregated_required_key_depth_size = 0;
+  for(auto& v : variable_states)
+  {
+    if(v.required_path.size())
+    {
+      v.required_depth_size = aggregated_required_depth_size + v.required_path.size();//depth size in general, including arrays
+      v.required_key_depth_size = aggregated_required_key_depth_size;//depth including ONLY key parts
+      aggregated_required_key_depth_size += v.required_path.size();
+    }
+    else
+    {
+      v.required_depth_size = aggregated_required_depth_size + 1;
+    }
+    aggregated_required_depth_size = v.required_depth_size;
+  }
+}
+
+void push_variable_state(std::vector<std::string>& required_path,int required_array_entry_no)
+{
+  struct variable_state_md new_state={required_path,required_array_entry_no,-1,0,0,-1};
+  variable_states.push_back(new_state);
+  //TODO required_path.size() > 0 or required_path,required_array_entry_no>=0 : not both
+  compile_state_machine();
+}
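+// As a sketch, for the search expression a.b.c[17].d.e described above, the
+// syntax phase would populate three states roughly like this (hypothetical
+// calls; the vectors stand for key paths):
+//
+//   json_variable_access jva;
+//   std::vector<std::string> keys_abc = {"a","b","c"};
+//   jva.push_variable_state(keys_abc, -1);  // state: keys a.b.c (no array entry)
+//   std::vector<std::string> none;
+//   jva.push_variable_state(none, 17);      // state: array entry [17] (empty key path)
+//   std::vector<std::string> keys_de = {"d","e"};
+//   jva.push_variable_state(keys_de, -1);   // state: keys d.e
+//
+// each push recompiles the machine, so required_depth_size is aggregated per state.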
+struct variable_state_md& reader_position_state()
+{
+  if (current_state>=variable_states.size())
+  {//in case the state-machine reached a "dead-end", it should push a null for that JSON variable
+   //going back one state.
+    (*m_exact_match_cb)(v_null,m_json_index);
+    decrease_current_state();
+  }
+
+  return variable_states[ current_state ];
+}
+
+bool is_array_state()
+{
+  return (reader_position_state().required_array_entry_no >= 0);
+}
+
+bool is_reader_located_on_required_depth()
+{
+  //upon the user requesting `select _1.a.b from s3object[*].c.d;` the c.d should be "cut off" from m_current_depth_non_anonymous
+  //to get the correct depth for the state-machine
+  return ((*m_current_depth_non_anonymous - static_cast<int>(m_from_clause_size)) == reader_position_state().required_depth_size);
+}
+
+bool is_on_final_state()
+{
+  return (current_state == (variable_states.size()));
+  //&& *m_current_depth == variable_states[ current_state -1 ].required_depth_size);
+
+  // NOTE: by ignoring the current depth, the matcher gives precedence to key-path matches while not ignoring access via arrays;
+  // meaning, upon requesting a.b[12], the [12] is not ignored, and the a<-->b distance is calculated as key distance, i.e. not counting arrays/objects with *no keys*.
+  // a user may request 'select _1.phonearray.num'; the reader will then verify that `num` exists within `phonearray`
+}
+
+bool is_reader_passed_required_array_entry()
+{
+  return (reader_position_state().actual_array_entry_no > reader_position_state().required_array_entry_no);
+}
+
+bool is_reader_located_on_array_according_to_current_state()
+{
+  return (nested_array_level == reader_position_state().last_array_start);
+}
+
+bool is_reader_position_depth_lower_than_required()
+{
+  //upon the user requesting `select _1.a.b from s3object[*].c.d;` the c.d should be "cut off" from m_current_depth_non_anonymous
+  //to have the correct depth for the state-machine
+  return ((*m_current_depth_non_anonymous - static_cast<int>(m_from_clause_size)) < reader_position_state().required_depth_size);
+}
+
+bool is_reader_located_on_array_entry_according_to_current_state()
+{
+  return (reader_position_state().actual_array_entry_no == reader_position_state().required_array_entry_no);
+}
+
+void increase_current_state()
+{
+  DBG
+
+  if(current_state >= variable_states.size()) return;
+  current_state ++;
+}
+
+void decrease_current_state()
+{
+  DBG
+
+  if(current_state == 0) return;
+  current_state --;
+}
+
+void key()
+{
+  DBG
+
+  if(reader_position_state().required_path.size())//current state is a key
+  {
+    std::vector<std::string>* filter = &reader_position_state().required_path;
+    auto required_key_depth_size = reader_position_state().required_key_depth_size;
+    if(std::equal((*key_path).begin()+(*from_clause).size() + required_key_depth_size, //key-path start-point = from-clause depth-size + key-depth
+                  (*key_path).end(),
+                  (*filter).begin(),
+                  (*filter).end(), iequal_predicate))
+    {
+      increase_current_state();//key matches according to the user request, advancing to the next state
+    }
+  }
+}
+
+void increase_array_index()
+{
+  if(is_reader_located_on_required_depth() && is_array_state())//TODO && is_array_state(). is it necessary? what about nesting level
+  {
+    DBG
+    reader_position_state().actual_array_entry_no++;
+  }
+}
+
+void dec_key()
+{
+  DBG
+
+  if(is_reader_position_depth_lower_than_required())
+  {//actual key-path is shorter than required
+    decrease_current_state();
+    return;
+  }
+
+  if(is_reader_located_on_required_depth() && is_array_state())//TODO && is_array_state(). is it necessary?; json_element_state.back() != ARRAY_STATE)
+  {//key-path depth matches, and it is an array
+    if(is_reader_located_on_array_entry_according_to_current_state())
+    {//we reached the required array entry
+      increase_current_state();
+    }
+    else if(is_reader_passed_required_array_entry())
+    {//had passed the array entry
+      decrease_current_state();
+    }
+  }
+}
+
+void new_value(s3selectEngine::value& v)
+{
+  DBG
+
+  if(is_on_final_state())
+  {
+    (*m_exact_match_cb)(v, m_json_index);
+    decrease_current_state();//the state-machine reached its final destination, "going back" one state; upon another match the matched value will override the last one
+  }
+  increase_array_index();//next value in the array
+}
+
+void end_object()
+{
+  increase_array_index();
+}
+
+void end_array()
+{
+  //init the correct array index
+  DBG
+
+  if(is_reader_located_on_array_according_to_current_state())
+  {//reached the end of the required array
+    reader_position_state().actual_array_entry_no = 0;
+    decrease_current_state();
+  }
+  nested_array_level --;
+
+  // option 1. move out of one array, and enter a new one; option 2. enter an object
+  increase_array_index();//increase only upon the correct array //TODO move it into dec_key()?
+  dec_key();
+}
+void start_array()
+{
+  DBG
+
+  nested_array_level++;
+  if(is_reader_located_on_required_depth())
+  {//the reader entered an array required by the JSON variable
+    reader_position_state().actual_array_entry_no = 0;
+    reader_position_state().last_array_start = nested_array_level;
+
+    if(is_reader_located_on_array_entry_according_to_current_state())
+    {//we reached the required array entry -> next state
+      increase_current_state();
+    }
+  }
+}
+
+}; //json_variable_access
+
+class json_variables_operations {
+
+  public:
+
+  std::vector<std::pair<json_variable_access*,size_t>> json_statement_variables{};
+
+  void init(std::vector<std::pair<json_variable_access*,size_t>>& jsv, //TODO init upon construction?
+            std::vector<std::string>* from_clause,
+            std::vector<std::string>* key_path,
+            int* current_depth,
+            int* current_depth_non_anonymous,
+            std::function<int(s3selectEngine::value&,int)>* exact_match_cb)
+  {
+    json_statement_variables = jsv;
+    int i=0;//the index per JSON variable
+    for(auto& var : json_statement_variables)
+    {
+      var.first->init(from_clause,
+                      key_path,
+                      current_depth,
+                      current_depth_non_anonymous,
+                      exact_match_cb,i++);
+    }
+  }
+
+  void start_array()
+  {
+    for(auto& j : json_statement_variables)
+    {
+      j.first->start_array();
+    }
+  }
+  void end_array()
+  {
+    for(auto& j : json_statement_variables)
+    {
+      j.first->end_array();
+    }
+  }
+  void dec_key()
+  {
+    for(auto& j : json_statement_variables)
+    {
+      j.first->dec_key();
+    }
+  }
+  void end_object()
+  {
+    for(auto& j : json_statement_variables)
+    {
+      j.first->end_object();
+    }
+  }
+  void key()
+  {
+    for(auto& j : json_statement_variables)
+    {
+      j.first->key();
+    }
+  }
+  void new_value(s3selectEngine::value& v)
+  {
+    for(auto& j : json_statement_variables)
+    {
+      j.first->new_value(v);
+    }
+  }
+};//json_variables_operations
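
For context, the class defined next plugs into rapidjson's SAX interface. A tiny independent example of that contract (standard rapidjson usage, unrelated to the s3select specifics):

    #include "rapidjson/reader.h"
    #include <iostream>

    struct CountKeys : rapidjson::BaseReaderHandler<rapidjson::UTF8<>, CountKeys> {
      int keys = 0;
      bool Key(const char*, rapidjson::SizeType, bool) { ++keys; return true; }
    };

    int main() {
      const char json[] = "{\"a\":1,\"b\":{\"c\":2}}";
      CountKeys handler;
      rapidjson::Reader reader;
      rapidjson::StringStream ss(json);
      reader.Parse(ss, handler);           // events are dispatched to the handler
      std::cout << handler.keys << "\n";   // 3
    }
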
+class JsonParserHandler : public rapidjson::BaseReaderHandler<rapidjson::UTF8<>, JsonParserHandler> {
+
+  public:
+
+  typedef enum {OBJECT_STATE,ARRAY_STATE} en_json_elm_state_t;
+  typedef std::pair<std::vector<std::string>, s3selectEngine::value> json_key_value_t;
+
+  row_state state = row_state::NA;
+  std::function<int(s3selectEngine::value&,int)> m_exact_match_cb;
+  std::function<int(json_key_value_t&)> m_star_operation_cb;
+
+  json_variables_operations variable_match_operations;
+  int row_count{};
+  std::vector<std::string> from_clause{};
+  bool prefix_match{};
+  s3selectEngine::value var_value;
+  ChunksStreamer stream_buffer;
+  bool init_buffer_stream;
+  rapidjson::Reader reader;
+  std::vector<en_json_elm_state_t> json_element_state;
+  std::vector<std::string> key_path;
+  std::function<int(void)> m_s3select_processing;
+  int m_start_row_depth;
+  int m_current_depth;
+  int m_current_depth_non_anonymous;
+  bool m_star_operation;
+  int m_sql_processing_status;
+  bool m_fatal_initialization_ind = false;
+  std::string m_fatal_initialization_description;
+
+  JsonParserHandler() : prefix_match(false),init_buffer_stream(false),m_start_row_depth(-1),m_current_depth(0),m_current_depth_non_anonymous(0),m_star_operation(false),m_sql_processing_status(0)
+  {
+  }
+
+  std::string get_key_path()
+  {//for debug
+    std::string res;
+    for(const auto & i: key_path)
+    {
+      res.append(i);
+      res.append(std::string("/"));
+    }
+    return res;
+  }
+
+  void dec_key_path()
+  {
+    if (json_element_state.size()) {
+      if(json_element_state.back() != ARRAY_STATE) {
+        if(key_path.size() != 0) {
+          key_path.pop_back();
+        }
+      }
+    }
+
+    variable_match_operations.dec_key();
+
+    //TODO m_current_depth-- should be done here
+    if(m_start_row_depth > m_current_depth)
+    {
+      prefix_match = false;
+    } else
+    if (prefix_match) {
+      if (state == row_state::ARRAY_START_ROW && m_start_row_depth == m_current_depth) {
+        m_sql_processing_status = m_s3select_processing(); //per each element in the array
+        ++row_count;
+      }
+    }
+  }
+
+  void push_new_key_value(s3selectEngine::value& v)
+  {
+    if (m_star_operation && prefix_match)
+    {
+      json_key_value_t key_value(key_path,v);
+      m_star_operation_cb(key_value);
+    }
+    if (prefix_match)
+      variable_match_operations.new_value(v);
+
+    dec_key_path();
+  }
+
+  bool Null() {
+    var_value.setnull();
+    push_new_key_value(var_value);
+    return true; }
+
+  bool Bool(bool b) {
+    var_value = b;
+    push_new_key_value(var_value);
+    return true; }
+
+  bool Int(int i) {
+    var_value = i;
+    push_new_key_value(var_value);
+    return true; }
+
+  bool Uint(unsigned u) {
+    var_value = u;
+    push_new_key_value(var_value);
+    return true; }
+
+  bool Int64(int64_t i) {
+    var_value = i;
+    push_new_key_value(var_value);
+    return true; }
+
+  bool Uint64(uint64_t u) {
+    var_value = u;
+    push_new_key_value(var_value);
+    return true; }
+
+  bool Double(double d) {
+    var_value = d;
+    push_new_key_value(var_value);
+    return true; }
+
+  bool String(const char* str, rapidjson::SizeType length, bool copy) {
+    //TODO use copy
+    var_value = str;
+    push_new_key_value(var_value);
+    return true;
+  }
+
+  bool Key(const char* str, rapidjson::SizeType length, bool copy) {
+    key_path.push_back(std::string(str));
+
+    if(!m_current_depth_non_anonymous){
+      //important: upon a key while m_current_depth_non_anonymous is ZERO,
+      //it should advance by 1 to get the correct current depth (for the non-anonymous counter).
+      m_current_depth_non_anonymous++;
+    }
+
+    if(from_clause.size() == 0 || std::equal(key_path.begin(), key_path.end(), from_clause.begin(), from_clause.end(), iequal_predicate)) {
+      prefix_match = true;
+    }
+
+    variable_match_operations.key();
+
+    return true;
+  }
+
+  bool is_already_row_started()
+  {
+    if(state == row_state::OBJECT_START_ROW || state == row_state::ARRAY_START_ROW)
+      return true;
+    else
+      return false;
+  }
+
+  bool StartObject() {
+    json_element_state.push_back(OBJECT_STATE);
+    m_current_depth++;
+    if(key_path.size()){
+      //advancing the counter only when there is a key.
+      m_current_depth_non_anonymous++;
+    }
+
+    if (prefix_match && !is_already_row_started()) {
+      state = row_state::OBJECT_START_ROW;
+      m_start_row_depth = m_current_depth;
+      ++row_count;
+    }
+
+    return true;
+  }
+
+  bool EndObject(rapidjson::SizeType memberCount) {
+    json_element_state.pop_back();
+    m_current_depth --;
+    m_current_depth_non_anonymous --;
+
+    variable_match_operations.end_object();
+
+    dec_key_path();
+    if (state == row_state::OBJECT_START_ROW && (m_start_row_depth > m_current_depth)) {
+      m_sql_processing_status = m_s3select_processing();
+      state = row_state::NA;
+    }
+    return true;
+  }
+  bool StartArray() {
+    json_element_state.push_back(ARRAY_STATE);
+    m_current_depth++;
+    if(key_path.size()){
+      //advancing the counter only when there is a key.
+      m_current_depth_non_anonymous++;
+    }
+
+    if (prefix_match && !is_already_row_started()) {
+      state = row_state::ARRAY_START_ROW;
+      m_start_row_depth = m_current_depth;
+    }
+
+    variable_match_operations.start_array();
+
+    return true;
+  }
+
+  bool EndArray(rapidjson::SizeType elementCount) {
+    json_element_state.pop_back();
+    m_current_depth--;
+    m_current_depth_non_anonymous--;
+
+    dec_key_path();
+
+    if (state == row_state::ARRAY_START_ROW && (m_start_row_depth > m_current_depth)) {
+      state = row_state::NA;
+    }
+
+    variable_match_operations.end_array();
+
+    return true;
+  }
+
+  void set_prefix_match(std::vector<std::string>& requested_prefix_match)
+  {//purpose: set the filter according to the SQL statement (from clause)
+    from_clause = requested_prefix_match;
+    if(from_clause.size() == 0)
+    {
+      prefix_match = true;
+      m_start_row_depth = m_current_depth;
+    }
+  }
+
+  void set_statement_json_variables(std::vector<std::pair<json_variable_access*,size_t>>& statement_variables)
+  {//purpose: set the JSON variables extracted from the SQL statement (projection columns, predicate columns)
+    variable_match_operations.init(
+        statement_variables,
+        &from_clause,
+        &key_path,
+        &m_current_depth,
+        &m_current_depth_non_anonymous,
+        &m_exact_match_cb);
+  }
+
+  void set_exact_match_callback(std::function<int(s3selectEngine::value&,int)> f)
+  {//purpose: upon a key matching one of the exact filters, the callback is called.
+    m_exact_match_cb = f;
+  }
+
+  void set_s3select_processing_callback(std::function<int(void)>& f)
+  {//purpose: execute the s3select statement on a matching row (according to the filters)
+    m_s3select_processing = f;
+  }
+
+  void set_push_per_star_operation_callback( std::function<int(json_key_value_t&)> cb)
+  {
+    m_star_operation_cb = cb;
+  }
+
+  void set_star_operation()
+  {
+    m_star_operation = true;
+  }
+
+  bool is_fatal_initialization()
+  {
+    return m_fatal_initialization_ind;
+  }
+
+  int process_json_buffer(char* json_buffer,size_t json_buffer_sz, bool end_of_stream=false)
+  {//the user keeps calling with buffers; the method is not aware of the object size.
+
+
+    try{
+      if(!init_buffer_stream)
+      {
+        //set the memoryStreamer
+        reader.IterativeParseInit();
+        init_buffer_stream = true;
+
+      }
+
+      //the non-processed bytes plus the next chunk are copied into the main processing buffer
+      if(!end_of_stream)
+        stream_buffer.resetBuffer(json_buffer, json_buffer_sz);
+
+      while (!reader.IterativeParseComplete()) {
+        reader.IterativeParseNext<rapidjson::kParseDefaultFlags>(stream_buffer, *this);
+
+        //once all key-values have moved into s3select (for further filtering and processing), it should be cleared
+
+        //TODO in the case the chunk is too small or some value in the input is too big, the parsing will fail.
+        if (!end_of_stream && stream_buffer.next_src_==0 && stream_buffer.getBytesLeft() < 2048)
+        {//the non-processed bytes will be processed on the next fetched chunk
+         //TODO save remaining bytes to the internal buffer (or the caller will use 2 sets of buffers)
+          stream_buffer.saveRemainingBytes();
+          return 0;
+        }
+        if(m_sql_processing_status == JSON_PROCESSING_LIMIT_REACHED)//return status(int) from the callback
+        {
+          return JSON_PROCESSING_LIMIT_REACHED;
+        }
+        // error message
+        if(reader.HasParseError()) {
+          rapidjson::ParseErrorCode c = reader.GetParseErrorCode();
+          size_t ofs = reader.GetErrorOffset();
+          std::stringstream error_str;
+          error_str << "parsing error. code:" << c << " position: " << ofs << std::endl;
+          throw s3selectEngine::base_s3select_exception(error_str.str(), s3selectEngine::base_s3select_exception::s3select_exp_en_t::FATAL);
+          return -1;
+        }
+      }//while reader.IterativeParseComplete
+    }
+    catch(std::exception &e){
+      std::stringstream error_str;
+      error_str << "failed to process JSON : " << e.what() << std::endl;
+      throw s3selectEngine::base_s3select_exception(error_str.str(), s3selectEngine::base_s3select_exception::s3select_exp_en_t::FATAL);
+      return -1;
+    }
+    return 0;
+  }
+};
+
+
+#endif
+
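
Before moving to the next header, a sketch of how a caller might drive JsonParserHandler end to end (hypothetical driver; the callback shapes follow the setters above, and some of those signatures are reconstructions, so treat this as an illustration only):

    #include "s3select_json_parser.h"
    #include <cstring>
    #include <iostream>

    int main() {
      JsonParserHandler handler;
      std::vector<std::string> from = {"root"};      // acts like a FROM-clause filter
      handler.set_prefix_match(from);
      std::function<int(void)> per_row = []() {
        std::cout << "row matched\n";
        return 0;                                    // 0 keeps processing going
      };
      handler.set_s3select_processing_callback(per_row);
      char buf[] = "{\"root\": {\"a\": 1}}";
      handler.process_json_buffer(buf, strlen(buf));        // feed the only chunk
      return handler.process_json_buffer(nullptr, 0, true); // signal end of stream
    }
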
diff --git a/src/s3select/include/s3select_oper.h b/src/s3select/include/s3select_oper.h
new file mode 100644
index 000000000..89544fc1d
--- /dev/null
+++ b/src/s3select/include/s3select_oper.h
@@ -0,0 +1,3326 @@
+#ifndef __S3SELECT_OPER__
+#define __S3SELECT_OPER__
+
+#include <string>
+#include <iostream>
+#include <vector>
+#include <map>
+#include <set>
+#include <algorithm>
+#include <cstring>
+#include <cmath>
+#include <iomanip>
+
+#include <boost/lexical_cast.hpp>
+#include <boost/date_time/posix_time/posix_time.hpp>
+#include <boost/spirit/include/classic_core.hpp>
+#include "s3select_parquet_intrf.h" //NOTE: should include first (c++11 std::string_view)
+
+
+#if __has_include (<hs/hs.h>) && REGEX_HS
+  #include <hs/hs.h>
+#elif __has_include (<re2/re2.h>) && REGEX_RE2
+  #include <re2/re2.h>
+#else
+  #include <regex>
+  #undef REGEX_HS
+  #undef REGEX_RE2
+#endif
+
+namespace bsc = BOOST_SPIRIT_CLASSIC_NS;
+
+namespace s3selectEngine
+{
+
+//=== stl allocator definition
+//this allocator fits placement new (no calls to the heap)
+
+class chunkalloc_out_of_mem
+{
+};
+
+template <class T, size_t pool_sz>
+class ChunkAllocator : public std::allocator<T>
+{
+public:
+  typedef size_t size_type;
+  typedef T* pointer;
+  size_t buffer_capacity;
+  char* buffer_ptr;
+
+  //only ONE pool, not allocated dynamically; main assumption: the caller knows its memory limitations in advance.
+  char buffer[pool_sz];
+
+  template<class _Tp1>
+  struct rebind
+  {
+    typedef ChunkAllocator<_Tp1, pool_sz> other;
+  };
+
+  //==================================
+  inline T* _Allocate(size_t num_of_element, T*)
+  {
+    // allocate storage for _Count elements of type T
+
+    pointer res = (pointer)(buffer_ptr + buffer_capacity);
+
+    buffer_capacity+= sizeof(T) * num_of_element;
+
+    size_t addr_alignment = (buffer_capacity % sizeof(char*));
+    buffer_capacity += addr_alignment != 0 ? sizeof(char*) - addr_alignment : 0;
+
+    if (buffer_capacity > sizeof(buffer))
+    {
+      throw chunkalloc_out_of_mem();
+    }
+
+    return res;
+  }
+
+  //==================================
+  inline pointer allocate(size_type n, [[maybe_unused]] const void* hint = 0)
+  {
+    return (_Allocate(n, (pointer)0));
+  }
+
+  //==================================
+  inline void deallocate(pointer p, size_type n)
+  {
+  }
+
+  //==================================
+  ChunkAllocator() noexcept : std::allocator<T>()
+  {
+    // alloc from main buffer
+    buffer_capacity = 0;
+    memset( &buffer[0], 0, sizeof(buffer));
+    buffer_ptr = &buffer[0];
+  }
+
+  //==================================
+  ChunkAllocator(const ChunkAllocator& other) noexcept : std::allocator<T>(other)
+  {
+    // copy constructor
+    buffer_capacity = 0;
+    buffer_ptr = &buffer[0];
+  }
+
+  //==================================
+  ~ChunkAllocator() noexcept
+  {
+    //do nothing
+  }
+};
+
+class base_statement;
+//typedef std::vector<base_statement*> bs_stmt_vec_t; //without a specific allocator
+
+//ChunkAllocator, prevents allocation from the heap.
+typedef std::vector<base_statement*, ChunkAllocator<base_statement*, 4096> > bs_stmt_vec_t;
+
+class base_s3select_exception : public std::exception
+{
+
+public:
+  enum class s3select_exp_en_t
+  {
+    NONE,
+    ERROR,
+    FATAL
+  } ;
+
+private:
+  s3select_exp_en_t m_severity;
+
+public:
+  std::string _msg;
+  explicit base_s3select_exception(const char* n) : m_severity(s3select_exp_en_t::NONE)
+  {
+    _msg.assign(n);
+  }
+  base_s3select_exception(const char* n, s3select_exp_en_t severity) : m_severity(severity)
+  {
+    _msg.assign(n);
+  }
+  base_s3select_exception(std::string n, s3select_exp_en_t severity) : m_severity(severity)
+  {
+    _msg = n;
+  }
+
+  virtual const char* what() const noexcept
+  {
+    return _msg.c_str();
+  }
+
+  s3select_exp_en_t severity()
+  {
+    return m_severity;
+  }
+
+  virtual ~base_s3select_exception() = default;
+};
+
+
+
+class s3select_allocator //s3select is the "owner"
+{
+private:
+
+  std::vector<char*> list_of_buff;
+  std::vector<char*> list_of_ptr;
+  u_int32_t m_idx;
+
+#define __S3_ALLOCATION_BUFF__ (24*1024)
+  void check_capacity(size_t sz)
+  {
+    if (sz>__S3_ALLOCATION_BUFF__)
+    {
+      throw base_s3select_exception("requested size too big", base_s3select_exception::s3select_exp_en_t::FATAL);
+    }
+
+    if ((m_idx + sz) >= __S3_ALLOCATION_BUFF__)
+    {
+      list_of_buff.push_back((char*)malloc(__S3_ALLOCATION_BUFF__));
+      m_idx = 0;
+    }
+  }
+
+  void inc(size_t sz)
+  {
+    m_idx += sz;
+    m_idx += sizeof(char*) - (m_idx % sizeof(char*)); //alignment
+  }
+
+public:
+  s3select_allocator():m_idx(0)
+  {
+    list_of_buff.push_back((char*)malloc(__S3_ALLOCATION_BUFF__));
+  }
+
+  void *alloc(size_t sz)
+  {
+    check_capacity(sz);
+
+    char* buff = list_of_buff.back();
+
+    u_int32_t idx = m_idx;
+
+    inc(sz);
+
+    return &buff[ idx ];
+  }
+
+  void push_for_delete(void *p)
+  {//in case of using S3SELECT_NO_PLACEMENT_NEW
+    list_of_ptr.push_back((char*)p);
+  }
+
+  virtual ~s3select_allocator()
+  {
+    for(auto b : list_of_buff)
+    {
+      free(b);
+    }
+
+    for(auto b : list_of_ptr)
+    {//in case of using S3SELECT_NO_PLACEMENT_NEW
+      delete(b);
+    }
+  }
+};
+
+// placement new for allocation of all s3select objects on a single buffer (or a few); deallocation of those objects is done by releasing the buffer.
+#define S3SELECT_NEW(self, type , ... ) [=]() \
+  { \
+    auto res=new (self->getAllocator()->alloc(sizeof(type))) type(__VA_ARGS__); \
+    return res; \
+  }();
+
+// no placement new; actually, it's an ordinary new with additional functionality for deleting the AST nodes.
+// (this change is for verifying the valgrind report on leaks)
+#define S3SELECT_NO_PLACEMENT_NEW(self, type , ... ) [=]() \
+  { \
+    auto res=new type(__VA_ARGS__); \
+    self->getAllocator()->push_for_delete(res); \
+    return res; \
+  }();
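
A small sketch of the placement-new arena in use (assumes this header; the `holder` type is hypothetical and stands in for the s3select object that owns the allocator):

    #include "s3select_oper.h"
    using namespace s3selectEngine;

    struct holder {
      s3select_allocator alloc;
      s3select_allocator* getAllocator() { return &alloc; }
    };

    int main() {
      holder h;
      holder* self = &h;
      // allocates from the pooled buffer; there is no per-object free,
      // the pool is released wholesale by ~s3select_allocator
      int* p = S3SELECT_NEW(self, int, 42);
      return (*p == 42) ? 0 : 1;
    }
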
+class s3select_reserved_word
+{
+  public:
+
+  enum class reserve_word_en_t
+  {
+    NA,
+    S3S_NULL,//TODO check the AWS definitions for reserved words; it's a long list, and what about function names?
+    S3S_NAN,
+    S3S_TRUE,
+    S3S_FALSE
+  } ;
+
+  using reserved_words = std::map<std::string,reserve_word_en_t>;
+
+  const reserved_words m_reserved_words=
+  {
+    {"null",reserve_word_en_t::S3S_NULL},{"NULL",reserve_word_en_t::S3S_NULL},
+    {"nan",reserve_word_en_t::S3S_NAN},{"NaN",reserve_word_en_t::S3S_NAN},
+    {"true",reserve_word_en_t::S3S_TRUE},{"TRUE",reserve_word_en_t::S3S_TRUE},
+    {"false",reserve_word_en_t::S3S_FALSE},{"FALSE",reserve_word_en_t::S3S_FALSE}
+  };
+
+  bool is_reserved_word(std::string & token)
+  {
+    return m_reserved_words.find(token) != m_reserved_words.end() ;
+  }
+
+  reserve_word_en_t get_reserved_word(std::string & token)
+  {
+    if (is_reserved_word(token)==true)
+    {
+      return m_reserved_words.find(token)->second;
+    }
+    else
+    {
+      return reserve_word_en_t::NA;
+    }
+  }
+
+};
+
+class base_statement;
+class projection_alias
+{
+//purpose: mapping between alias-name and base_statement*
+//these routines are *NOT* intensive; they run once, at query parse time.
+
+private:
+  std::vector< std::pair<std::string, base_statement*> > alias_map;
+
+public:
+  std::vector< std::pair<std::string, base_statement*> >* get()
+  {
+    return &alias_map;
+  }
+
+  bool insert_new_entry(std::string alias_name, base_statement* bs)
+  {
+    //purpose: only unique alias names.
+
+    for(auto alias: alias_map)
+    {
+      if(alias.first.compare(alias_name) == 0)
+      {
+        return false; //alias name already exists
+      }
+
+    }
+    std::pair<std::string, base_statement*> new_alias(alias_name, bs);
+    alias_map.push_back(new_alias);
+
+    return true;
+  }
+
+  base_statement* search_alias(std::string alias_name)
+  {
+    for(auto alias: alias_map)
+    {
+      if(alias.first.compare(alias_name) == 0)
+      {
+        return alias.second; //reference to the execution node
+      }
+    }
+    return 0;
+  }
+};
+
+struct binop_plus
+{
+  double operator()(double a, double b)
+  {
+    return a + b;
+  }
+};
+
+struct binop_minus
+{
+  double operator()(double a, double b)
+  {
+    return a - b;
+  }
+};
+
+struct binop_mult
+{
+  double operator()(double a, double b)
+  {
+    return a * b;
+  }
+};
+
+struct binop_div
+{
+  double operator()(double a, double b)
+  {
+    if (b == 0) {
+      if( std::isnan(a)) {
+        return a;
+      } else {
+        throw base_s3select_exception("division by zero is not allowed");
+      }
+    } else {
+      return a / b;
+    }
+  }
+};
+
+struct binop_pow
+{
+  double operator()(double a, double b)
+  {
+    return pow(a, b);
+  }
+};
+
+struct binop_modulo
+{
+  int64_t operator()(int64_t a, int64_t b)
+  {
+    if (b == 0)
+    {
+      throw base_s3select_exception("Mod zero is not allowed");
+    } else {
+      return a % b;
+    }
+  }
+};
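
The division functor above encodes the SQL-flavored edge cases: NaN propagates, and a zero divisor throws. A behavior sketch, as wired into value::operator/ further down (assumes this header):

    #include "s3select_oper.h"
    #include <iostream>

    int main() {
      s3selectEngine::value n((int64_t)10), d((int64_t)0);
      try {
        n / d;                          // routed through compute<binop_div>
      } catch (s3selectEngine::base_s3select_exception& e) {
        std::cout << e.what() << "\n";  // "division by zero is not allowed"
      }
    }
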
+typedef std::tuple<boost::posix_time::ptime, boost::posix_time::time_duration, bool> timestamp_t;
+
+class value;
+class multi_values
+{
+  public:
+    std::vector<value*> values;
+
+  public:
+    void push_value(value* v);
+
+    void clear()
+    {
+      values.clear();
+    }
+
+};
+
+class value
+{
+
+public:
+  typedef union
+  {
+    int64_t num;
+    char* str;//TODO consider string_view(save copy)
+    double dbl;
+    timestamp_t* timestamp;
+    bool b;
+  } value_t;
+
+  multi_values multiple_values;
+
+private:
+  value_t __val;
+  //JSON queries have a unique structure: the variable-name resides on the input, and there are cases where it should be extracted.
+  std::vector<std::string> m_json_key;
+  std::string m_to_string;
+  std::string m_str_value;
+
+  int32_t m_precision=-1;
+  int32_t m_scale=-1;
+
+public:
+  enum class value_En_t
+  {
+    DECIMAL,
+    FLOAT,
+    STRING,
+    TIMESTAMP,
+    S3NULL,
+    S3NAN,
+    BOOL,
+    MULTIPLE_VALUES,
+    NA
+  } ;
+  value_En_t type;
+
+  explicit value(int64_t n) : type(value_En_t::DECIMAL)
+  {
+    __val.num = n;
+  }
+  explicit value(int n) : type(value_En_t::DECIMAL)
+  {
+    __val.num = n;
+  }
+  explicit value(bool b) : type(value_En_t::BOOL)
+  {
+    __val.num = (int64_t)b;
+  }
+  explicit value(double d) : type(value_En_t::FLOAT)
+  {
+    __val.dbl = d;
+  }
+  explicit value(timestamp_t* timestamp) : type(value_En_t::TIMESTAMP)
+  {
+    __val.timestamp = timestamp;
+  }
+
+  explicit value(const char* s) : type(value_En_t::STRING)
+  {
+    m_str_value.assign(s);
+    __val.str = m_str_value.data();
+  }
+
+  explicit value(std::nullptr_t) : type(value_En_t::S3NULL)
+  {}
+
+  ~value()
+  {//TODO should be a part of the cleanup routine(__function::push_for_cleanup)
+    multiple_values.values.clear();
+  }
+
+  value():type(value_En_t::NA)
+  {
+    __val.num=0;
+  }
+
+  bool is_number() const
+  {
+    if ((type == value_En_t::DECIMAL || type == value_En_t::FLOAT))
+    {
+      return true;
+    }
+
+    return false;
+  }
+
+  bool is_string() const
+  {
+    return type == value_En_t::STRING;
+  }
+  bool is_timestamp() const
+  {
+    return type == value_En_t::TIMESTAMP;
+  }
+
+  bool is_bool() const
+  {
+    return type == value_En_t::BOOL;
+  }
+
+  bool is_null() const
+  {
+    return type == value_En_t::S3NULL;
+  }
+
+  bool is_nan() const
+  {
+    if (type == value_En_t::FLOAT) {
+      return std::isnan(this->__val.dbl);
+    }
+    return type == value_En_t::S3NAN;
+  }
+
+  bool is_true()
+  {
+    return (i64()!=0 && !is_null());
+  }
+
+  void set_nan()
+  {
+    __val.dbl = NAN;
+    type = value_En_t::FLOAT;
+  }
+
+  void set_true()
+  {
+    __val.num = 1;
+    type = value_En_t::BOOL;
+  }
+
+  void set_false()
+  {
+    __val.num = 0;
+    type = value_En_t::BOOL;
+  }
+
+  void setnull()
+  {
+    type = value_En_t::S3NULL;
+  }
+
+  void set_precision_scale(int32_t* precision, int32_t* scale)
+  {
+    m_precision = *precision;
+    m_scale = *scale;
+  }
+
+  void get_precision_scale(int32_t* precision, int32_t* scale)
+  {
+    *precision = m_precision;
+    *scale = m_scale;
+  }
+
+  void set_string_nocopy(char* str)
+  {//purpose: value does not own the string
+    __val.str = str;
+    type = value_En_t::STRING;
+  }
+
+  value_En_t _type() const { return type; }
+
+  void set_json_key_path(std::vector<std::string>& key_path)
+  {
+    m_json_key = key_path;
+  }
+
+  const char* to_string() //TODO very intensive, must improve this
+  {
+
+    if (type != value_En_t::STRING)
+    {
+      if (type == value_En_t::DECIMAL)
+      {
+        m_to_string.assign( boost::lexical_cast<std::string>(__val.num) );
+      }
+      if (type == value_En_t::BOOL)
+      {
+        if(__val.num == 0)
+        {
+          m_to_string.assign("false");
+        }
+        else
+        {
+          m_to_string.assign("true");
+        }
+      }
+      else if(type == value_En_t::FLOAT)
+      {
+        if(m_precision != -1 && m_scale != -1)
+        {
+          std::stringstream ss;
+          ss << std::fixed << std::setprecision(m_scale) << __val.dbl;
+          m_to_string = ss.str();
+        }
+        else
+        {
+          m_to_string.assign( boost::lexical_cast<std::string>(__val.dbl) );
+        }
+      }
+      else if (type == value_En_t::TIMESTAMP)
+      {
+        boost::posix_time::ptime new_ptime;
+        boost::posix_time::time_duration td;
+        bool flag;
+
+        std::tie(new_ptime, td, flag) = *__val.timestamp;
+
+        if (flag)
+        {
+          m_to_string = to_iso_extended_string(new_ptime) + "Z";
+        }
+        else
+        {
+          std::string tz_hour = std::to_string(std::abs(td.hours()));
+          std::string tz_mint = std::to_string(std::abs(td.minutes()));
+          std::string sign;
+          if (td.is_negative())
+            sign = "-";
+          else
+            sign = "+";
+
+          m_to_string = to_iso_extended_string(new_ptime) + sign +
+                        std::string(2 - tz_hour.length(), '0') + tz_hour + ":" +
+                        std::string(2 - tz_mint.length(), '0') + tz_mint;
+        }
+      }
+      else if (type == value_En_t::S3NULL)
+      {
+        m_to_string.assign("null");
+      }
+    }
+    else
+    {
+      m_to_string.assign( __val.str );
+    }
+
+    if(m_json_key.size())
+    {
+      std::string key_path;
+      for(auto& p : m_json_key)
+      {//TODO upon star-operation the key-path assignment is very intensive
+        key_path.append(p);
+        key_path.append(".");
+      }
+
+      key_path.append(" : ");
+      key_path.append(m_to_string);
+      m_to_string = key_path;
+    }
+
+    return m_to_string.c_str();
+  }
+
+  value(const value& o)
+  {
+    if(o.type == value_En_t::STRING)
+    {
+      if(o.m_str_value.size())
+      {
+        m_str_value = o.m_str_value;
+        __val.str = m_str_value.data();
+      }
+      else if(o.__val.str)
+      {
+        __val.str = o.__val.str;
+      }
+    }
+    else
+    {
+      this->__val = o.__val;
+    }
+
+    this->m_json_key = o.m_json_key;
+
+    this->type = o.type;
+  }
+
+  value& operator=(value& o)
+  {
+    if(o.type == value_En_t::STRING)
+    {
+      if(o.m_str_value.size())
+      {
+        m_str_value = o.m_str_value;
+        __val.str = m_str_value.data();
+      }
+      else if(o.__val.str)
+      {
+        __val.str = o.__val.str;
+      }
+    }
+    else
+    {
+      this->__val = o.__val;
+    }
+
+    this->type = o.type;
+
+    this->m_json_key = o.m_json_key;
+
+    return *this;
+  }
+
+  value& operator=(const char* s)
+  {
+    m_str_value.assign(s);
+    this->__val.str = m_str_value.data();
+    this->type = value_En_t::STRING;
+
+    return *this;
+  }
+
+  value& operator=(int64_t i)
+  {
+    this->__val.num = i;
+    this->type = value_En_t::DECIMAL;
+
+    return *this;
+  }
+
+  value& operator=(int i)
+  {
+    this->__val.num = i;
+    this->type = value_En_t::DECIMAL;
+
+    return *this;
+  }
+
+  value& operator=(unsigned i)
+  {
+    this->__val.num = i;
+    this->type = value_En_t::DECIMAL;
+
+    return *this;
+  }
+
+  value& operator=(uint64_t i)
+  {
+    this->__val.num = i;
+    this->type = value_En_t::DECIMAL;
+
+    return *this;
+  }
+
+  value& operator=(double d)
+  {
+    this->__val.dbl = d;
+    this->type = value_En_t::FLOAT;
+
+    return *this;
+  }
+
+  value& operator=(bool b)
+  {
+    this->__val.num = (int64_t)b;
+    this->type = value_En_t::BOOL;
+
+    return *this;
+  }
+
+  value& operator=(timestamp_t* p)
+  {
+    this->__val.timestamp = p;
+    this->type = value_En_t::TIMESTAMP;
+
+    return *this;
+  }
+
+  int64_t i64()
+  {
+    return __val.num;
+  }
+
+  const char* str()
+  {
+    return __val.str;
+  }
+
+  double dbl()
+  {
+    return __val.dbl;
+  }
+
+  bool bl()
+  {
+    return __val.b;
+  }
+
+  timestamp_t* timestamp() const
+  {
+    return __val.timestamp;
+  }
+  bool operator<(const value& v)//basic compare operator, the most intensive runtime operation
+  {
+    //TODO NA possible?
+    if (is_string() && v.is_string())
+    {
+      return strcmp(__val.str, v.__val.str) < 0;
+    }
+
+    if (is_number() && v.is_number())
+    {
+
+      if(type != v.type) //conversion //TODO find a better way
+      {
+        if (type == value_En_t::DECIMAL)
+        {
+          return (double)__val.num < v.__val.dbl;
+        }
+        else
+        {
+          return __val.dbl < (double)v.__val.num;
+        }
+      }
+      else //no conversion
+      {
+        if(type == value_En_t::DECIMAL)
+        {
+          return __val.num < v.__val.num;
+        }
+        else
+        {
+          return __val.dbl < v.__val.dbl;
+        }
+
+      }
+    }
+
+    if(is_timestamp() && v.is_timestamp())
+    {
+      return *timestamp() < *(v.timestamp());
+    }
+
+    if(is_nan() || v.is_nan())
+    {
+      return false;
+    }
+
+    throw base_s3select_exception("operands not of the same type(numeric , string), while comparision");
+  }
+
+  bool operator>(const value& v) //basic compare operator, the most intensive runtime operation
+  {
+    //TODO NA possible?
+    if (is_string() && v.is_string())
+    {
+      return strcmp(__val.str, v.__val.str) > 0;
+    }
+
+    if (is_number() && v.is_number())
+    {
+
+      if(type != v.type) //conversion //TODO find a better way
+      {
+        if (type == value_En_t::DECIMAL)
+        {
+          return (double)__val.num > v.__val.dbl;
+        }
+        else
+        {
+          return __val.dbl > (double)v.__val.num;
+        }
+      }
+      else //no conversion
+      {
+        if(type == value_En_t::DECIMAL)
+        {
+          return __val.num > v.__val.num;
+        }
+        else
+        {
+          return __val.dbl > v.__val.dbl;
+        }
+
+      }
+    }
+
+    if(is_timestamp() && v.is_timestamp())
+    {
+      return *timestamp() > *(v.timestamp());
+    }
+
+    if(is_nan() || v.is_nan())
+    {
+      return false;
+    }
+
+    throw base_s3select_exception("operands not of the same type(numeric , string), while comparision");
+  }
+  friend bool operator==(const value& lhs, const value& rhs) //basic compare operator, the most intensive runtime operation
+  {
+    //TODO NA possible?
+    if (lhs.is_string() && rhs.is_string())
+    {
+      return strcmp(lhs.__val.str, rhs.__val.str) == 0;
+    }
+
+
+    if (lhs.is_number() && rhs.is_number())
+    {
+
+      if(lhs.type != rhs.type) //conversion //TODO find a better way
+      {
+        if (lhs.type == value_En_t::DECIMAL)
+        {
+          return (double)lhs.__val.num == rhs.__val.dbl;
+        }
+        else
+        {
+          return lhs.__val.dbl == (double)rhs.__val.num;
+        }
+      }
+      else //no conversion
+      {
+        if(lhs.type == value_En_t::DECIMAL)
+        {
+          return lhs.__val.num == rhs.__val.num;
+        }
+        else
+        {
+          return lhs.__val.dbl == rhs.__val.dbl;
+        }
+
+      }
+    }
+
+    if(lhs.is_timestamp() && rhs.is_timestamp())
+    {
+      return *(lhs.timestamp()) == *(rhs.timestamp());
+    }
+
+    if(
+      (lhs.is_bool() && rhs.is_bool())
+      ||
+      (lhs.is_number() && rhs.is_bool())
+      ||
+      (lhs.is_bool() && rhs.is_number())
+    )
+    {
+      return lhs.__val.num == rhs.__val.num;
+    }
+
+    if (lhs.is_nan() || rhs.is_nan())
+    {
+      return false;
+    }
+
+    // in the case of NULL on the right side or NULL on the left side, the result is false.
+    if(lhs.is_null() || rhs.is_null())
+    {
+      return false;
+    }
+
+    throw base_s3select_exception("operands not of the same type(numeric , string), while comparision");
+  }
+  bool operator<=(const value& v)
+  {
+    if (is_nan() || v.is_nan()) {
+      return false;
+    } else {
+      return !(*this>v);
+    }
+  }
+
+  bool operator>=(const value& v)
+  {
+    if (is_nan() || v.is_nan()) {
+      return false;
+    } else {
+      return !(*this<v);
+    }
+  }
+
+  bool operator!=(const value& v)
+  {
+    return !(*this == v);
+  }
+
+  template<typename binop> //conversion rules for arithmetical binary operations
+  value& compute(value& l, const value& r) //left should be this; it contains the result
+  {
+    binop __op;
+
+    if (l.is_string() || r.is_string())
+    {
+      throw base_s3select_exception("illegal binary operation with string");
+    }
+    if (l.is_bool() || r.is_bool())
+    {
+      throw base_s3select_exception("illegal binary operation with bool type");
+    }
+
+    if (l.is_number() && r.is_number())
+    {
+      if (l.type != r.type)
+      {
+        //conversion
+
+        if (l.type == value_En_t::DECIMAL)
+        {
+          l.__val.dbl = __op((double)l.__val.num, r.__val.dbl);
+          l.type = value_En_t::FLOAT;
+        }
+        else
+        {
+          l.__val.dbl = __op(l.__val.dbl, (double)r.__val.num);
+          l.type = value_En_t::FLOAT;
+        }
+      }
+      else
+      {
+        //no conversion
+
+        if (l.type == value_En_t::DECIMAL)
+        {
+          l.__val.num = __op(l.__val.num, r.__val.num );
+          l.type = value_En_t::DECIMAL;
+        }
+        else
+        {
+          l.__val.dbl = __op(l.__val.dbl, r.__val.dbl );
+          l.type = value_En_t::FLOAT;
+        }
+      }
+    }
+
+    if (l.is_null() || r.is_null())
+    {
+      l.setnull();
+    } else if(l.is_nan() || r.is_nan()) {
+      l.set_nan();
+    }
+
+    return l;
+  }
+
+  value& operator+(const value& v)
+  {
+    return compute<binop_plus>(*this, v);
+  }
+
+  value operator++(int)
+  {
+    *this = *this + static_cast<value>(1);
+    return *this;
+  }
+
+  value& operator-(const value& v)
+  {
+    return compute<binop_minus>(*this, v);
+  }
+
+  value& operator*(const value& v)
+  {
+    return compute<binop_mult>(*this, v);
+  }
+
+  value& operator/(value& v)
+  {
+    if (v.is_null() || this->is_null()) {
+      v.setnull();
+      return v;
+    } else {
+      return compute<binop_div>(*this, v);
+    }
+  }
+
+  value& operator^(const value& v)
+  {
+    return compute<binop_pow>(*this, v);
+  }
+
+  value & operator%(const value &v)
+  {
+    if(v.type == value_En_t::DECIMAL) {
+      return compute<binop_modulo>(*this,v);
+    } else {
+      throw base_s3select_exception("wrong use of modulo operation!");
+    }
+  }
+};
+
+void multi_values::push_value(value *v)
+{
+  //v could be single or multiple values
+  if (v->type == value::value_En_t::MULTIPLE_VALUES)
+  {
+    for (auto sv : v->multiple_values.values)
+    {
+      values.push_back(sv);
+    }
+  }
+  else
+  {
+    values.push_back(v);
+  }
+}
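
A short sketch of the tagged-union semantics just defined (assumes this header): mixed DECIMAL/FLOAT arithmetic promotes to FLOAT, and string comparison is strcmp-based:

    #include "s3select_oper.h"
    #include <iostream>
    using s3selectEngine::value;

    int main() {
      value a((int64_t)7), b(2.5);
      a = a + b;                          // DECIMAL + FLOAT -> FLOAT
      std::cout << a.to_string() << "\n"; // 9.5
      value s1("abc"), s2("abd");
      std::cout << (s1 < s2) << "\n";     // 1
    }
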
+class scratch_area
+{
+
+private:
+  std::vector<value> *m_schema_values; //values got a type
+  int m_upper_bound;
+
+  std::vector<std::pair<std::string,int>> m_column_name_pos;
+  bool parquet_type;
+  char str_buff[4096];
+  uint16_t buff_loc;
+  int max_json_idx;
+  timestamp_t tmstmp;
+
+public:
+
+  typedef std::pair<std::vector<std::string>,value> json_key_value_t;
+  typedef std::vector< json_key_value_t > json_star_op_cont_t;
+  json_star_op_cont_t m_json_star_operation;
+
+  scratch_area():m_upper_bound(-1),parquet_type(false),buff_loc(0),max_json_idx(-1)
+  {
+    m_schema_values = new std::vector<value>(128,value(nullptr));
+  }
+
+  ~scratch_area()
+  {
+    delete m_schema_values;
+  }
+
+  json_star_op_cont_t* get_star_operation_cont()
+  {
+    return &m_json_star_operation;
+  }
+
+  void clear_data()
+  {
+    m_json_star_operation.clear();
+    for(int i=0;i<=max_json_idx;i++)
+    {
+      (*m_schema_values)[i].setnull();
+    }
+  }
+
+  void set_column_pos(const char* n, int pos)//TODO use std::string
+  {
+    m_column_name_pos.push_back( std::pair<std::string,int>(n, pos));
+  }
+
+  void update(std::vector<char*>& tokens, size_t num_of_tokens)
+  {
+    size_t i=0;
+    //increase the Vector::m_schema_values capacity (it should happen few times)
+    if ((*m_schema_values).capacity() < tokens.size())
+    {
+      (*m_schema_values).resize( tokens.size() * 2 );
+    }
+
+    for(auto s : tokens)
+    {
+      if (i>=num_of_tokens)
+      {
+        break;
+      }
+      //does not copy the string content.
+      (*m_schema_values)[i++].set_string_nocopy(s);
+    }
+    m_upper_bound = i;
+
+  }
+
+  int get_column_pos(const char* n)
+  {
+    //done only while building the AST, not at "runtime"
+
+    for( auto iter : m_column_name_pos)
+    {
+      if (!strcmp(iter.first.c_str(), n))
+      {
+        return iter.second;
+      }
+    }
+
+    return -1;
+  }
+
+  void set_parquet_type()
+  {
+    parquet_type = true;
+  }
+
+  void get_column_value(uint16_t column_pos, value &v)
+  {
+    if (column_pos > ((*m_schema_values).size()-1))
+    {
+      throw base_s3select_exception("accessing scratch buffer beyond its size");
+    }
+
+    v = (*m_schema_values)[ column_pos ];
+  }
+
+  value* get_column_value(uint16_t column_pos)
+  {
+    if (column_pos > ((*m_schema_values).size()-1))
+    {
+      throw base_s3select_exception("accessing scratch buffer beyond its size");
+    }
+
+    return &(*m_schema_values)[ column_pos ];
+  }
+
+  int get_num_of_columns()
+  {
+    return m_upper_bound;
+  }
+
+  int update_json_varible(value v,int json_idx)
+  {
+    if(json_idx>max_json_idx)
+    {
+      max_json_idx = json_idx;
+    }
+
+    //increase the Vector::m_schema_values capacity (it should happen few times)
+    if ((*m_schema_values).capacity() < static_cast<size_t>(max_json_idx))
+    {
+      (*m_schema_values).resize(max_json_idx * 2);
+    }
+
+    (*m_schema_values)[ json_idx ] = v;
+
+    if(json_idx>m_upper_bound)
+    {
+      m_upper_bound = json_idx;
+    }
+    return 0;
+  }
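+// A sketch of a tokenized CSV row landing here and being read back
+// (hypothetical driver; assumes this header):
+//
+//   s3selectEngine::scratch_area sa;
+//   char c0[] = "7", c1[] = "foo";
+//   std::vector<char*> row = {c0, c1};
+//   sa.update(row, row.size());         // stores the pointers, no copy
+//   s3selectEngine::value v;
+//   sa.get_column_value(1, v);          // second column (zero-based)
+//   // v.to_string() == "foo"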
+#ifdef _ARROW_EXIST
+
+#define S3SELECT_MICROSEC (1000*1000)
+#define S3SELECT_MILLISEX (1000)
+
+  int update(std::vector<parquet_file_parser::parquet_value_t> &parquet_row_value, parquet_file_parser::column_pos_t &column_positions)
+  {
+    //TODO no need for a copy; it is possible to save a reference (it saves the last row for calculation)
+
+    parquet_file_parser::column_pos_t::iterator column_pos_iter = column_positions.begin();
+    m_upper_bound =0;
+    buff_loc=0;
+
+    //increase the Vector::m_schema_values capacity (it should happen few times)
+    if ((*m_schema_values).capacity() < parquet_row_value.size())
+    {
+      (*m_schema_values).resize(parquet_row_value.size() * 2);
+    }
+
+    if (*column_pos_iter > ((*m_schema_values).size()-1))
+    {
+      throw base_s3select_exception("accessing scratch buffer beyond its size");
+    }
+
+    for(auto v : parquet_row_value)
+    {
+      //TODO (parquet_value_t) --> (value), or better get it as value (i.e. the parquet reader knows class value)
+      //TODO temporary
+      switch( v.type )
+      {
+      case parquet_file_parser::parquet_type::INT32:
+        (*m_schema_values)[ *column_pos_iter ] = v.num;
+        break;
+
+      case parquet_file_parser::parquet_type::INT64:
+        (*m_schema_values)[ *column_pos_iter ] = v.num;
+        break;
+
+      case parquet_file_parser::parquet_type::DOUBLE:
+        (*m_schema_values)[ *column_pos_iter ] = v.dbl;
+        break;
+
+      case parquet_file_parser::parquet_type::STRING:
+        //TODO waste of CPU
+        //TODO value needs to represent a string with char* and length
+
+        memcpy(str_buff+buff_loc, v.str, v.str_len);
+        str_buff[buff_loc+v.str_len] = 0;
+        (*m_schema_values)[ *column_pos_iter ] = str_buff+buff_loc;
+        buff_loc += v.str_len+1;
+        break;
+
+      case parquet_file_parser::parquet_type::PARQUET_NULL:
+
+        (*m_schema_values)[ *column_pos_iter ].setnull();
+        break;
+
+      case parquet_file_parser::parquet_type::TIMESTAMP: //TODO milli-sec, micro-sec, nano-sec
+      {
+        auto tm_sec = v.num/S3SELECT_MICROSEC; //TODO should use the correct unit
+        boost::posix_time::ptime new_ptime = boost::posix_time::from_time_t( tm_sec );
+        boost::posix_time::time_duration td_zero((tm_sec/3600)%24,(tm_sec/60)%24,tm_sec%60);
+        tmstmp = std::make_tuple(new_ptime, td_zero, (char)'Z');
+        (*m_schema_values)[ *column_pos_iter ] = &tmstmp;
+      }
+      break;
+
+      default:
+        throw base_s3select_exception("wrong parquet type for conversion.");
+
+        //return -1;//TODO exception
+      }
+      m_upper_bound = *column_pos_iter+1;
+      column_pos_iter ++;
+    }
+    return 0;
+  }
+#endif // _ARROW_EXIST
+
+};
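
The class defined next is the abstract AST node; the smallest concrete subclass only needs eval_internal(), print(), and semantic(). A sketch (hypothetical `const_node`, assumes this header):

    #include "s3select_oper.h"

    class const_node : public s3selectEngine::base_statement {
      s3selectEngine::value v;
    public:
      explicit const_node(int64_t n) : v(n) {}
      s3selectEngine::value& eval_internal() override { return v; }
      std::string print(int) override { return "const"; }
      bool semantic() override { return true; }
    };

    int main() {
      const_node forty_two(42);
      return forty_two.eval().i64() == 42 ? 0 : 1; // eval() defers to eval_internal()
    }
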
+class base_statement
+{
+
+protected:
+
+  scratch_area* m_scratch;
+  projection_alias* m_aliases;
+  bool is_last_call; //valid only for aggregation functions
+  bool m_is_cache_result;
+  value m_alias_result;
+  base_statement* m_projection_alias;
+  int m_eval_stack_depth;
+  bool m_skip_non_aggregate_op;
+  value value_na;
+  //JSON queries have a different syntax from the other data sources (Parquet, CSV)
+  bool m_json_statement;
+  uint64_t number_of_calls = 0;
+  std::string operator_name;
+
+public:
+  base_statement():m_scratch(nullptr), is_last_call(false), m_is_cache_result(false),
+    m_projection_alias(nullptr), m_eval_stack_depth(0), m_skip_non_aggregate_op(false),m_json_statement(false) {}
+
+  void set_operator_name(const char* op)
+  {
+#ifdef S3SELECT_PROF
+    operator_name = op;
+#endif
+  }
+
+  virtual value& eval()
+  {
+#ifdef S3SELECT_PROF
+    number_of_calls++;
+#endif
+    //purpose: on aggregation flow, run only the correct subtree (the aggregation subtree)
+
+    if (m_skip_non_aggregate_op == false)
+      return eval_internal();//not skipping this node.
+    else
+    {
+      //skipping this node.
+      //in case execution should skip a node, it will traverse (left and right)
+      //and search for a subtree to execute.
+      //example: sum( ... ) - sum( ... ) ; the minus operand is skipped while the sum() operands are not.
+      if(left())
+        left()->eval_internal();
+
+      if(right())
+        right()->eval_internal();
+
+    }
+
+    return value_na;
+  }
+
+  virtual value& eval_internal() = 0;
+
+public:
+  virtual base_statement* left() const
+  {
+    return 0;
+  }
+  virtual base_statement* right() const
+  {
+    return 0;
+  }
+  virtual std::string print(int ident) =0;//TODO complete it; one option is to use a level parameter in the interface
+  virtual bool semantic() =0;//done once, post-syntax; traverse all nodes and validate semantics.
+
+  virtual void traverse_and_apply(scratch_area* sa, projection_alias* pa,bool json_statement)
+  {
+    m_scratch = sa;
+    m_aliases = pa;
+    m_json_statement = json_statement;
+
+    if (left())
+    {
+      left()->traverse_and_apply(m_scratch, m_aliases, json_statement);
+    }
+    if (right())
+    {
+      right()->traverse_and_apply(m_scratch, m_aliases, json_statement);
+    }
+  }
+
+  virtual void set_skip_non_aggregate(bool skip_non_aggregate_op)
+  {
+    m_skip_non_aggregate_op = skip_non_aggregate_op;
+
+    if (left())
+    {
+      left()->set_skip_non_aggregate(m_skip_non_aggregate_op);
+    }
+    if (right())
+    {
+      right()->set_skip_non_aggregate(m_skip_non_aggregate_op);
+    }
+  }
+
+  virtual bool is_aggregate() const
+  {
+    return false;
+  }
+
+  virtual bool is_column() const
+  {
+    return false;
+  }
+
+  virtual bool is_star_operation() const
+  {
+    return false;
+  }
+
+  virtual void resolve_node()
+  {//part of the semantic analysis (TODO maybe the semantic method should handle this)
+    if (left())
+    {
+      left()->resolve_node();
+    }
+    if (right())
+    {
+      right()->resolve_node();
+    }
+  }
+
+  bool is_json_statement()
+  {
+    return m_json_statement;
+  }
+
+  bool is_function() const;
+  const base_statement* get_aggregate() const;
+  bool is_nested_aggregate(bool&) const;
+  bool is_column_reference() const;
+  bool mark_aggreagtion_subtree_to_execute();
+  bool is_statement_contain_star_operation() const;
+  void push_for_cleanup(std::set<base_statement*>&);
+
+#ifdef _ARROW_EXIST
+  void extract_columns(parquet_file_parser::column_pos_t &cols,const uint16_t max_columns);
+#endif
+
+  virtual void set_last_call()
+  {
+    is_last_call = true;
+    if(left())
+    {
+      left()->set_last_call();
+    }
+    if(right())
+    {
+      right()->set_last_call();
+    }
+  }
+
+  bool is_set_last_call()
+  {
+    return is_last_call;
+  }
+
+  void invalidate_cache_result()
+  {
+    m_is_cache_result = false;
+  }
+
+  bool is_result_cached()
+  {
+    return m_is_cache_result == true;
+  }
+
+  void set_result_cache(value& eval_result)
+  {
+    m_alias_result = eval_result;
+    m_is_cache_result = true;
+  }
+
+  void dec_call_stack_depth()
+  {
+    m_eval_stack_depth --;
+  }
+
+  value& get_result_cache()
+  {
+    return m_alias_result;
+  }
+
+  int& get_eval_call_depth()
+  {
+    m_eval_stack_depth++;
+    return m_eval_stack_depth;
+  }
+
+  virtual ~base_statement()
+  {
+#ifdef S3SELECT_PROF
+    std::cout<< operator_name << ":" << number_of_calls << std::endl;
+#endif
+  }
+
+  void dtor()
+  {//explicit destructor call; used by the cleanup of AST nodes allocated with placement new
+    this->~base_statement();
+  }
+
+  scratch_area* getScratchArea()
+  {
+    return m_scratch;
+  }
+
+  projection_alias* getAlias()
+  {
+    return m_aliases;
+  }
+
+};
+
+class variable : public base_statement
+{
+
+public:
+
+  enum class var_t
+  {
+    NA,
+    VARIABLE_NAME,//schema column (i.e. age , price , ...)
+    COLUMN_VALUE, //concrete value (string,number,boolean)
+    JSON_VARIABLE,//a key-path reference
+    POS, // CSV column number (i.e. _1 , _2 ... )
+    STAR_OPERATION, //'*'
+  } ;
+  var_t m_var_type;
+
+private:
+
+  std::string _name;
+  int column_pos;
+  value var_value;
+  int json_variable_idx;
+
+  const int undefined_column_pos = -1;
+  const int column_alias = -2;
+  const char* this_operator_name = "variable";
+
+public:
+  variable():m_var_type(var_t::NA), _name(""), column_pos(-1), json_variable_idx(-1){set_operator_name(this_operator_name);}
+
+  explicit variable(int64_t i) : m_var_type(var_t::COLUMN_VALUE), column_pos(-1), var_value(i), json_variable_idx(-1){set_operator_name(this_operator_name);}
+
+  explicit variable(double d) : m_var_type(var_t::COLUMN_VALUE), _name("#"), column_pos(-1), var_value(d), json_variable_idx(-1){set_operator_name(this_operator_name);}
+
+  explicit variable(int i) : m_var_type(var_t::COLUMN_VALUE), column_pos(-1), var_value(i), json_variable_idx(-1){set_operator_name(this_operator_name);}
+
+  explicit variable(const std::string& n) : m_var_type(var_t::VARIABLE_NAME), _name(n), column_pos(-1), json_variable_idx(-1){set_operator_name(this_operator_name);}
+
+  explicit variable(const std::string& n, var_t tp, size_t json_idx) : m_var_type(var_t::NA)
+  {//only upon the JSON use case
+    set_operator_name(this_operator_name);
+    if(tp == variable::var_t::JSON_VARIABLE)
+    {
+      m_var_type = variable::var_t::JSON_VARIABLE;
+      json_variable_idx = static_cast<int>(json_idx);
+      _name = n;//"#"; debug
+    }
+  }
+
+  variable(const std::string& n, var_t tp) : m_var_type(var_t::NA)
+  {
+    set_operator_name(this_operator_name);
+    if(tp == variable::var_t::POS)
+    {
+      _name = n;
+      m_var_type = tp;
+      int pos = atoi( n.c_str() + 1 ); //TODO >0 < (schema definition , semantic analysis)
+      column_pos = pos -1;// _1 is the first column ( zero position )
+    }
+    else if (tp == variable::var_t::COLUMN_VALUE)
+    {
+      _name = "#";
+      m_var_type = tp;
+      column_pos = -1;
+      var_value = n.c_str();
+    }
+    else if (tp ==variable::var_t::STAR_OPERATION)
+    {
+      _name = "#";
+      m_var_type = tp;
+      column_pos = -1;
+    }
+  }
+
+  explicit variable(s3select_reserved_word::reserve_word_en_t reserve_word)
+  {
+    set_operator_name(this_operator_name);
+    if (reserve_word == s3select_reserved_word::reserve_word_en_t::S3S_NULL)
+    {
+      m_var_type = variable::var_t::COLUMN_VALUE;
+      column_pos = undefined_column_pos;
+      var_value.type = value::value_En_t::S3NULL;//TODO use set_null
+    }
+    else if (reserve_word == s3select_reserved_word::reserve_word_en_t::S3S_NAN)
+    {
+      m_var_type = variable::var_t::COLUMN_VALUE;
+      column_pos = undefined_column_pos;
+      var_value.set_nan();
+    }
+    else if (reserve_word == s3select_reserved_word::reserve_word_en_t::S3S_TRUE)
+    {
+      m_var_type = variable::var_t::COLUMN_VALUE;
+      column_pos = -1;
+      var_value.set_true();
+    }
+    else if (reserve_word == s3select_reserved_word::reserve_word_en_t::S3S_FALSE)
+    {
+      m_var_type = variable::var_t::COLUMN_VALUE;
+      column_pos = -1;
+      var_value.set_false();
+    }
+    else
+    {
+      _name = "#";
+      m_var_type = var_t::NA;
+      column_pos = undefined_column_pos;
+    }
+  }
+
+  void operator=(value& v)
+  {
+    var_value = v;
+  }
+
+  void set_value(const char* s)
+  {
+    var_value = s;
+  }
+
+  void set_value(double d)
+  {
+    var_value = d;
+  }
+
+  void set_value(int64_t i)
+  {
+    var_value = i;
+  }
+
+  void set_value(timestamp_t* p)
+  {
+    var_value = p;
+  }
+
+  void set_value(bool b)
+  {
+    var_value = b;
+    var_value.type = value::value_En_t::BOOL;
+  }
+
+  void set_null()
+  {
+    var_value.setnull();
+  }
+
+  void set_precision_scale(int32_t* p, int32_t* s)
+  {
+    var_value.set_precision_scale(p, s);
+  }
+
+  virtual ~variable() {}
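+// The constructors above map SQL tokens onto variable kinds; the common cases
+// look roughly like this (hypothetical snippet; assumes this header):
+//
+//   variable lit((int64_t)42);                  // COLUMN_VALUE: the literal 42
+//   variable pos("_3", variable::var_t::POS);   // POS: third CSV column (column_pos 2)
+//   variable name(std::string("price"));        // VARIABLE_NAME: resolved at first eval()
+//   variable star("*", variable::var_t::STAR_OPERATION); // '*' projection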
+  virtual bool is_column() const //is a reference to a column.
+  {
+    if(m_var_type == var_t::VARIABLE_NAME || m_var_type == var_t::POS || m_var_type == var_t::STAR_OPERATION)
+    {
+      return true;
+    }
+    return false;
+  }
+
+  virtual bool is_star_operation() const
+  {
+    if(m_var_type == var_t::STAR_OPERATION)
+    {
+      return true;
+    }
+    return false;
+  }
+
+  value& get_value()
+  {
+    return var_value; //TODO is it correct
+  }
+
+  std::string get_name()
+  {
+    return _name;
+  }
+
+  int get_column_pos()
+  {
+    return column_pos;
+  }
+
+  virtual value::value_En_t get_value_type()
+  {
+    return var_value.type;
+  }
+
+  value& star_operation()
+  {//purpose: return the content of all columns in the input stream
+    if(is_json_statement())
+      return json_star_operation();
+
+    var_value.multiple_values.clear();
+    for(int i=0; i<m_scratch->get_num_of_columns(); i++)
+    {
+      var_value.multiple_values.push_value( m_scratch->get_column_value(i) );
+    }
+    var_value.type = value::value_En_t::MULTIPLE_VALUES;
+    return var_value;
+  }
+
+  value& json_star_operation()
+  {//purpose: per the JSON star-operation it needs to get the column-name (full path) and its value
+
+    var_value.multiple_values.clear();
+    for(auto& key_value : *m_scratch->get_star_operation_cont())
+    {
+      key_value.second.set_json_key_path(key_value.first);
+      var_value.multiple_values.push_value(&key_value.second);
+    }
+
+    var_value.type = value::value_En_t::MULTIPLE_VALUES;
+
+    return var_value;
+  }
+
+  virtual value& eval_internal()
+  {
+    if (m_var_type == var_t::COLUMN_VALUE)
+    {
+      return var_value; // a literal; could be decimal / float / string
+    }
+    else if(m_var_type == var_t::STAR_OPERATION)
+    {
+      return star_operation();
+    }
+    else if(m_var_type == var_t::JSON_VARIABLE && json_variable_idx >= 0)
+    {
+      column_pos = json_variable_idx; //TODO handle column alias
+    }
+    else if (column_pos == undefined_column_pos)
+    {
+      //done once, for the first time
+      column_pos = m_scratch->get_column_pos(_name.c_str());
+
+      if(column_pos>=0 && m_aliases->search_alias(_name.c_str()))
+      {
+        throw base_s3select_exception(std::string("multiple definition of column {") + _name + "} as schema-column and alias", base_s3select_exception::s3select_exp_en_t::FATAL);
+      }
+
+
+      if (column_pos == undefined_column_pos)
+      {
+        //does not belong to the schema; should exist in the aliases
+        m_projection_alias = m_aliases->search_alias(_name.c_str());
+
+        //do not enter this scope again
+        column_pos = column_alias;
+        if(m_projection_alias == 0)
+        {
+          throw base_s3select_exception(std::string("alias {")+_name+std::string("} or column not exist in schema"), base_s3select_exception::s3select_exp_en_t::FATAL);
+        }
+      }
+
+    }
+
+    if (m_projection_alias)
+    {
+      if (m_projection_alias->get_eval_call_depth()>2)
+      {
+        throw base_s3select_exception("number of calls exceed maximum size, probably a cyclic reference to alias", base_s3select_exception::s3select_exp_en_t::FATAL);
+      }
+
+      if (m_projection_alias->is_result_cached() == false)
+      {
+        var_value = m_projection_alias->eval();
+        m_projection_alias->set_result_cache(var_value);
+      }
+      else
+      {
+        var_value = m_projection_alias->get_result_cache();
+      }
+
+      m_projection_alias->dec_call_stack_depth();
+    }
+    else
+    {
+      m_scratch->get_column_value(column_pos,var_value);
+      //in the case of a successive column-delimiter {1,some_data,,3} => the third column is NULL
+      if (var_value.is_string() && (var_value.str()== 0 || (var_value.str() && *var_value.str()==0))){
+        var_value.setnull();//TODO is it correct for Parquet
+      }
+    }
+
+    return var_value;
+  }
virtual std::string print(int ident) + { + //std::string out = std::string(ident,' ') + std::string("var:") + std::to_string(var_value.__val.num); + //return out; + return std::string("#");//TBD + } + + virtual bool semantic() + { + return false; + } + +}; + +class arithmetic_operand : public base_statement +{ + +public: + + enum class cmp_t {NA, EQ, LE, LT, GT, GE, NE} ; + +private: + base_statement* l; + base_statement* r; + + cmp_t _cmp; + value var_value; + bool negation_result;//false: don't negate; true upon a unary NOT operator + +public: + + virtual bool semantic() + { + return true; + } + + base_statement* left() const override + { + return l; + } + base_statement* right() const override + { + return r; + } + + virtual std::string print(int ident) + { + //std::string out = std::string(ident,' ') + "compare:" += std::to_string(_cmp) + "\n" + l->print(ident-5) +r->print(ident+5); + //return out; + return std::string("#");//TBD + } + + virtual value& eval_internal() + { + value l_val = l->eval(); + value r_val; + if (l_val.is_null()) { + var_value.setnull(); + return var_value; + } else {r_val = r->eval();} + if(r_val.is_null()) { + var_value.setnull(); + return var_value; + } + + switch (_cmp) + { + case cmp_t::EQ: + return var_value = bool( (l_val == r_val) ^ negation_result ); + break; + + case cmp_t::LE: + return var_value = bool( (l_val <= r_val) ^ negation_result ); + break; + + case cmp_t::GE: + return var_value = bool( (l_val >= r_val) ^ negation_result ); + break; + + case cmp_t::NE: + return var_value = bool( (l_val != r_val) ^ negation_result ); + break; + + case cmp_t::GT: + return var_value = bool( (l_val > r_val) ^ negation_result ); + break; + + case cmp_t::LT: + return var_value = bool( (l_val < r_val) ^ negation_result ); + break; + + default: + throw base_s3select_exception("internal error"); + break; + } + } + + arithmetic_operand(base_statement* _l, cmp_t c, base_statement* _r):l(_l), r(_r), _cmp(c),negation_result(false){set_operator_name("arithmetic_operand");} + + explicit arithmetic_operand(base_statement* p)//NOT operator + { + l = dynamic_cast<arithmetic_operand*>(p)->l; + r = dynamic_cast<arithmetic_operand*>(p)->r; + _cmp = dynamic_cast<arithmetic_operand*>(p)->_cmp; + // not( not ( logical expression )) == ( logical expression ); there is no limit on the number of NOTs. + negation_result = ! dynamic_cast<arithmetic_operand*>(p)->negation_result; + } + + virtual ~arithmetic_operand() {} +};
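Aside: arithmetic_operand above folds a pending NOT into each comparison with `^ negation_result`. A standalone check of that identity (XOR with true flips the outcome, and a double NOT cancels):

```cpp
#include <cassert>

// bool(x) ^ negation_result: XOR with true flips the comparison
// outcome, XOR with false leaves it unchanged; two NOTs cancel
// because the NOT constructor re-flips negation_result.
int main()
{
  for (bool cmp : {false, true}) {
    assert((cmp ^ false) == cmp);          // no NOT pending
    assert((cmp ^ true)  == !cmp);         // one NOT pending
    assert(((cmp ^ true) ^ true) == cmp);  // NOT(NOT(x)) == x
  }
  return 0;
}
```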
class logical_operand : public base_statement +{ + +public: + + enum class oplog_t {AND, OR, NA}; + +private: + base_statement* l; + base_statement* r; + + oplog_t _oplog; + value var_value; + bool negation_result;//false: don't negate; true upon a unary NOT operator + +public: + + base_statement* left() const override + { + return l; + } + base_statement* right() const override + { + return r; + } + + virtual bool semantic() + { + return true; + } + + logical_operand(base_statement* _l, oplog_t _o, base_statement* _r):l(_l), r(_r), _oplog(_o),negation_result(false){set_operator_name("logical_operand");} + + explicit logical_operand(base_statement * p)//NOT operator + { + l = dynamic_cast<logical_operand*>(p)->l; + r = dynamic_cast<logical_operand*>(p)->r; + _oplog = dynamic_cast<logical_operand*>(p)->_oplog; + // not( not ( logical expression )) == ( logical expression ); there is no limit on the number of NOTs. + negation_result = ! dynamic_cast<logical_operand*>(p)->negation_result; + } + + virtual ~logical_operand() {} + + virtual std::string print(int ident) + { + //std::string out = std::string(ident, ' ') + "logical_operand:" += std::to_string(_oplog) + "\n" + l->print(ident - 5) + r->print(ident + 5); + //return out; + return std::string("#");//TBD + } + virtual value& eval_internal() + { + if (!l || !r) + { + throw base_s3select_exception("missing operand for logical operator", base_s3select_exception::s3select_exp_en_t::FATAL); + } + value a = l->eval(); + if (_oplog == oplog_t::AND) + { + if (!a.is_null() && a.i64() == false) { + bool res = false ^ negation_result; + return var_value = res; + } + value b = r->eval(); + if(!b.is_null() && b.i64() == false) { + bool res = false ^ negation_result; + return var_value = res; + } else { + if (a.is_null() || b.is_null()) { + var_value.setnull(); + return var_value; + } else { + bool res = true ^ negation_result; + return var_value = res; + } + } + } + else + { + if (a.is_true()) { + bool res = true ^ negation_result; + return var_value = res; + } + value b = r->eval(); + if(b.is_true()) { + bool res = true ^ negation_result; + return var_value = res; + } else { + if (a.is_null() || b.is_null()) { + var_value.setnull(); + return var_value; + } else { + bool res = false ^ negation_result; + return var_value = res; + } + } + } + } +}; + +class mulldiv_operation : public base_statement +{ + +public: + + enum class muldiv_t {NA, MULL, DIV, POW, MOD} ; + +private: + base_statement* l; + base_statement* r; + + muldiv_t _mulldiv; + value var_value; + value tmp_value; + +public: + + base_statement* left() const override + { + return l; + } + base_statement* right() const override + { + return r; + } + + virtual bool semantic() + { + return true; + } + + virtual std::string print(int ident) + { + //std::string out = std::string(ident, ' ') + "mulldiv_operation:" += std::to_string(_mulldiv) + "\n" + l->print(ident - 5) + r->print(ident + 5); + //return out; + return std::string("#");//TBD + } + + virtual value& eval_internal() + { + switch (_mulldiv) + { + case muldiv_t::MULL: + tmp_value = l->eval(); + return var_value = tmp_value * r->eval(); + break; + + case muldiv_t::DIV: + tmp_value = l->eval(); + return var_value = tmp_value / r->eval(); + break; + + case muldiv_t::POW: + tmp_value = l->eval(); + return var_value = tmp_value ^ r->eval(); + break; + + case muldiv_t::MOD: + tmp_value = l->eval(); + return var_value = tmp_value % r->eval(); + break; + + default: + throw base_s3select_exception("internal error"); + break; + } + } + + mulldiv_operation(base_statement* _l, muldiv_t c, base_statement* _r):l(_l), r(_r), _mulldiv(c){set_operator_name("mulldiv_operation");} + + virtual ~mulldiv_operation() {} +};
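Aside: logical_operand above implements SQL three-valued logic, where FALSE dominates NULL for AND (and, dually, TRUE dominates for OR). A compact model using std::optional&lt;bool&gt; as a stand-in for the engine's value type:

```cpp
#include <cassert>
#include <optional>

using tri = std::optional<bool>; // nullopt models SQL NULL

// SQL AND: FALSE on either side dominates, even against NULL;
// NULL only when the result is genuinely undecided.
tri sql_and(tri a, tri b)
{
  if ((a && !*a) || (b && !*b)) return false; // either side is FALSE
  if (!a || !b) return std::nullopt;          // undecided
  return true;
}

int main()
{
  assert(sql_and(false, std::nullopt) == tri(false)); // FALSE AND NULL = FALSE
  assert(sql_and(true,  std::nullopt) == std::nullopt); // TRUE AND NULL = NULL
  assert(sql_and(true,  true) == tri(true));
  return 0;
}
```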
class addsub_operation : public base_statement +{ + +public: + + enum class addsub_op_t {ADD, SUB, NA}; + +private: + base_statement* l; + base_statement* r; + + addsub_op_t _op; + value var_value; + value tmp_value; + +public: + + base_statement* left() const override + { + return l; + } + base_statement* right() const override + { + return r; + } + + virtual bool semantic() + { + return true; + } + + addsub_operation(base_statement* _l, addsub_op_t _o, base_statement* _r):l(_l), r(_r), _op(_o) {} + + virtual ~addsub_operation() {} + + virtual std::string print(int ident) + { + //std::string out = std::string(ident, ' ') + "addsub_operation:" += std::to_string(_op) + "\n" + l->print(ident - 5) + r->print(ident + 5); + return std::string("#");//TBD + } + + virtual value& eval_internal() + { + if (_op == addsub_op_t::NA) // -num, +num: a unary operation on a number + { + if (l) + { + return var_value = l->eval(); + } + else if (r) + { + return var_value = r->eval(); + } + } + else if (_op == addsub_op_t::ADD) + {tmp_value=l->eval(); + return var_value = (tmp_value + r->eval()); + } + else + {tmp_value=l->eval(); + return var_value = (tmp_value - r->eval()); + } + + return var_value; + } +}; + +class negate_function_operation : public base_statement +{ + //purpose: some functions (between, like, in) participate in the where-clause as predicates; thus the unary NOT operator may operate on them. + + private: + + base_statement* function_to_negate; + value res; + + public: + + explicit negate_function_operation(base_statement *f):function_to_negate(f){set_operator_name("negate_function_operation");} + + virtual std::string print(int ident) + { + return std::string("#");//TBD + } + + virtual bool semantic() + { + return true; + } + + base_statement* left() const override + { + return function_to_negate; + } + + virtual value& eval_internal() + { + res = function_to_negate->eval(); + + if (res.is_number() || res.is_bool())//TODO is integer type + { + if (res.is_true()) + { + res = (bool)0; + } + else + { + res = (bool)1; + } + } + + return res; + } + +}; + +class base_function +{ + +protected: + bool aggregate; + +public: + //TODO add semantic() to base-function; it operates once, on function creation + // validate semantics on creation instead of at run-time + virtual bool operator()(bs_stmt_vec_t* args, variable* result) = 0; + std::string m_function_name; + base_function() : aggregate(false) {} + bool is_aggregate() const + { + return aggregate == true; + } + virtual void get_aggregate_result(variable*) {} + + virtual ~base_function() = default; + + virtual void dtor() + {//release the function-body implementation + this->~base_function(); + } + + void check_args_size(bs_stmt_vec_t* args, uint16_t required, const char* error_msg) + {//verify at least the required number of parameters + if(args->size() < required) + { + throw base_s3select_exception(error_msg,base_s3select_exception::s3select_exp_en_t::FATAL); + } + } + + void check_args_size(bs_stmt_vec_t* args,uint16_t required) + { + if(args->size() < required) + { + std::string error_msg = m_function_name + " requires at least " + std::to_string(required) + " arguments"; + throw base_s3select_exception(error_msg,base_s3select_exception::s3select_exp_en_t::FATAL); + } + } + + void set_function_name(const char* name) + { + m_function_name.assign(name); + } +};
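Aside: a toy model of the base_function contract above, with simplified stand-ins for bs_stmt_vec_t and variable (hypothetical `toy_upper`, shown only to illustrate the operator()(args, result) shape and the arity check):

```cpp
#include <cassert>
#include <cctype>
#include <string>
#include <vector>

struct toy_result { std::string str; };

// Simplified stand-in for base_function: evaluate arguments,
// write into a result slot, return true on success.
struct toy_function
{
  bool aggregate = false;
  std::string m_function_name;
  virtual bool operator()(std::vector<std::string>* args, toy_result* result) = 0;
  virtual ~toy_function() = default;
};

struct toy_upper : toy_function
{
  bool operator()(std::vector<std::string>* args, toy_result* result) override
  {
    if (args->size() < 1) return false; // mirrors check_args_size()
    result->str = args->front();
    for (auto& c : result->str)
      c = static_cast<char>(std::toupper(static_cast<unsigned char>(c)));
    return true;
  }
};

int main()
{
  toy_upper f;
  std::vector<std::string> args{"hello"};
  toy_result r;
  assert(f(&args, &r) && r.str == "HELLO");
  return 0;
}
```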
parameters"); + } + + base_statement* dt1_param = *iter; + value val_ts1 = dt1_param->eval(); + + if (val_ts1.is_timestamp() == false) + { + throw base_s3select_exception("second parameter should be timestamp"); + } + + iter++; + base_statement* dt2_param = *iter; + value val_ts2 = dt2_param->eval(); + + if (val_ts2.is_timestamp() == false) + { + throw base_s3select_exception("third parameter should be timestamp"); + } + + boost::posix_time::ptime ts1_ptime; + boost::posix_time::time_duration ts1_td; + boost::posix_time::ptime ts2_ptime; + boost::posix_time::time_duration ts2_td; + + std::tie(ts1_ptime, ts1_td, std::ignore) = *val_ts1.timestamp(); + std::tie(ts2_ptime, ts2_td, std::ignore) = *val_ts2.timestamp(); + + ptime1 = ts1_ptime + boost::posix_time::hours(ts1_td.hours() * -1); + ptime1 += boost::posix_time::minutes(ts1_td.minutes() * -1); + ptime2 = ts2_ptime + boost::posix_time::hours(ts2_td.hours() * -1); + ptime2 += boost::posix_time::minutes(ts2_td.minutes() * -1); + } + +}; + +class base_date_add : public base_function +{ + protected: + value val_quantity; + boost::posix_time::ptime new_ptime; + boost::posix_time::time_duration td; + bool flag; + timestamp_t new_tmstmp; + + public: + void param_validation(bs_stmt_vec_t*& args) + { + auto iter = args->begin(); + int args_size = args->size(); + + if (args_size < 2) + { + throw base_s3select_exception("add_to_timestamp should have 3 parameters"); + } + + base_statement* quan = *iter; + val_quantity = quan->eval(); + + if (val_quantity.is_number() == false) + { + throw base_s3select_exception("second parameter should be number"); //TODO what about double? + } + + iter++; + base_statement* ts = *iter; + value val_ts = ts->eval(); + + if(val_ts.is_timestamp() == false) + { + throw base_s3select_exception("third parameter should be time-stamp"); + } + + std::tie(new_ptime, td, flag) = *val_ts.timestamp(); + } + +}; + +class base_time_to_string +{ + protected: + std::vector months = { "January", "February", "March","April", + "May", "June", "July", "August", "September", + "October", "November", "December"}; + public: + virtual std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) = 0; + virtual ~base_time_to_string() = default; +}; + +class derive_yyyy : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t yr = new_ptime.date().year(); + return std::string(param - 4, '0') + std::to_string(yr); + } +} yyyy_to_string; + +class derive_yy : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t yr = new_ptime.date().year(); + return std::string(2 - std::to_string(yr%100).length(), '0') + std::to_string(yr%100); + } +} yy_to_string; + +class derive_y : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t yr = new_ptime.date().year(); + return std::to_string(yr); + } +} y_to_string; + +class derive_mmmmm_month : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t mnth = new_ptime.date().month(); + return (months[mnth - 1]).substr(0, 1); + } +} mmmmm_month_to_string; + +class derive_mmmm_month : public base_time_to_string +{ 
+ public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t mnth = new_ptime.date().month(); + return months[mnth - 1]; + } +} mmmm_month_to_string; + +class derive_mmm_month : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t mnth = new_ptime.date().month(); + return (months[mnth - 1]).substr(0, 3); + } +} mmm_month_to_string; + +class derive_mm_month : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t mnth = new_ptime.date().month(); + std::string mnth_str = std::to_string(mnth); + return std::string(2 - mnth_str.length(), '0') + mnth_str; + } +} mm_month_to_string; + +class derive_m_month : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t mnth = new_ptime.date().month(); + return std::to_string(mnth); + } +} m_month_to_string; + +class derive_dd : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + std::string day = std::to_string(new_ptime.date().day()); + return std::string(2 - day.length(), '0') + day; + } +} dd_to_string; + +class derive_d : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + std::string day = std::to_string(new_ptime.date().day()); + return day; + } +} d_to_string; + +class derive_a : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t hr = new_ptime.time_of_day().hours(); + std::string meridiem = (hr < 12 ? "AM" : "PM"); + return meridiem; + } +} a_to_string; + +class derive_hh : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t hr = new_ptime.time_of_day().hours(); + std::string hr_12 = std::to_string(hr%12 == 0 ? 12 : hr%12); + return std::string(2 - hr_12.length(), '0') + hr_12; + } +} hh_to_string; + +class derive_h : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t hr = new_ptime.time_of_day().hours(); + std::string hr_12 = std::to_string(hr%12 == 0 ? 
12 : hr%12); + return hr_12; + } +} h_to_string; + +class derive_h2 : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t hr = new_ptime.time_of_day().hours(); + std::string hr_24 = std::to_string(hr); + return std::string(2 - hr_24.length(), '0') + hr_24; + } +} h2_to_string; + +class derive_h1 : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int64_t hr = new_ptime.time_of_day().hours(); + return std::to_string(hr); + } +} h1_to_string; + +class derive_mm : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + std::string mint = std::to_string(new_ptime.time_of_day().minutes()); + return std::string(2 - mint.length(), '0') + mint; + } +} mm_to_string; + +class derive_m : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + std::string mint = std::to_string(new_ptime.time_of_day().minutes()); + return mint; + } +} m_to_string; + +class derive_ss : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + std::string sec = std::to_string(new_ptime.time_of_day().seconds()); + return std::string(2 - sec.length(), '0') + sec; + } +} ss_to_string; + +class derive_s : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + std::string sec = std::to_string(new_ptime.time_of_day().seconds()); + return sec; + } +} s_to_string; + +class derive_frac_sec : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + std::string frac_seconds = std::to_string(new_ptime.time_of_day().fractional_seconds()); + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + frac_seconds = std::string(9 - frac_seconds.length(), '0') + frac_seconds; + #else + frac_seconds = std::string(6 - frac_seconds.length(), '0') + frac_seconds; + #endif + if (param >= frac_seconds.length()) + { + return frac_seconds + std::string(param - frac_seconds.length(), '0'); + } + else + { + return frac_seconds.substr(0, param); + } + } +} frac_sec_to_string; + +class derive_n : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int frac_seconds = new_ptime.time_of_day().fractional_seconds(); + + if(frac_seconds == 0) + return std::to_string(frac_seconds); + else + { + #if BOOST_DATE_TIME_POSIX_TIME_STD_CONFIG + return std::to_string(frac_seconds); + #else + return std::to_string(frac_seconds) + std::string(3, '0'); + #endif + } + } +} n_to_string; + +class derive_x1 : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int tz_hour = td.hours(); + int tz_minute = td.minutes(); + if (tz_hour == 0 && tz_minute == 0) + { + return "Z"; + } + else if (tz_minute == 0) + { + std::string tz_hr = std::to_string(std::abs(tz_hour)); + return (td.is_negative() ? 
"-" : "+") + std::string(2 - tz_hr.length(), '0') + tz_hr; + } + else + { + std::string tz_hr = std::to_string(std::abs(tz_hour)); + std::string tz_mn = std::to_string(std::abs(tz_minute)); + return (td.is_negative() ? "-" : "+") + std::string(2 - tz_hr.length(), '0') + tz_hr + std::string(2 - tz_mn.length(), '0') + tz_mn; + } + } +} x1_to_string; + +class derive_x2 : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int tz_hour = td.hours(); + int tz_minute = td.minutes(); + if (tz_hour == 0 && tz_minute == 0) + { + return "Z"; + } + else + { + std::string tz_hr = std::to_string(std::abs(tz_hour)); + std::string tz_mn = std::to_string(std::abs(tz_minute)); + return (td.is_negative() ? "-" : "+") + std::string(2 - tz_hr.length(), '0') + tz_hr + std::string(2 - tz_mn.length(), '0') + tz_mn; + } + } +} x2_to_string; + +class derive_x3 : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int tz_hour = td.hours(); + int tz_minute = td.minutes(); + if (tz_hour == 0 && tz_minute == 0) + { + return "Z"; + } + else + { + std::string tz_hr = std::to_string(std::abs(tz_hour)); + std::string tz_mn = std::to_string(std::abs(tz_minute)); + return (td.is_negative() ? "-" : "+") + std::string(2 - tz_hr.length(), '0') + tz_hr + ":" + std::string(2 - tz_mn.length(), '0') + tz_mn; + } + } +} x3_to_string; + +class derive_x : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + int tz_minute = td.minutes(); + std::string tz_hr = std::to_string(std::abs(td.hours())); + if (tz_minute == 0) + { + return (td.is_negative() ? "-" : "+") + std::string(2 - tz_hr.length(), '0') + tz_hr; + } + else + { + std::string tz_mn = std::to_string(std::abs(tz_minute)); + return (td.is_negative() ? "-" : "+") + std::string(2 - tz_hr.length(), '0') + tz_hr + std::string(2 - tz_mn.length(), '0') + tz_mn; + } + } +} x_to_string; + +class derive_xx : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + std::string tz_hr = std::to_string(std::abs(td.hours())); + std::string tz_mn = std::to_string(std::abs(td.minutes())); + return (td.is_negative() ? "-" : "+") + std::string(2 - tz_hr.length(), '0') + tz_hr + std::string(2 - tz_mn.length(), '0') + tz_mn; + } +} xx_to_string; + +class derive_xxx : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + std::string tz_hr = std::to_string(std::abs(td.hours())); + std::string tz_mn = std::to_string(std::abs(td.minutes())); + return (td.is_negative() ? 
"-" : "+") + std::string(2 - tz_hr.length(), '0') + tz_hr + ":" + std::string(2 - tz_mn.length(), '0') + tz_mn; + } +} xxx_to_string; + +class derive_delimiter : public base_time_to_string +{ + public: + std::string print_time(boost::posix_time::ptime& new_ptime, boost::posix_time::time_duration& td, uint32_t param) + { + char ch = param; + return std::string(1, ch); + } +} delimiter_to_string; + +class base_timestamp_to_string : public base_function +{ + protected: + boost::posix_time::ptime new_ptime; + boost::posix_time::time_duration td; + bool flag; + std::string format; + std::vector m_metachar {'y', 'M', 'd', 'a', 'h', 'H', 'm', 's', 'S', 'n', 'X', 'x'}; + std::vector m_metaword_vec {"yyy", "yy", "y", "MMMMM", "MMMM", "MMM", "MM", "M", + "dd", "d", "a", "hh", "h", "HH", "H", "mm", "m", "ss", "s", "n", + "XXXXX", "XXXX", "XXX", "XX", "X", "xxxxx", "xxxx", "xxx", "xx", + "x"}; + std::vector print_vector; + std::vector para; + bool initialized = false; + + using to_string_lib_t = std::map; + + const to_string_lib_t time_to_string_functions = + { + {"yyyy+", &yyyy_to_string}, + {"yyy", &y_to_string}, + {"yy", &yy_to_string}, + {"y", &y_to_string}, + {"MMMMM", &mmmmm_month_to_string}, + {"MMMM", &mmmm_month_to_string}, + {"MMM", &mmm_month_to_string}, + {"MM", &mm_month_to_string}, + {"M", &m_month_to_string}, + {"dd", &dd_to_string }, + {"d", &d_to_string }, + {"a", &a_to_string }, + {"hh", &hh_to_string}, + {"h", &h_to_string}, + {"HH", &h2_to_string}, + {"H", &h1_to_string}, + {"mm", &mm_to_string}, + {"m", &m_to_string}, + {"ss", &ss_to_string}, + {"s", &s_to_string}, + {"S+", &frac_sec_to_string}, + {"n", &n_to_string}, + {"XXXXX", &x3_to_string}, + {"XXXX", &x2_to_string}, + {"XXX", &x3_to_string}, + {"XX", &x2_to_string}, + {"X", &x1_to_string}, + {"xxxxx", &xxx_to_string}, + {"xxxx", &xx_to_string}, + {"xxx", &xxx_to_string}, + {"xx", &xx_to_string}, + {"x", &x_to_string}, + {"delimiter", &delimiter_to_string} + }; + + public: + void param_validation(bs_stmt_vec_t*& args) + { + auto iter = args->begin(); + int args_size = args->size(); + + if (args_size < 2) + { + throw base_s3select_exception("to_string need 2 parameters"); + } + + base_statement* dt1_param = *iter; + value val_timestamp = dt1_param->eval(); + + if (val_timestamp.is_timestamp() == false) + { + throw base_s3select_exception("first parameter should be timestamp"); + } + + iter++; + base_statement* frmt = *iter; + value val_format = frmt->eval(); + + if (val_format.is_string() == false) + { + throw base_s3select_exception("second parameter should be string"); + } + + std::tie(new_ptime, td, flag) = *val_timestamp.timestamp(); + format = val_format.to_string(); + } + + uint32_t length_same_char_str(std::string str, char ch) + { + int i = 0; + while(str[i] == ch) + i++; + return i; + } + + void prepare_to_string_vector(std::vector& print_vector, std::vector& para) + { + for (uint32_t i = 0; i < format.length(); i++) + { + if (std::find(m_metachar.begin(), m_metachar.end() , format[i]) != m_metachar.end()) + { + if (format.substr(i, 4).compare("yyyy") == 0) + { + uint32_t len = length_same_char_str(format.substr(i), 'y'); + auto it = time_to_string_functions.find("yyyy+"); + print_vector.push_back( it->second); + para.push_back(len); + i += len - 1; + continue; + } + else if (format[i] == 'S') + { + uint32_t len = length_same_char_str(format.substr(i), 'S'); + auto it = time_to_string_functions.find("S+"); + print_vector.push_back( it->second); + para.push_back(len); + i += len - 1; + continue; + } + + for (auto word 
: m_metaword_vec) + { + if (format.substr(i, word.length()).compare(word) == 0) + { + auto it = time_to_string_functions.find(word.c_str()); + print_vector.push_back( it->second); + para.push_back('\0'); + i += word.length() - 1; + break; + } + } + } + else + { + auto it = time_to_string_functions.find("delimiter"); + print_vector.push_back( it->second ); + para.push_back(format[i]); + } + } + } + + std::string execute_to_string(std::vector<base_time_to_string*>& print_vector, std::vector<uint32_t>& para) + { + std::string res; + int temp = 0; + for(auto p : print_vector) + { + res += p->print_time(new_ptime, td, para.at(temp)); + temp++; + } + return res; + } + +}; + 
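Aside: prepare_to_string_vector above greedily matches metawords against the lookup table and turns every other character into a delimiter entry. A simplified, standalone matcher showing the resulting token split (only a subset of the metawords, same longest-match idea):

```cpp
#include <cassert>
#include <string>
#include <vector>

// Simplified tokenizer in the spirit of prepare_to_string_vector:
// longest-match a few metawords; anything else is a literal delimiter.
std::vector<std::string> tokenize(const std::string& fmt)
{
  static const std::vector<std::string> words =
      {"yyyy", "MM", "dd", "hh", "mm", "ss"};
  std::vector<std::string> out;
  for (size_t i = 0; i < fmt.size();) {
    bool matched = false;
    for (const auto& w : words)
      if (fmt.compare(i, w.size(), w) == 0) {
        out.push_back(w); i += w.size(); matched = true; break;
      }
    if (!matched) out.push_back(std::string(1, fmt[i++])); // delimiter
  }
  return out;
}

int main()
{
  auto t = tokenize("yyyy-MM-dd hh:mm:ss");
  std::vector<std::string> expect =
      {"yyyy", "-", "MM", "-", "dd", " ", "hh", ":", "mm", ":", "ss"};
  assert(t == expect);
  return 0;
}
```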
class base_like : public base_function +{ + protected: + value like_expr_val; + value escape_expr_val; + bool constant_state = false; + #if REGEX_HS + hs_database_t* compiled_regex; + hs_scratch_t *scratch = NULL; + bool res; + #elif REGEX_RE2 + std::unique_ptr<RE2> compiled_regex; + #else + std::regex compiled_regex; + #endif + + public: + void param_validation(base_statement* escape_expr, base_statement* like_expr) + { + escape_expr_val = escape_expr->eval(); + if (escape_expr_val.type != value::value_En_t::STRING) + { + throw base_s3select_exception("escape expression must be a string"); + } + + like_expr_val = like_expr->eval(); + if (like_expr_val.type != value::value_En_t::STRING) + { + throw base_s3select_exception("like expression must be a string"); + } + } + + std::vector<char> transform(const char* s, char escape) + { + enum state_expr_t {START, ESCAPE, START_STAR_CHAR, START_METACHAR, START_ANYCHAR, METACHAR, + STAR_CHAR, ANYCHAR, END }; + state_expr_t st{START}; + + const char *p = s; + size_t size = strlen(s); + size_t i = 0; + std::vector<char> v; + + while(*p) + { + switch (st) + { + case START: + if (*p == escape) + { + st = ESCAPE; + v.push_back('^'); + } + else if (*p == '%') + { + v.push_back('^'); + v.push_back('.'); + v.push_back('*'); + st = START_STAR_CHAR; + } + else if (*p == '_') + { + v.push_back('^'); + v.push_back('.'); + st=START_METACHAR; + } + else + { + v.push_back('^'); + v.push_back(*p); + st=START_ANYCHAR; + } + break; + + case START_STAR_CHAR: + if (*p == escape) + { + st = ESCAPE; + } + else if (*p == '%') + { + st = START_STAR_CHAR; + } + else if (*p == '_') + { + v.push_back('.'); + st = METACHAR; + } + else + { + v.push_back(*p); + st = ANYCHAR; + } + break; + + case START_METACHAR: + if (*p == escape) + { + st = ESCAPE; + } + else if(*p == '_') + { + v.push_back('.'); + st = METACHAR; + } + else if(*p == '%') + { + v.push_back('.'); + v.push_back('*'); + st = STAR_CHAR; + } + else + { + v.push_back(*p); + st = ANYCHAR; + } + break; + + case START_ANYCHAR: + if (*p == escape) + { + st = ESCAPE; + } + else if (*p == '_' && i == size-1) + { + v.push_back('.'); + v.push_back('$'); + st = END; + } + else if (*p == '_') + { + v.push_back('.'); + st = METACHAR; + } + else if (*p == '%' && i == size-1) + { + v.push_back('.'); + v.push_back('*'); + v.push_back('$'); + st = END; + } + else if (*p == '%') + { + v.push_back('.'); + v.push_back('*'); + st = STAR_CHAR; + } + else if (i == size-1) + { + v.push_back(*p); + v.push_back('$'); + st = END; + } + else + { + v.push_back(*p); + st = ANYCHAR; + } + break; + + case METACHAR: + if (*p == escape) + { + st = ESCAPE; + } + else if (*p == '_' && i == size-1) + { + v.push_back('.'); + v.push_back('$'); + st = END; + } + else if (*p == '_') + { + v.push_back('.'); + st = METACHAR; + } + else if (*p == '%' && i == size-1) + { + v.push_back('.'); + v.push_back('*'); + v.push_back('$'); + st = END; + } + else if (*p == '%') + { + v.push_back('.'); + v.push_back('*'); + st = STAR_CHAR; + } + else if (i == size-1) + { + v.push_back(*p); + v.push_back('$'); + st = END; + } + else + { + v.push_back(*p); + st = ANYCHAR; + } + break; + + case ANYCHAR: + if (*p == escape) + { + st = ESCAPE; + } + else if (*p == '_' && i == size-1) + { + v.push_back('.'); + v.push_back('$'); + st = END; + } + else if (*p == '_') + { + v.push_back('.'); + st = METACHAR; + } + else if (*p == '%' && i == size-1) + { + v.push_back('.'); + v.push_back('*'); + v.push_back('$'); + st = END; + } + else if (*p == '%') + { + v.push_back('.'); + v.push_back('*'); + st = STAR_CHAR; + } + else if (i == size-1) + { + v.push_back(*p); + v.push_back('$'); + st = END; + } + else + { + v.push_back(*p); + st = ANYCHAR; + } + break; + + case STAR_CHAR: + if (*p == escape) + { + st = ESCAPE; + } + else if (*p == '%' && i == size-1) + { + v.push_back('$'); + st = END; + } + else if (*p == '%') + { + st = STAR_CHAR; + } + else if (*p == '_' && i == size-1) + { + v.push_back('.'); + v.push_back('$'); + st = END; + } + else if (*p == '_') + { + v.push_back('.'); + st = METACHAR; + } + else if (i == size-1) + { + v.push_back(*p); + v.push_back('$'); + st = END; + } + else + { + v.push_back(*p); + st = ANYCHAR; + } + break; + + case ESCAPE: + if (i == size-1) + { + v.push_back(*p); + v.push_back('$'); + st = END; + } + else + { + v.push_back(*p); + st = ANYCHAR; + } + break; + + case END: + return v; + + default: + throw base_s3select_exception("missing state!"); + break; + } + p++; + i++; + } + return v; + } + + void compile(std::vector<char>& like_regex) + { + std::string like_as_regex_str(like_regex.begin(), like_regex.end()); + + #if REGEX_HS + std::string temp = "^" + like_as_regex_str + "\\z"; //for anchoring start and end + char* c_regex = &temp[0]; + hs_compile_error_t *compile_err; + if (hs_compile(c_regex, HS_FLAG_DOTALL, HS_MODE_BLOCK, NULL, &compiled_regex, + &compile_err) != HS_SUCCESS) + { + throw base_s3select_exception("ERROR: Unable to compile pattern."); + } + + if (hs_alloc_scratch(compiled_regex, &scratch) != HS_SUCCESS) + { + throw base_s3select_exception("ERROR: Unable to allocate scratch space."); + } + #elif REGEX_RE2 + compiled_regex = std::make_unique<RE2>(like_as_regex_str); + #else + compiled_regex = std::regex(like_as_regex_str); + #endif + } + + void match(value& main_expr_val, variable* result) + { + std::string content_str = main_expr_val.to_string(); + #if REGEX_HS + const char* content = content_str.c_str(); + res = false; + + if (hs_scan(compiled_regex, content, strlen(content), 0, scratch, eventHandler, &res) != + HS_SUCCESS) + { + throw base_s3select_exception("ERROR: Unable to scan input buffer. Exiting."); + } + + result->set_value(res); + #elif REGEX_RE2 + re2::StringPiece res[1]; + + if (compiled_regex->Match(content_str, 0, content_str.size(), RE2::ANCHOR_BOTH, res, 1)) + { + result->set_value(true); + } + else + { + result->set_value(false); + } + #else + if (std::regex_match(content_str, compiled_regex)) + { + result->set_value(true); + } + else + { + result->set_value(false); + } + #endif + } + + static int eventHandler(unsigned int id, unsigned long long from, unsigned long long to, + unsigned int flags, void* ctx) + { + *((bool*)ctx) = true; + return 0; + } + +}; + +};//namespace + +#endif
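Aside: base_like::transform above rewrites a SQL LIKE pattern into an anchored regular expression (% becomes .*, _ becomes ., the escape character passes the next character through literally). A simplified rewriter with the same output convention, checked on two sample patterns:

```cpp
#include <cassert>
#include <string>

// Simplified equivalent of base_like::transform: the real code is a
// state machine that also tracks edge positions, but the character
// mapping and the '^'...'$' anchoring are the same.
std::string like_to_regex(const std::string& pat, char escape)
{
  std::string out = "^";
  for (size_t i = 0; i < pat.size(); ++i) {
    if (pat[i] == escape && i + 1 < pat.size()) out += pat[++i]; // literal
    else if (pat[i] == '%') out += ".*";
    else if (pat[i] == '_') out += '.';
    else out += pat[i];
  }
  return out + "$";
}

int main()
{
  assert(like_to_regex("a%_b", '\\') == "^a.*.b$");
  assert(like_to_regex("100\\%", '\\') == "^100%$");
  return 0;
}
```

diff --git a/src/s3select/include/s3select_parquet_intrf.h b/src/s3select/include/s3select_parquet_intrf.h new file mode 100644 index 000000000..df04e1618 --- /dev/null +++ b/src/s3select/include/s3select_parquet_intrf.h @@ -0,0 +1,2079 @@ + +#pragma once + +#if ! __has_include () || ! __has_include () || !__has_include () +# undef _ARROW_EXIST +#endif + +#ifdef _ARROW_EXIST + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include + +#include +#include + +#include "internal_file_decryptor.h" +#include "encryption_internal.h" + +#if ARROW_VERSION_MAJOR < 9 +#define _ARROW_FD fd_ +#define _ARROW_FD_TYPE int +#else +#define _ARROW_FD fd_.fd() +#define _ARROW_FD_TYPE arrow::internal::FileDescriptor +#endif + +/******************************************/ +/******************************************/ +class optional_yield; +namespace s3selectEngine { +class rgw_s3select_api { + + // global object for setting the interface between RGW and the parquet-reader + private: + + public: + + // NOTE: the callback signatures below are reconstructed from the call + // sites in RGWimpl (ReadAt and OpenReadable); the originals were lost. + std::function<int64_t(int64_t, int64_t, void*, optional_yield*)> range_req_fptr; + std::function<size_t(void)> get_size_fptr; + optional_yield *m_y; + + void set_range_req_api(std::function<int64_t(int64_t, int64_t, void*, optional_yield*)> fp) + { + range_req_fptr = fp; + } + + void set_get_size_api(std::function<size_t(void)> fp) + { + get_size_fptr = fp; + } +}; +} + +/******************************************/ +/******************************************/ +/******************************************/ + +static constexpr uint8_t kParquetMagic[4] = {'P', 'A', 'R', '1'}; +static constexpr uint8_t kParquetEMagic[4] = {'P', 'A', 'R', 'E'}; +constexpr int kGcmTagLength = 16; + +namespace arrow { +namespace io { +namespace internal { + +ARROW_EXPORT void CloseFromDestructor(FileInterface* file); + +// Validate a (offset, size) region (as given to ReadAt) against +// the file size. Return the actual read size. +ARROW_EXPORT Result<int64_t> ValidateReadRange(int64_t offset, int64_t size, + int64_t file_size); +// Validate a (offset, size) region (as given to WriteAt) against +// the file size. Short writes are not allowed. +ARROW_EXPORT Status ValidateWriteRange(int64_t offset, int64_t size, int64_t file_size); + +// Validate a (offset, size) region (as given to ReadAt or WriteAt), without +// knowing the file size. 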
+ARROW_EXPORT Status ValidateRange(int64_t offset, int64_t size); + +ARROW_EXPORT +std::vector CoalesceReadRanges(std::vector ranges, + int64_t hole_size_limit, + int64_t range_size_limit); + +ARROW_EXPORT +::arrow::internal::ThreadPool* GetIOThreadPool(); + +} // namespace internal +} // namespace io +} + + +// RGWimpl and OSFile implements the access to storage objects, OSFile(filesystem files) RGWimpl( ceph S3 ) +// ObjectInterface(temporary) is "empty base class" enables injections of access function to storage-objects +// ReadableFileImpl an implementation layer to ObjectInterface objects +// ReadableFile a layer which call to ReadableFileImpl, enable runtime switching between implementations +// ParquetFileReader is the main interface (underline implementation is transparent to this layer) +// + + +namespace arrow { +class Buffer; +namespace io { + +class ObjectInterface { + +#define NOT_IMPLEMENTED {std::cout << "not implemented" << std::endl;} + +//purpose: to implement the range-request from single object +public: + ObjectInterface() : fd_(-1), is_open_(false), size_(-1), need_seeking_(false) {} + + virtual ~ObjectInterface(){} + + // Note: only one of the Open* methods below may be called on a given instance + + virtual Status OpenWritable(const std::string& path, bool truncate, bool append, bool write_only){return Status::OK();} + + // This is different from OpenWritable(string, ...) in that it doesn't + // truncate nor mandate a seekable file + virtual Status OpenWritable(int fd){return Status::OK();} + + virtual Status OpenReadable(const std::string& path){return Status::OK();} + + virtual Status OpenReadable(int fd){return Status::OK();} + + virtual Status CheckClosed() const {return Status::OK();} + + virtual Status Close(){return Status::OK();} + + virtual Result Read(int64_t nbytes, void* out){return Result(-1);} + + virtual Result ReadAt(int64_t position, int64_t nbytes, void* out){return Result(-1);} + + virtual Status Seek(int64_t pos){return Status::OK();} + + virtual Result Tell() const {return Result(-1);} + + virtual Status Write(const void* data, int64_t length){return Status::OK();} + + virtual int fd() const{return -1;} + + virtual bool is_open() const{return false;} + + virtual int64_t size() const{return -1;} + + virtual FileMode::type mode() const{return FileMode::READ;} + + #if 0 + std::mutex& lock(){} + #endif + + protected: + virtual Status SetFileName(const std::string& file_name){return Status::OK();} + + virtual Status SetFileName(int fd){return Status::OK();} + + virtual Status CheckPositioned(){return Status::OK();} + + ::arrow::internal::PlatformFilename file_name_; + + std::mutex lock_; + + // File descriptor + _ARROW_FD_TYPE fd_; + + FileMode::type mode_; + + bool is_open_; + int64_t size_; + // Whether ReadAt made the file position non-deterministic. + std::atomic need_seeking_; + +}; //ObjectInterface + +} //namespace io +} //namespace arrow + +namespace arrow { + +using internal::IOErrorFromErrno; + +namespace io { + +class OSFile : public ObjectInterface { + public: + OSFile() : fd_(-1), is_open_(false), size_(-1), need_seeking_(false) {} + + ~OSFile() {} + + // Note: only one of the Open* methods below may be called on a given instance + + Status OpenWritable(const std::string& path, bool truncate, bool append, + bool write_only) override { + RETURN_NOT_OK(SetFileName(path)); + + ARROW_ASSIGN_OR_RAISE(fd_, ::arrow::internal::FileOpenWritable(file_name_, write_only, + truncate, append)); + is_open_ = true; + mode_ = write_only ? 
FileMode::WRITE : FileMode::READWRITE; + + if (!truncate) { + ARROW_ASSIGN_OR_RAISE(size_, ::arrow::internal::FileGetSize(_ARROW_FD)); + } else { + size_ = 0; + } + return Status::OK(); + } + + // This is different from OpenWritable(string, ...) in that it doesn't + // truncate nor mandate a seekable file + Status OpenWritable(int fd) override { + auto result = ::arrow::internal::FileGetSize(fd); + if (result.ok()) { + size_ = *result; + } else { + // Non-seekable file + size_ = -1; + } + RETURN_NOT_OK(SetFileName(fd)); + is_open_ = true; + mode_ = FileMode::WRITE; + #if ARROW_VERSION_MAJOR < 9 + fd_ = fd; + #else + fd_ = arrow::internal::FileDescriptor{fd}; + #endif + return Status::OK(); + } + + Status OpenReadable(const std::string& path) override { + RETURN_NOT_OK(SetFileName(path)); + + ARROW_ASSIGN_OR_RAISE(fd_, ::arrow::internal::FileOpenReadable(file_name_)); + ARROW_ASSIGN_OR_RAISE(size_, ::arrow::internal::FileGetSize(_ARROW_FD)); + + is_open_ = true; + mode_ = FileMode::READ; + return Status::OK(); + } + + Status OpenReadable(int fd) override { + ARROW_ASSIGN_OR_RAISE(size_, ::arrow::internal::FileGetSize(fd)); + RETURN_NOT_OK(SetFileName(fd)); + is_open_ = true; + mode_ = FileMode::READ; + #if ARROW_VERSION_MAJOR < 9 + fd_ = fd; + #else + fd_ = arrow::internal::FileDescriptor{fd}; + #endif + return Status::OK(); + } + + Status CheckClosed() const override { + if (!is_open_) { + return Status::Invalid("Invalid operation on closed file"); + } + return Status::OK(); + } + + Status Close() override { + if (is_open_) { + // Even if closing fails, the fd will likely be closed (perhaps it's + // already closed). + is_open_ = false; + #if ARROW_VERSION_MAJOR < 9 + int fd = fd_; + fd_ = -1; + RETURN_NOT_OK(::arrow::internal::FileClose(fd)); + #else + RETURN_NOT_OK(fd_.Close()); + #endif + } + return Status::OK(); + } + + Result Read(int64_t nbytes, void* out) override { + RETURN_NOT_OK(CheckClosed()); + RETURN_NOT_OK(CheckPositioned()); + return ::arrow::internal::FileRead(_ARROW_FD, reinterpret_cast(out), nbytes); + } + + Result ReadAt(int64_t position, int64_t nbytes, void* out) override { + RETURN_NOT_OK(CheckClosed()); + RETURN_NOT_OK(internal::ValidateRange(position, nbytes)); + // ReadAt() leaves the file position undefined, so require that we seek + // before calling Read() or Write(). 
+ need_seeking_.store(true); + return ::arrow::internal::FileReadAt(_ARROW_FD, reinterpret_cast(out), position, + nbytes); + } + + Status Seek(int64_t pos) override { + RETURN_NOT_OK(CheckClosed()); + if (pos < 0) { + return Status::Invalid("Invalid position"); + } + Status st = ::arrow::internal::FileSeek(_ARROW_FD, pos); + if (st.ok()) { + need_seeking_.store(false); + } + return st; + } + + Result Tell() const override { + RETURN_NOT_OK(CheckClosed()); + return ::arrow::internal::FileTell(_ARROW_FD); + } + + Status Write(const void* data, int64_t length) override { + RETURN_NOT_OK(CheckClosed()); + + std::lock_guard guard(lock_); + RETURN_NOT_OK(CheckPositioned()); + if (length < 0) { + return Status::IOError("Length must be non-negative"); + } + return ::arrow::internal::FileWrite(_ARROW_FD, reinterpret_cast(data), + length); + } + + int fd() const override { return _ARROW_FD; } + + bool is_open() const override { return is_open_; } + + int64_t size() const override { return size_; } + + FileMode::type mode() const override { return mode_; } + + std::mutex& lock() { return lock_; } + + protected: + Status SetFileName(const std::string& file_name) override { + return ::arrow::internal::PlatformFilename::FromString(file_name).Value(&file_name_); + } + + Status SetFileName(int fd) override { + std::stringstream ss; + ss << ""; + return SetFileName(ss.str()); + } + + Status CheckPositioned() override { + if (need_seeking_.load()) { + return Status::Invalid( + "Need seeking after ReadAt() before " + "calling implicitly-positioned operation"); + } + return Status::OK(); + } + + ::arrow::internal::PlatformFilename file_name_; + + std::mutex lock_; + + // File descriptor + _ARROW_FD_TYPE fd_; + + FileMode::type mode_; + + bool is_open_; + int64_t size_; + // Whether ReadAt made the file position non-deterministic. + std::atomic need_seeking_; +}; +} // namespace io +} // namespace arrow + +namespace arrow { +class Buffer; +namespace io { + +class RGWimpl : public ObjectInterface { + +//purpose: to implement the range-request from single object +public: + RGWimpl(s3selectEngine::rgw_s3select_api* rgw) : fd_(-1), is_open_(false), size_(-1), need_seeking_(false),m_rgw_impl(rgw) {} + + ~RGWimpl(){} + +#define NOT_IMPLEMENT { \ + std::stringstream ss; \ + ss << " method " << __FUNCTION__ << " is not implemented;"; \ + throw parquet::ParquetException(ss.str()); \ + } + + // Note: only one of the Open* methods below may be called on a given instance + + Status OpenWritable(const std::string& path, bool truncate, bool append, bool write_only) { NOT_IMPLEMENT;return Status::OK(); } + + // This is different from OpenWritable(string, ...) in that it doesn't + // truncate nor mandate a seekable file + Status OpenWritable(int fd) {NOT_IMPLEMENT;return Status::OK(); } + + Status OpenReadable(const std::string& path) { + //RGW-implement + + RETURN_NOT_OK(SetFileName(path));//TODO can skip that + size_ = m_rgw_impl->get_size_fptr(); + + is_open_ = true; + mode_ = FileMode::READ; + return Status::OK(); + } + + Status OpenReadable(int fd) {NOT_IMPLEMENT;return Status::OK(); } + + Status CheckClosed() const { + //RGW-implement + if (!is_open_) { + return Status::Invalid("Invalid operation on closed file"); + } + return Status::OK(); + } + + Status Close() { + //RGW-implement + if (is_open_) { + // Even if closing fails, the fd will likely be closed (perhaps it's + // already closed). 
+ is_open_ = false; + //int fd = fd_; + #if ARROW_VERSION_MAJOR < 9 + fd_ = -1; + #else + fd_.Close(); + #endif + //RETURN_NOT_OK(::arrow::internal::FileClose(fd)); + } + return Status::OK(); + } + + Result Read(int64_t nbytes, void* out) { + NOT_IMPLEMENT; + RETURN_NOT_OK(CheckClosed()); + RETURN_NOT_OK(CheckPositioned()); + return ::arrow::internal::FileRead(_ARROW_FD, reinterpret_cast(out), nbytes); + } + + Result ReadAt(int64_t position, int64_t nbytes, void* out) { + + Result status = m_rgw_impl->range_req_fptr(position,nbytes,out,m_rgw_impl->m_y); + + return status; + } + + Status Seek(int64_t pos) {NOT_IMPLEMENT;return Status::OK(); } + + Result Tell() const { + NOT_IMPLEMENT; + return Result(0); + } + + Status Write(const void* data, int64_t length) { + NOT_IMPLEMENT; + return Status::OK(); + } + + int fd() const { return _ARROW_FD; } + + bool is_open() const { return is_open_; } + + int64_t size() const { return size_; } + + FileMode::type mode() const { return mode_; } + + std::mutex& lock() { return lock_; } //TODO skip + + protected: + Status SetFileName(const std::string& file_name) override { + return ::arrow::internal::PlatformFilename::FromString(file_name).Value(&file_name_); + } + + Status SetFileName(int fd) {NOT_IMPLEMENT; return Status::OK(); } + + Status CheckPositioned() {NOT_IMPLEMENT; return Status::OK(); } + + ::arrow::internal::PlatformFilename file_name_; + + std::mutex lock_; + + // File descriptor + _ARROW_FD_TYPE fd_; + + FileMode::type mode_; + + bool is_open_; + int64_t size_; + // Whether ReadAt made the file position non-deterministic. + std::atomic need_seeking_; + +private: + + s3selectEngine::rgw_s3select_api* m_rgw_impl; +}; + +} //namespace io +} //namespace arrow + +namespace arrow { + +class Buffer; +class MemoryPool; +class Status; + +namespace io { +namespace ceph { + +/// \brief An operating system file open in read-only mode. +/// +/// Reads through this implementation are unbuffered. If many small reads +/// need to be issued, it is recommended to use a buffering layer for good +/// performance. +class ARROW_EXPORT ReadableFile + : public internal::RandomAccessFileConcurrencyWrapper { + public: + ~ReadableFile() override; + + /// \brief Open a local file for reading + /// \param[in] path with UTF8 encoding + /// \param[in] pool a MemoryPool for memory allocations + /// \return ReadableFile instance + static Result> Open( + const std::string& path,s3selectEngine::rgw_s3select_api* rgw,MemoryPool* pool = default_memory_pool()); + + /// \brief Open a local file for reading + /// \param[in] fd file descriptor + /// \param[in] pool a MemoryPool for memory allocations + /// \return ReadableFile instance + /// + /// The file descriptor becomes owned by the ReadableFile, and will be closed + /// on Close() or destruction. 
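Aside: RGWimpl::ReadAt above delegates every ranged read to the rgw_s3select_api callbacks. A standalone model of that wiring; the callback shapes mirror the reconstruction earlier in this file, which was inferred from the call sites and is an assumption:

```cpp
#include <cstdint>
#include <functional>

struct toy_yield {}; // stands in for optional_yield

// Toy stand-in for rgw_s3select_api: one callback serves range
// requests, the other reports the total object size.
struct toy_rgw_api {
  std::function<int64_t(int64_t, int64_t, void*, toy_yield*)> range_req_fptr;
  std::function<size_t(void)> get_size_fptr;
};

int main()
{
  toy_rgw_api api;
  // range request: fill `buf` with `len` bytes starting at `start`
  api.range_req_fptr = [](int64_t start, int64_t len, void* buf, toy_yield*) -> int64_t {
    (void)start; (void)buf;
    return len; // bytes delivered; a real callback issues a ranged GET
  };
  api.get_size_fptr = []() -> size_t { return 1024; }; // object size
  char buf[16];
  return api.range_req_fptr(0, sizeof(buf), buf, nullptr) == 16 ? 0 : 1;
}
```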
+ static Result> Open( + int fd, MemoryPool* pool = default_memory_pool()); + + bool closed() const override; + + int file_descriptor() const; + + Status WillNeed(const std::vector& ranges) override; + + private: + friend RandomAccessFileConcurrencyWrapper; + + explicit ReadableFile(MemoryPool* pool,s3selectEngine::rgw_s3select_api* rgw); + + Status DoClose(); + Result DoTell() const; + Result DoRead(int64_t nbytes, void* buffer); + Result> DoRead(int64_t nbytes); + + /// \brief Thread-safe implementation of ReadAt + Result DoReadAt(int64_t position, int64_t nbytes, void* out); + + /// \brief Thread-safe implementation of ReadAt + Result> DoReadAt(int64_t position, int64_t nbytes); + + Result DoGetSize(); + Status DoSeek(int64_t position); + + class ARROW_NO_EXPORT ReadableFileImpl; + std::unique_ptr impl_; +}; + + +} // namespace ceph +} // namespace io +} // namespace arrow + +// ---------------------------------------------------------------------- +// ReadableFileImpl implementation + +namespace arrow { +namespace io { +namespace ceph { + +class ReadableFile::ReadableFileImpl : public ObjectInterface { + public: + + ~ReadableFileImpl() + { + if(IMPL != nullptr) + { + delete IMPL; + } + } + +#ifdef CEPH_USE_FS + explicit ReadableFileImpl(MemoryPool* pool) : pool_(pool) {IMPL=new OSFile();} +#endif + explicit ReadableFileImpl(MemoryPool* pool,s3selectEngine::rgw_s3select_api* rgw) : pool_(pool) {IMPL=new RGWimpl(rgw);} + + Status Open(const std::string& path) { return IMPL->OpenReadable(path); } + + Status Open(int fd) { return IMPL->OpenReadable(fd); } + + Result> ReadBuffer(int64_t nbytes) { + ARROW_ASSIGN_OR_RAISE(auto buffer, AllocateResizableBuffer(nbytes, pool_)); + + ARROW_ASSIGN_OR_RAISE(int64_t bytes_read, IMPL->Read(nbytes, buffer->mutable_data())); + if (bytes_read < nbytes) { + RETURN_NOT_OK(buffer->Resize(bytes_read)); + buffer->ZeroPadding(); + } + return buffer; + } + + Result> ReadBufferAt(int64_t position, int64_t nbytes) { + ARROW_ASSIGN_OR_RAISE(auto buffer, AllocateResizableBuffer(nbytes, pool_)); + + ARROW_ASSIGN_OR_RAISE(int64_t bytes_read, + IMPL->ReadAt(position, nbytes, buffer->mutable_data())); + if (bytes_read < nbytes) { + RETURN_NOT_OK(buffer->Resize(bytes_read)); + buffer->ZeroPadding(); + } + return buffer; + } + + Status WillNeed(const std::vector& ranges) { + RETURN_NOT_OK(CheckClosed()); + for (const auto& range : ranges) { + RETURN_NOT_OK(internal::ValidateRange(range.offset, range.length)); +#if defined(POSIX_FADV_WILLNEED) + if (posix_fadvise(_ARROW_FD, range.offset, range.length, POSIX_FADV_WILLNEED)) { + return IOErrorFromErrno(errno, "posix_fadvise failed"); + } +#elif defined(F_RDADVISE) // macOS, BSD? + struct { + off_t ra_offset; + int ra_count; + } radvisory{range.offset, static_cast(range.length)}; + if (radvisory.ra_count > 0 && fcntl(_ARROW_FD, F_RDADVISE, &radvisory) == -1) { + return IOErrorFromErrno(errno, "fcntl(fd, F_RDADVISE, ...) 
failed"); + } +#endif + } + return Status::OK(); + } + + ObjectInterface *IMPL;//TODO to declare in ObjectInterface + + private: + + MemoryPool* pool_; + +}; + +// ReadableFile implemmetation +ReadableFile::ReadableFile(MemoryPool* pool,s3selectEngine::rgw_s3select_api* rgw) { impl_.reset(new ReadableFileImpl(pool,rgw)); } + +ReadableFile::~ReadableFile() { internal::CloseFromDestructor(this); } + +Result> ReadableFile::Open(const std::string& path, + s3selectEngine::rgw_s3select_api* rgw, + MemoryPool* pool + ) { + auto file = std::shared_ptr(new ReadableFile(pool,rgw)); + RETURN_NOT_OK(file->impl_->Open(path)); + return file; +} + +Result> ReadableFile::Open(int fd, MemoryPool* pool) { + NOT_IMPLEMENT; + auto file = std::shared_ptr(new ReadableFile(pool,0)); + RETURN_NOT_OK(file->impl_->Open(fd)); + return file; +} + +Status ReadableFile::DoClose() { return impl_->Close(); } + +bool ReadableFile::closed() const { return !impl_->is_open(); } + +Status ReadableFile::WillNeed(const std::vector& ranges) { + return impl_->WillNeed(ranges); +} + +Result ReadableFile::DoTell() const { return impl_->Tell(); } + +Result ReadableFile::DoRead(int64_t nbytes, void* out) { + return impl_->IMPL->Read(nbytes, out); +} + +Result ReadableFile::DoReadAt(int64_t position, int64_t nbytes, void* out) { + return impl_->IMPL->ReadAt(position, nbytes, out); +} + +Result> ReadableFile::DoReadAt(int64_t position, int64_t nbytes) { + return impl_->ReadBufferAt(position, nbytes); +} + +Result> ReadableFile::DoRead(int64_t nbytes) { + return impl_->ReadBuffer(nbytes); +} + +Result ReadableFile::DoGetSize() { return impl_->IMPL->size(); } + +Status ReadableFile::DoSeek(int64_t pos) { return impl_->IMPL->Seek(pos); } + +int ReadableFile::file_descriptor() const { return impl_->IMPL->fd(); } + +} // namepace ceph +} // namespace io +} // namespace arrow + + +namespace parquet { + +class ColumnReader; +class FileMetaData; +class PageReader; +class RandomAccessSource; +class RowGroupMetaData; + +namespace ceph { +class PARQUET_EXPORT RowGroupReader { + public: + // Forward declare a virtual class 'Contents' to aid dependency injection and more + // easily create test fixtures + // An implementation of the Contents class is defined in the .cc file + struct Contents { + virtual ~Contents() {} + virtual std::unique_ptr GetColumnPageReader(int i) = 0; + virtual const RowGroupMetaData* metadata() const = 0; + virtual const ReaderProperties* properties() const = 0; + }; + + explicit RowGroupReader(std::unique_ptr contents); + + // Returns the rowgroup metadata + const RowGroupMetaData* metadata() const; + + // Construct a ColumnReader for the indicated row group-relative + // column. Ownership is shared with the RowGroupReader. 
+ std::shared_ptr Column(int i); + + std::unique_ptr GetColumnPageReader(int i); + + private: + // Holds a pointer to an instance of Contents implementation + std::unique_ptr contents_; +}; + +class PARQUET_EXPORT ParquetFileReader { + public: + // Declare a virtual class 'Contents' to aid dependency injection and more + // easily create test fixtures + // An implementation of the Contents class is defined in the .cc file + struct PARQUET_EXPORT Contents { + static std::unique_ptr Open( + std::shared_ptr<::arrow::io::RandomAccessFile> source, + const ReaderProperties& props = default_reader_properties(), + std::shared_ptr metadata = NULLPTR); + + virtual ~Contents() = default; + // Perform any cleanup associated with the file contents + virtual void Close() = 0; + virtual std::shared_ptr GetRowGroup(int i) = 0; + virtual std::shared_ptr metadata() const = 0; + }; + + ParquetFileReader(); + ~ParquetFileReader(); + + // Create a reader from some implementation of parquet-cpp's generic file + // input interface + // + // If you cannot provide exclusive access to your file resource, create a + // subclass of RandomAccessSource that wraps the shared resource + ARROW_DEPRECATED("Use arrow::io::RandomAccessFile version") + static std::unique_ptr Open( + std::unique_ptr source, + const ReaderProperties& props = default_reader_properties(), + std::shared_ptr metadata = NULLPTR); + + // Create a file reader instance from an Arrow file object. Thread-safety is + // the responsibility of the file implementation + static std::unique_ptr Open( + std::shared_ptr<::arrow::io::RandomAccessFile> source, + const ReaderProperties& props = default_reader_properties(), + std::shared_ptr metadata = NULLPTR); + + // API Convenience to open a serialized Parquet file on disk, using Arrow IO + // interfaces. + static std::unique_ptr OpenFile( + const std::string& path,s3selectEngine::rgw_s3select_api* rgw, bool memory_map = true, + const ReaderProperties& props = default_reader_properties(), + std::shared_ptr metadata = NULLPTR + ); + + void Open(std::unique_ptr contents); + void Close(); + + // The RowGroupReader is owned by the FileReader + std::shared_ptr RowGroup(int i); + + // Returns the file metadata. Only one instance is ever created + std::shared_ptr metadata() const; + + /// Pre-buffer the specified column indices in all row groups. + /// + /// Readers can optionally call this to cache the necessary slices + /// of the file in-memory before deserialization. Arrow readers can + /// automatically do this via an option. This is intended to + /// increase performance when reading from high-latency filesystems + /// (e.g. Amazon S3). + /// + /// After calling this, creating readers for row groups/column + /// indices that were not buffered may fail. Creating multiple + /// readers for the a subset of the buffered regions is + /// acceptable. This may be called again to buffer a different set + /// of row groups/columns. + /// + /// If memory usage is a concern, note that data will remain + /// buffered in memory until either \a PreBuffer() is called again, + /// or the reader itself is destructed. Reading - and buffering - + /// only one row group at a time may be useful. 
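Aside: a fragment sketching the pre-buffering flow the comment above describes; the PreBuffer declaration itself follows below. This assumes the present header is included, Arrow is available, and that the element type of the index vectors is int (as in upstream Arrow/Parquet):

```cpp
#include <arrow/io/caching.h>     // ::arrow::io::CacheOptions
#include <arrow/io/interfaces.h>  // ::arrow::io::IOContext

void prebuffer_row_group0(parquet::ceph::ParquetFileReader* reader)
{
  ::arrow::io::IOContext ctx;                        // default I/O context
  auto opts = ::arrow::io::CacheOptions::Defaults(); // coalescing defaults
  // Cache row group 0, columns 0 and 2; per the comment above, readers
  // for row groups/columns that were not buffered may then fail.
  reader->PreBuffer({0}, {0, 2}, ctx, opts);
}
```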
+ void PreBuffer(const std::vector& row_groups, + const std::vector& column_indices, + const ::arrow::io::IOContext& ctx, + const ::arrow::io::CacheOptions& options); + + private: + // Holds a pointer to an instance of Contents implementation + std::unique_ptr contents_; +}; + +// Read only Parquet file metadata +std::shared_ptr PARQUET_EXPORT +ReadMetaData(const std::shared_ptr<::arrow::io::RandomAccessFile>& source); + +/// \brief Scan all values in file. Useful for performance testing +/// \param[in] columns the column numbers to scan. If empty scans all +/// \param[in] column_batch_size number of values to read at a time when scanning column +/// \param[in] reader a ParquetFileReader instance +/// \return number of semantic rows in file +PARQUET_EXPORT +int64_t ScanFileContents(std::vector columns, const int32_t column_batch_size, + ParquetFileReader* reader); + +}//namespace ceph +}//namespace parquet + + +namespace parquet { + +namespace ceph { + +// PARQUET-978: Minimize footer reads by reading 64 KB from the end of the file +static constexpr int64_t kDefaultFooterReadSize = 64 * 1024; +static constexpr uint32_t kFooterSize = 8; + +// For PARQUET-816 +static constexpr int64_t kMaxDictHeaderSize = 100; + +// ---------------------------------------------------------------------- +// RowGroupReader public API + +RowGroupReader::RowGroupReader(std::unique_ptr contents) + : contents_(std::move(contents)) {} + +std::shared_ptr RowGroupReader::Column(int i) { + if (i >= metadata()->num_columns()) { + std::stringstream ss; + ss << "Trying to read column index " << i << " but row group metadata has only " + << metadata()->num_columns() << " columns"; + throw ParquetException(ss.str()); + } + const ColumnDescriptor* descr = metadata()->schema()->Column(i); + + std::unique_ptr page_reader = contents_->GetColumnPageReader(i); + return ColumnReader::Make( + descr, std::move(page_reader), + const_cast(contents_->properties())->memory_pool()); +} + +std::unique_ptr RowGroupReader::GetColumnPageReader(int i) { + if (i >= metadata()->num_columns()) { + std::stringstream ss; + ss << "Trying to read column index " << i << " but row group metadata has only " + << metadata()->num_columns() << " columns"; + throw ParquetException(ss.str()); + } + return contents_->GetColumnPageReader(i); +} + +// Returns the rowgroup metadata +const RowGroupMetaData* RowGroupReader::metadata() const { return contents_->metadata(); } + +/// Compute the section of the file that should be read for the given +/// row group and column chunk. 
+::arrow::io::ReadRange ComputeColumnChunkRange(FileMetaData* file_metadata, + int64_t source_size, int row_group_index, + int column_index) { + auto row_group_metadata = file_metadata->RowGroup(row_group_index); + auto column_metadata = row_group_metadata->ColumnChunk(column_index); + + int64_t col_start = column_metadata->data_page_offset(); + if (column_metadata->has_dictionary_page() && + column_metadata->dictionary_page_offset() > 0 && + col_start > column_metadata->dictionary_page_offset()) { + col_start = column_metadata->dictionary_page_offset(); + } + + int64_t col_length = column_metadata->total_compressed_size(); + // PARQUET-816 workaround for old files created by older parquet-mr + const ApplicationVersion& version = file_metadata->writer_version(); + if (version.VersionLt(ApplicationVersion::PARQUET_816_FIXED_VERSION())) { + // The Parquet MR writer had a bug in 1.2.8 and below where it didn't include the + // dictionary page header size in total_compressed_size and total_uncompressed_size + // (see IMPALA-694). We add padding to compensate. + int64_t bytes_remaining = source_size - (col_start + col_length); + int64_t padding = std::min(kMaxDictHeaderSize, bytes_remaining); + col_length += padding; + } + + return {col_start, col_length}; +} + +// RowGroupReader::Contents implementation for the Parquet file specification +class SerializedRowGroup : public RowGroupReader::Contents { + public: + SerializedRowGroup(std::shared_ptr source, + std::shared_ptr<::arrow::io::internal::ReadRangeCache> cached_source, + int64_t source_size, FileMetaData* file_metadata, + int row_group_number, const ReaderProperties& props, + std::shared_ptr file_decryptor = nullptr) + : source_(std::move(source)), + cached_source_(std::move(cached_source)), + source_size_(source_size), + file_metadata_(file_metadata), + properties_(props), + row_group_ordinal_(row_group_number), + file_decryptor_(file_decryptor) { + row_group_metadata_ = file_metadata->RowGroup(row_group_number); + } + + const RowGroupMetaData* metadata() const override { return row_group_metadata_.get(); } + + const ReaderProperties* properties() const override { return &properties_; } + + std::unique_ptr GetColumnPageReader(int i) override { + // Read column chunk from the file + auto col = row_group_metadata_->ColumnChunk(i); + + ::arrow::io::ReadRange col_range = + ComputeColumnChunkRange(file_metadata_, source_size_, row_group_ordinal_, i); + std::shared_ptr stream; + if (cached_source_) { + // PARQUET-1698: if read coalescing is enabled, read from pre-buffered + // segments. + PARQUET_ASSIGN_OR_THROW(auto buffer, cached_source_->Read(col_range)); + stream = std::make_shared<::arrow::io::BufferReader>(buffer); + } else { + stream = properties_.GetStream(source_, col_range.offset, col_range.length); + } + + std::unique_ptr crypto_metadata = col->crypto_metadata(); + + // Column is encrypted only if crypto_metadata exists. 
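+    // Dispatch summary for the code below: a plaintext column gets a plain
+    // PageReader, while an encrypted column gets a CryptoContext, built from
+    // either the footer key or its own column key, and is opened with that
+    // context.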
+ if (!crypto_metadata) { + return PageReader::Open(stream, col->num_values(), col->compression(), + properties_.memory_pool()); + } + + if (file_decryptor_ == nullptr) { + throw ParquetException("RowGroup is noted as encrypted but no file decryptor"); + } + + constexpr auto kEncryptedRowGroupsLimit = 32767; + if (i > kEncryptedRowGroupsLimit) { + throw ParquetException("Encrypted files cannot contain more than 32767 row groups"); + } + + // The column is encrypted + std::shared_ptr<::parquet::Decryptor> meta_decryptor; + std::shared_ptr data_decryptor; + // The column is encrypted with footer key + if (crypto_metadata->encrypted_with_footer_key()) { + meta_decryptor = file_decryptor_->GetFooterDecryptorForColumnMeta(); + data_decryptor = file_decryptor_->GetFooterDecryptorForColumnData(); + + CryptoContext ctx(col->has_dictionary_page(), row_group_ordinal_, + static_cast(i), meta_decryptor, data_decryptor); + return PageReader::Open(stream, col->num_values(), col->compression(), + #if ARROW_VERSION_MAJOR > 8 + false, + #endif + properties_.memory_pool(), &ctx); + } + + // The column is encrypted with its own key + std::string column_key_metadata = crypto_metadata->key_metadata(); + const std::string column_path = crypto_metadata->path_in_schema()->ToDotString(); + + meta_decryptor = + file_decryptor_->GetColumnMetaDecryptor(column_path, column_key_metadata); + data_decryptor = + file_decryptor_->GetColumnDataDecryptor(column_path, column_key_metadata); + + CryptoContext ctx(col->has_dictionary_page(), row_group_ordinal_, + static_cast(i), meta_decryptor, data_decryptor); + return PageReader::Open(stream, col->num_values(), col->compression(), + #if ARROW_VERSION_MAJOR > 8 + false, + #endif + properties_.memory_pool(), &ctx); + } + + private: + std::shared_ptr source_; + // Will be nullptr if PreBuffer() is not called. + std::shared_ptr<::arrow::io::internal::ReadRangeCache> cached_source_; + int64_t source_size_; + FileMetaData* file_metadata_; + std::unique_ptr row_group_metadata_; + ReaderProperties properties_; + int row_group_ordinal_; + std::shared_ptr file_decryptor_; +}; + +// ---------------------------------------------------------------------- +// SerializedFile: An implementation of ParquetFileReader::Contents that deals +// with the Parquet file structure, Thrift deserialization, and other internal +// matters + +// This class takes ownership of the provided data source +class SerializedFile : public ParquetFileReader::Contents { + public: + SerializedFile(std::shared_ptr source, + const ReaderProperties& props = default_reader_properties()) + : source_(std::move(source)), properties_(props) { + PARQUET_ASSIGN_OR_THROW(source_size_, source_->GetSize()); + } + + ~SerializedFile() override { + try { + Close(); + } catch (...) 
{ + } + } + + void Close() override { + if (file_decryptor_) file_decryptor_->WipeOutDecryptionKeys(); + } + + std::shared_ptr GetRowGroup(int i) override { + std::unique_ptr contents( + new SerializedRowGroup(source_, cached_source_, source_size_, + file_metadata_.get(), i, properties_, file_decryptor_)); + return std::make_shared(std::move(contents)); + } + + std::shared_ptr metadata() const override { return file_metadata_; } + + void set_metadata(std::shared_ptr metadata) { + file_metadata_ = std::move(metadata); + } + + void PreBuffer(const std::vector& row_groups, + const std::vector& column_indices, + const ::arrow::io::IOContext& ctx, + const ::arrow::io::CacheOptions& options) { + cached_source_ = + std::make_shared<::arrow::io::internal::ReadRangeCache>(source_, ctx, options); + //std::vector ranges; + std::vector<::arrow::io::ReadRange> ranges; + for (int row : row_groups) { + for (int col : column_indices) { + ranges.push_back( + ComputeColumnChunkRange(file_metadata_.get(), source_size_, row, col)); + } + } + PARQUET_THROW_NOT_OK(cached_source_->Cache(ranges)); + } + + void ParseMetaData() { + if (source_size_ == 0) { + throw ParquetInvalidOrCorruptedFileException("Parquet file size is 0 bytes"); + } else if (source_size_ < kFooterSize) { + throw ParquetInvalidOrCorruptedFileException( + "Parquet file size is ", source_size_, + " bytes, smaller than the minimum file footer (", kFooterSize, " bytes)"); + } + + int64_t footer_read_size = std::min(source_size_, kDefaultFooterReadSize); + PARQUET_ASSIGN_OR_THROW( + auto footer_buffer, + source_->ReadAt(source_size_ - footer_read_size, footer_read_size)); + + // Check if all bytes are read. Check if last 4 bytes read have the magic bits + if (footer_buffer->size() != footer_read_size || + (memcmp(footer_buffer->data() + footer_read_size - 4, kParquetMagic, 4) != 0 && + memcmp(footer_buffer->data() + footer_read_size - 4, kParquetEMagic, 4) != 0)) { + throw ParquetInvalidOrCorruptedFileException( + "Parquet magic bytes not found in footer. Either the file is corrupted or this " + "is not a parquet file."); + } + + if (memcmp(footer_buffer->data() + footer_read_size - 4, kParquetEMagic, 4) == 0) { + // Encrypted file with Encrypted footer. + ParseMetaDataOfEncryptedFileWithEncryptedFooter(footer_buffer, footer_read_size); + return; + } + + // No encryption or encryption with plaintext footer mode. + std::shared_ptr metadata_buffer; + uint32_t metadata_len, read_metadata_len; + ParseUnencryptedFileMetadata(footer_buffer, footer_read_size, &metadata_buffer, + &metadata_len, &read_metadata_len); + + auto file_decryption_properties = properties_.file_decryption_properties().get(); + if (!file_metadata_->is_encryption_algorithm_set()) { // Non encrypted file. + if (file_decryption_properties != nullptr) { + if (!file_decryption_properties->plaintext_files_allowed()) { + throw ParquetException("Applying decryption properties on plaintext file"); + } + } + } else { + // Encrypted file with plaintext footer mode. 
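+    // The footer itself is readable here; if the decryption properties
+    // request integrity checking, its signature is verified in
+    // ParseMetaDataOfEncryptedFileWithPlaintextFooter() below.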
+ ParseMetaDataOfEncryptedFileWithPlaintextFooter( + file_decryption_properties, metadata_buffer, metadata_len, read_metadata_len); + } + } + + private: + std::shared_ptr source_; + std::shared_ptr<::arrow::io::internal::ReadRangeCache> cached_source_; + int64_t source_size_; + std::shared_ptr file_metadata_; + ReaderProperties properties_; + + std::shared_ptr<::parquet::InternalFileDecryptor> file_decryptor_; + + void ParseUnencryptedFileMetadata(const std::shared_ptr& footer_buffer, + int64_t footer_read_size, + std::shared_ptr* metadata_buffer, + uint32_t* metadata_len, uint32_t* read_metadata_len); + + std::string HandleAadPrefix(FileDecryptionProperties* file_decryption_properties, + EncryptionAlgorithm& algo); + + void ParseMetaDataOfEncryptedFileWithPlaintextFooter( + FileDecryptionProperties* file_decryption_properties, + const std::shared_ptr& metadata_buffer, uint32_t metadata_len, + uint32_t read_metadata_len); + + void ParseMetaDataOfEncryptedFileWithEncryptedFooter( + const std::shared_ptr& footer_buffer, int64_t footer_read_size); +}; + +void SerializedFile::ParseUnencryptedFileMetadata( + const std::shared_ptr& footer_buffer, int64_t footer_read_size, + std::shared_ptr* metadata_buffer, uint32_t* metadata_len, + uint32_t* read_metadata_len) { + *metadata_len = ::arrow::util::SafeLoadAs( + reinterpret_cast(footer_buffer->data()) + footer_read_size - + kFooterSize); + int64_t metadata_start = source_size_ - kFooterSize - *metadata_len; + if (*metadata_len > source_size_ - kFooterSize) { + throw ParquetInvalidOrCorruptedFileException( + "Parquet file size is ", source_size_, + " bytes, smaller than the size reported by metadata (", metadata_len, "bytes)"); + } + + // Check if the footer_buffer contains the entire metadata + if (footer_read_size >= (*metadata_len + kFooterSize)) { + *metadata_buffer = SliceBuffer( + footer_buffer, footer_read_size - *metadata_len - kFooterSize, *metadata_len); + } else { + PARQUET_ASSIGN_OR_THROW(*metadata_buffer, + source_->ReadAt(metadata_start, *metadata_len)); + if ((*metadata_buffer)->size() != *metadata_len) { + throw ParquetException("Failed reading metadata buffer (requested " + + std::to_string(*metadata_len) + " bytes but got " + + std::to_string((*metadata_buffer)->size()) + " bytes)"); + } + } + + *read_metadata_len = *metadata_len; + file_metadata_ = FileMetaData::Make((*metadata_buffer)->data(), read_metadata_len); +} + +void SerializedFile::ParseMetaDataOfEncryptedFileWithEncryptedFooter( + const std::shared_ptr& footer_buffer, int64_t footer_read_size) { + // encryption with encrypted footer + // both metadata & crypto metadata length + uint32_t footer_len = ::arrow::util::SafeLoadAs( + reinterpret_cast(footer_buffer->data()) + footer_read_size - + kFooterSize); + int64_t crypto_metadata_start = source_size_ - kFooterSize - footer_len; + if (kFooterSize + footer_len > source_size_) { + throw ParquetInvalidOrCorruptedFileException( + "Parquet file size is ", source_size_, + " bytes, smaller than the size reported by footer's (", footer_len, "bytes)"); + } + std::shared_ptr crypto_metadata_buffer; + // Check if the footer_buffer contains the entire metadata + if (footer_read_size >= (footer_len + kFooterSize)) { + crypto_metadata_buffer = SliceBuffer( + footer_buffer, footer_read_size - footer_len - kFooterSize, footer_len); + } else { + PARQUET_ASSIGN_OR_THROW(crypto_metadata_buffer, + source_->ReadAt(crypto_metadata_start, footer_len)); + if (crypto_metadata_buffer->size() != footer_len) { + throw ParquetException("Failed reading 
encrypted metadata buffer (requested " + + std::to_string(footer_len) + " bytes but got " + + std::to_string(crypto_metadata_buffer->size()) + " bytes)"); + } + } + auto file_decryption_properties = properties_.file_decryption_properties().get(); + if (file_decryption_properties == nullptr) { + throw ParquetException( + "Could not read encrypted metadata, no decryption found in reader's properties"); + } + uint32_t crypto_metadata_len = footer_len; + std::shared_ptr file_crypto_metadata = + FileCryptoMetaData::Make(crypto_metadata_buffer->data(), &crypto_metadata_len); + // Handle AAD prefix + EncryptionAlgorithm algo = file_crypto_metadata->encryption_algorithm(); + std::string file_aad = HandleAadPrefix(file_decryption_properties, algo); + file_decryptor_ = std::make_shared<::parquet::InternalFileDecryptor>( + file_decryption_properties, file_aad, algo.algorithm, + file_crypto_metadata->key_metadata(), properties_.memory_pool()); + + int64_t metadata_offset = source_size_ - kFooterSize - footer_len + crypto_metadata_len; + uint32_t metadata_len = footer_len - crypto_metadata_len; + PARQUET_ASSIGN_OR_THROW(auto metadata_buffer, + source_->ReadAt(metadata_offset, metadata_len)); + if (metadata_buffer->size() != metadata_len) { + throw ParquetException("Failed reading metadata buffer (requested " + + std::to_string(metadata_len) + " bytes but got " + + std::to_string(metadata_buffer->size()) + " bytes)"); + } + + file_metadata_ = + FileMetaData::Make(metadata_buffer->data(), &metadata_len, file_decryptor_); + //FileMetaData::Make(metadata_buffer->data(), &metadata_len, default_reader_properties(), file_decryptor_); //version>9 +} + +void SerializedFile::ParseMetaDataOfEncryptedFileWithPlaintextFooter( + FileDecryptionProperties* file_decryption_properties, + const std::shared_ptr& metadata_buffer, uint32_t metadata_len, + uint32_t read_metadata_len) { + // Providing decryption properties in plaintext footer mode is not mandatory, for + // example when reading by legacy reader. + if (file_decryption_properties != nullptr) { + EncryptionAlgorithm algo = file_metadata_->encryption_algorithm(); + // Handle AAD prefix + std::string file_aad = HandleAadPrefix(file_decryption_properties, algo); + file_decryptor_ = std::make_shared<::parquet::InternalFileDecryptor>( + file_decryption_properties, file_aad, algo.algorithm, + file_metadata_->footer_signing_key_metadata(), properties_.memory_pool()); + // set the InternalFileDecryptor in the metadata as well, as it's used + // for signature verification and for ColumnChunkMetaData creation. 
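+    // (The call below is compiled out via GAL_set_file_decryptor_declare_private,
+    // presumably because set_file_decryptor() is not publicly accessible in the
+    // Arrow headers this port builds against.)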
+#if GAL_set_file_decryptor_declare_private + file_metadata_->set_file_decryptor(file_decryptor_); +#endif + if (file_decryption_properties->check_plaintext_footer_integrity()) { + if (metadata_len - read_metadata_len != + (parquet::encryption::kGcmTagLength + parquet::encryption::kNonceLength)) { + throw ParquetInvalidOrCorruptedFileException( + "Failed reading metadata for encryption signature (requested ", + parquet::encryption::kGcmTagLength + parquet::encryption::kNonceLength, + " bytes but have ", metadata_len - read_metadata_len, " bytes)"); + } + + if (!file_metadata_->VerifySignature(metadata_buffer->data() + read_metadata_len)) { + throw ParquetInvalidOrCorruptedFileException( + "Parquet crypto signature verification failed"); + } + } + } +} + +std::string SerializedFile::HandleAadPrefix( + FileDecryptionProperties* file_decryption_properties, EncryptionAlgorithm& algo) { + std::string aad_prefix_in_properties = file_decryption_properties->aad_prefix(); + std::string aad_prefix = aad_prefix_in_properties; + bool file_has_aad_prefix = algo.aad.aad_prefix.empty() ? false : true; + std::string aad_prefix_in_file = algo.aad.aad_prefix; + + if (algo.aad.supply_aad_prefix && aad_prefix_in_properties.empty()) { + throw ParquetException( + "AAD prefix used for file encryption, " + "but not stored in file and not supplied " + "in decryption properties"); + } + + if (file_has_aad_prefix) { + if (!aad_prefix_in_properties.empty()) { + if (aad_prefix_in_properties.compare(aad_prefix_in_file) != 0) { + throw ParquetException( + "AAD Prefix in file and in properties " + "is not the same"); + } + } + aad_prefix = aad_prefix_in_file; + std::shared_ptr aad_prefix_verifier = + file_decryption_properties->aad_prefix_verifier(); + if (aad_prefix_verifier != nullptr) aad_prefix_verifier->Verify(aad_prefix); + } else { + if (!algo.aad.supply_aad_prefix && !aad_prefix_in_properties.empty()) { + throw ParquetException( + "AAD Prefix set in decryption properties, but was not used " + "for file encryption"); + } + std::shared_ptr aad_prefix_verifier = + file_decryption_properties->aad_prefix_verifier(); + if (aad_prefix_verifier != nullptr) { + throw ParquetException( + "AAD Prefix Verifier is set, but AAD Prefix not found in file"); + } + } + return aad_prefix + algo.aad.aad_file_unique; +} + +// ---------------------------------------------------------------------- +// ParquetFileReader public API + +ParquetFileReader::ParquetFileReader() {} + +ParquetFileReader::~ParquetFileReader() { + try { + Close(); + } catch (...) { + } +} + +// Open the file. 
If no metadata is passed, it is parsed from the footer of +// the file +std::unique_ptr ParquetFileReader::Contents::Open( + std::shared_ptr source, const ReaderProperties& props, + std::shared_ptr metadata) { + std::unique_ptr result( + new SerializedFile(std::move(source), props)); + + // Access private methods here, but otherwise unavailable + SerializedFile* file = static_cast(result.get()); + + if (metadata == nullptr) { + // Validates magic bytes, parses metadata, and initializes the SchemaDescriptor + file->ParseMetaData(); + } else { + file->set_metadata(std::move(metadata)); + } + + return result; +} + +std::unique_ptr ParquetFileReader::Open( + std::shared_ptr<::arrow::io::RandomAccessFile> source, const ReaderProperties& props, + std::shared_ptr metadata) { + auto contents = SerializedFile::Open(std::move(source), props, std::move(metadata)); + std::unique_ptr result(new ParquetFileReader()); + result->Open(std::move(contents)); + return result; +} + +#if GAL_NOT_IMPLEMENTED +std::unique_ptr ParquetFileReader::Open( + std::unique_ptr source, const ReaderProperties& props, + std::shared_ptr metadata) { + auto wrapper = std::make_shared(std::move(source)); + return Open(std::move(wrapper), props, std::move(metadata)); +} +#endif + +std::unique_ptr ParquetFileReader::OpenFile( + const std::string& path, s3selectEngine::rgw_s3select_api* rgw, bool memory_map, const ReaderProperties& props, + std::shared_ptr metadata) { + std::shared_ptr<::arrow::io::RandomAccessFile> source; + if (memory_map) { + PARQUET_ASSIGN_OR_THROW( + source, ::arrow::io::MemoryMappedFile::Open(path, ::arrow::io::FileMode::READ));//GAL change that also, or to remove? + } else { + PARQUET_ASSIGN_OR_THROW(source, + ::arrow::io::ceph::ReadableFile::Open(path, rgw, props.memory_pool())); + } + + return Open(std::move(source), props, std::move(metadata)); +} + +void ParquetFileReader::Open(std::unique_ptr contents) { + contents_ = std::move(contents); +} + +void ParquetFileReader::Close() { + if (contents_) { + contents_->Close(); + } +} + +std::shared_ptr ParquetFileReader::metadata() const { + return contents_->metadata(); +} + +std::shared_ptr ParquetFileReader::RowGroup(int i) { + if (i >= metadata()->num_row_groups()) { + std::stringstream ss; + ss << "Trying to read row group " << i << " but file only has " + << metadata()->num_row_groups() << " row groups"; + throw ParquetException(ss.str()); + } + return contents_->GetRowGroup(i); +} + +void ParquetFileReader::PreBuffer(const std::vector& row_groups, + const std::vector& column_indices, + const ::arrow::io::IOContext& ctx, + const ::arrow::io::CacheOptions& options) { + // Access private methods here + SerializedFile* file = + ::arrow::internal::checked_cast(contents_.get()); + file->PreBuffer(row_groups, column_indices, ctx, options); +} + +// ---------------------------------------------------------------------- +// File metadata helpers + +std::shared_ptr ReadMetaData( + const std::shared_ptr<::arrow::io::RandomAccessFile>& source) { + return ParquetFileReader::Open(source)->metadata(); +} + +// ---------------------------------------------------------------------- +// File scanner for performance testing +#if GAL_ScanAllValues_is_no_declare +int64_t ScanFileContents(std::vector columns, const int32_t column_batch_size, + ParquetFileReader* reader) { + std::vector rep_levels(column_batch_size); + std::vector def_levels(column_batch_size); + + int num_columns = static_cast(columns.size()); + + // columns are not specified explicitly. 
Add all columns + if (columns.size() == 0) { + num_columns = reader->metadata()->num_columns(); + columns.resize(num_columns); + for (int i = 0; i < num_columns; i++) { + columns[i] = i; + } + } + + std::vector total_rows(num_columns, 0); + + for (int r = 0; r < reader->metadata()->num_row_groups(); ++r) { + auto group_reader = reader->RowGroup(r); + int col = 0; + for (auto i : columns) { + std::shared_ptr col_reader = group_reader->Column(i); + size_t value_byte_size = GetTypeByteSize(col_reader->descr()->physical_type()); + std::vector values(column_batch_size * value_byte_size); + + int64_t values_read = 0; + while (col_reader->HasNext()) { + int64_t levels_read = + ScanAllValues(column_batch_size, def_levels.data(), rep_levels.data(), + values.data(), &values_read, col_reader.get()); + if (col_reader->descr()->max_repetition_level() > 0) { + for (int64_t i = 0; i < levels_read; i++) { + if (rep_levels[i] == 0) { + total_rows[col]++; + } + } + } else { + total_rows[col] += levels_read; + } + } + col++; + } + } + + for (int i = 1; i < num_columns; ++i) { + if (total_rows[0] != total_rows[i]) { + throw ParquetException("Parquet error: Total rows among columns do not match"); + } + } + + return total_rows[0]; +} +#endif + +} //namespace ceph +} //namespace parquet + +/******************************************/ +/******************************************/ +/******************************************/ +class column_reader_wrap +{ + +private: + + int64_t m_rownum; + parquet::Type::type m_type; + std::shared_ptr m_row_group_reader; + int m_row_grouop_id; + uint16_t m_col_id; + parquet::ceph::ParquetFileReader* m_parquet_reader; + std::shared_ptr m_ColumnReader; + bool m_end_of_stream; + bool m_read_last_value; + + +public: + + enum class parquet_type + { + NA_TYPE, + STRING, + INT32, + INT64, + FLOAT, + DOUBLE, + TIMESTAMP, + PARQUET_NULL + }; + + struct parquet_value + { + int64_t num; + char *str; //str is pointing to offset in string which is NOT null terminated. 
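+    //note: the pointed-to bytes live in the column reader's current page
+    //buffer, so callers should copy str/str_len before the next read call.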
+ uint16_t str_len; + double dbl; + parquet_type type; + + parquet_value():type(parquet_type::NA_TYPE){} + }; + + typedef struct parquet_value parquet_value_t; + + enum class parquet_column_read_state {PARQUET_OUT_OF_RANGE,PARQUET_READ_OK}; + + private: + parquet_value_t m_last_value; + + public: + column_reader_wrap(std::unique_ptr & parquet_reader,uint16_t col_id); + + parquet::Type::type get_type(); + + bool HasNext();//TODO template + + int64_t ReadBatch(int64_t batch_size, int16_t* def_levels, int16_t* rep_levels, + parquet_value_t* values, int64_t* values_read); + + int64_t Skip(int64_t rows_to_skip); + + parquet_column_read_state Read(uint64_t rownum,parquet_value_t & value); + +}; + +class parquet_file_parser +{ + +public: + + typedef std::vector> schema_t; + typedef std::set column_pos_t; + typedef std::vector row_values_t; + + typedef column_reader_wrap::parquet_value_t parquet_value_t; + typedef column_reader_wrap::parquet_type parquet_type; + +private: + + std::string m_parquet_file_name; + uint32_t m_num_of_columms; + uint64_t m_num_of_rows; + uint64_t m_rownum; + schema_t m_schm; + int m_num_row_groups; + std::shared_ptr m_file_metadata; + std::unique_ptr m_parquet_reader; + std::vector m_column_readers; + s3selectEngine::rgw_s3select_api* m_rgw_s3select_api; + + public: + + parquet_file_parser(std::string parquet_file_name,s3selectEngine::rgw_s3select_api* rgw_api) : + m_parquet_file_name(parquet_file_name), + m_num_of_columms(0), + m_num_of_rows(0), + m_rownum(0), + m_num_row_groups(0), + m_rgw_s3select_api(rgw_api) + + + { + load_meta_data(); + } + + ~parquet_file_parser() + { + for(auto r : m_column_readers) + { + delete r; + } + } + + int load_meta_data() + { + m_parquet_reader = parquet::ceph::ParquetFileReader::OpenFile(m_parquet_file_name,m_rgw_s3select_api,false); + m_file_metadata = m_parquet_reader->metadata(); + m_num_of_columms = m_parquet_reader->metadata()->num_columns(); + m_num_row_groups = m_file_metadata->num_row_groups(); + m_num_of_rows = m_file_metadata->num_rows(); + + for (uint32_t i = 0; i < m_num_of_columms; i++) + { + parquet::Type::type tp = m_file_metadata->schema()->Column(i)->physical_type(); + std::pair elm; + + switch (tp) + { + case parquet::Type::type::INT32: + elm = std::pair(m_file_metadata->schema()->Column(i)->name(), column_reader_wrap::parquet_type::INT32); + m_schm.push_back(elm); + break; + + case parquet::Type::type::INT64: + elm = std::pair(m_file_metadata->schema()->Column(i)->name(), column_reader_wrap::parquet_type::INT64); + m_schm.push_back(elm); + break; + + case parquet::Type::type::FLOAT: + elm = std::pair(m_file_metadata->schema()->Column(i)->name(), column_reader_wrap::parquet_type::FLOAT); + m_schm.push_back(elm); + break; + + case parquet::Type::type::DOUBLE: + elm = std::pair(m_file_metadata->schema()->Column(i)->name(), column_reader_wrap::parquet_type::DOUBLE); + m_schm.push_back(elm); + break; + + case parquet::Type::type::BYTE_ARRAY: + elm = std::pair(m_file_metadata->schema()->Column(i)->name(), column_reader_wrap::parquet_type::STRING); + m_schm.push_back(elm); + break; + + default: + { + std::stringstream err; + err << "some parquet type not supported"; + throw std::runtime_error(err.str()); + } + } + + m_column_readers.push_back(new column_reader_wrap(m_parquet_reader,i)); + } + + return 0; + } + + bool end_of_stream() + { + + if (m_rownum > (m_num_of_rows-1)) + return true; + return false; + } + + uint64_t get_number_of_rows() + { + return m_num_of_rows; + } + + uint64_t rownum() + { + return m_rownum; + } + + 
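+  // advances the row cursor by one; returns false when the cursor already
+  // points past the last row (see end_of_stream()).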
bool increase_rownum() + { + if (end_of_stream()) + return false; + + m_rownum++; + return true; + } + + uint64_t get_rownum() + { + return m_rownum; + } + + uint32_t get_num_of_columns() + { + return m_num_of_columms; + } + + int get_column_values_by_positions(column_pos_t positions, row_values_t &row_values) + { + column_reader_wrap::parquet_value_t column_value; + row_values.clear(); + + for(auto col : positions) + { + if((col)>=m_num_of_columms) + {//TODO should verified upon syntax phase + //TODO throw exception + return -1; + } + auto status = m_column_readers[col]->Read(m_rownum,column_value); + if(status == column_reader_wrap::parquet_column_read_state::PARQUET_OUT_OF_RANGE) return -1; + row_values.push_back(column_value);//TODO intensive (should move) + } + return 0; + } + + schema_t get_schema() + { + return m_schm; + } +}; + +/******************************************/ + + + column_reader_wrap::column_reader_wrap(std::unique_ptr & parquet_reader,uint16_t col_id): + m_rownum(-1), + m_type(parquet::Type::type::UNDEFINED), + m_row_grouop_id(0), + m_col_id(col_id), + m_end_of_stream(false), + m_read_last_value(false) + { + m_parquet_reader = parquet_reader.get(); + m_row_group_reader = m_parquet_reader->RowGroup(m_row_grouop_id); + m_ColumnReader = m_row_group_reader->Column(m_col_id); + } + + parquet::Type::type column_reader_wrap::get_type() + {//TODO if UNDEFINED + return m_parquet_reader->metadata()->schema()->Column(m_col_id)->physical_type(); + } + + bool column_reader_wrap::HasNext()//TODO template + { + parquet::Int32Reader* int32_reader; + parquet::Int64Reader* int64_reader; + parquet::FloatReader* float_reader; + parquet::DoubleReader* double_reader; + parquet::ByteArrayReader* byte_array_reader; + + switch (get_type()) + { + case parquet::Type::type::INT32: + int32_reader = static_cast(m_ColumnReader.get()); + return int32_reader->HasNext(); + break; + + case parquet::Type::type::INT64: + int64_reader = static_cast(m_ColumnReader.get()); + return int64_reader->HasNext(); + break; + + case parquet::Type::type::FLOAT: + float_reader = static_cast(m_ColumnReader.get()); + return float_reader->HasNext(); + break; + + case parquet::Type::type::DOUBLE: + double_reader = static_cast(m_ColumnReader.get()); + return double_reader->HasNext(); + break; + + case parquet::Type::type::BYTE_ARRAY: + byte_array_reader = static_cast(m_ColumnReader.get()); + return byte_array_reader->HasNext(); + break; + + default: + + std::stringstream err; + err << "HasNext():" << "wrong type or type not exist" << std::endl; + throw std::runtime_error(err.str()); + + return false; + //TODO throw exception + } + + return false; + } + + int64_t column_reader_wrap::ReadBatch(int64_t batch_size, int16_t* def_levels, int16_t* rep_levels, + parquet_value_t* values, int64_t* values_read) + { + parquet::Int32Reader* int32_reader; + parquet::Int64Reader* int64_reader; + parquet::FloatReader* float_reader; + parquet::DoubleReader* double_reader; + parquet::ByteArrayReader* byte_array_reader; + + parquet::ByteArray str_value; + int64_t rows_read; + int32_t i32_val; + + auto error_msg = [&](std::exception &e) + { + std::stringstream err; + err << "what() :" << e.what() << std::endl; + err << "failed to parse column id:" << this->m_col_id << " name:" <m_parquet_reader->metadata()->schema()->Column(m_col_id)->name(); + return err; + }; + int16_t defintion_level; + int16_t repeat_level; + + switch (get_type()) + { + case parquet::Type::type::INT32: + int32_reader = static_cast(m_ColumnReader.get()); + try { + 
rows_read = int32_reader->ReadBatch(1, &defintion_level, &repeat_level, &i32_val , values_read); + if(defintion_level == 0) + { + values->type = column_reader_wrap::parquet_type::PARQUET_NULL; + } else + { + values->num = i32_val; + values->type = column_reader_wrap::parquet_type::INT32; + } + } + catch(std::exception &e) + { + throw std::runtime_error(error_msg(e).str()); + } + + break; + + case parquet::Type::type::INT64: + int64_reader = static_cast(m_ColumnReader.get()); + try{ + rows_read = int64_reader->ReadBatch(1, &defintion_level, &repeat_level, (int64_t *)&(values->num), values_read); + if(defintion_level == 0) + { + values->type = column_reader_wrap::parquet_type::PARQUET_NULL; + } else + { + auto logical_type = m_parquet_reader->metadata()->schema()->Column(m_col_id)->logical_type(); + + if (logical_type.get()->type() == parquet::LogicalType::Type::type::TIMESTAMP) //TODO missing sub-type (milli,micro) + values->type = column_reader_wrap::parquet_type::TIMESTAMP; + else + values->type = column_reader_wrap::parquet_type::INT64; + } + } + catch(std::exception &e) + { + throw std::runtime_error(error_msg(e).str()); + } + break; + + case parquet::Type::type::FLOAT: + float_reader = static_cast(m_ColumnReader.get()); + try{ + float data_source_float = 0; + rows_read = float_reader->ReadBatch(1, &defintion_level, &repeat_level, &data_source_float , values_read);//TODO proper cast + if(defintion_level == 0) + { + values->type = column_reader_wrap::parquet_type::PARQUET_NULL; + } else + { + values->type = column_reader_wrap::parquet_type::DOUBLE; + values->dbl = data_source_float; + + } + } + catch(std::exception &e) + { + throw std::runtime_error(error_msg(e).str()); + } + break; + + case parquet::Type::type::DOUBLE: + double_reader = static_cast(m_ColumnReader.get()); + try{ + rows_read = double_reader->ReadBatch(1, &defintion_level, &repeat_level, (double *)&(values->dbl), values_read); + if(defintion_level == 0) + { + values->type = column_reader_wrap::parquet_type::PARQUET_NULL; + } else + { + values->type = column_reader_wrap::parquet_type::DOUBLE; + } + } + catch(std::exception &e) + { + throw std::runtime_error(error_msg(e).str()); + } + break; + + case parquet::Type::type::BYTE_ARRAY: + byte_array_reader = static_cast(m_ColumnReader.get()); + try{ + rows_read = byte_array_reader->ReadBatch(1, &defintion_level, &repeat_level, &str_value , values_read); + if(defintion_level == 0) + { + values->type = column_reader_wrap::parquet_type::PARQUET_NULL; + } else + { + values->type = column_reader_wrap::parquet_type::STRING; + values->str = (char*)str_value.ptr; + values->str_len = str_value.len; + } + } + catch(std::exception &e) + { + throw std::runtime_error(error_msg(e).str()); + } + break; + + default: + { + std::stringstream err; + err << "wrong type" << std::endl; + throw std::runtime_error(err.str()); + } + + } + + return rows_read; + } + + int64_t column_reader_wrap::Skip(int64_t rows_to_skip) + { + parquet::Int32Reader* int32_reader; + parquet::Int64Reader* int64_reader; + parquet::DoubleReader* double_reader; + parquet::FloatReader* float_reader; + parquet::ByteArrayReader* byte_array_reader; + + parquet::ByteArray str_value; + int64_t rows_read; + + auto error_msg = [&](std::exception &e) + { + std::stringstream err; + err << "what() :" << e.what() << std::endl; + err << "failed to parse column id:" << this->m_col_id << " name:" <m_parquet_reader->metadata()->schema()->Column(m_col_id)->name(); + return err; + }; + + switch (get_type()) + { + case 
parquet::Type::type::INT32: + int32_reader = static_cast(m_ColumnReader.get()); + try{ + rows_read = int32_reader->Skip(rows_to_skip); + } + catch(std::exception &e) + { + throw std::runtime_error(error_msg(e).str()); + } + break; + + case parquet::Type::type::INT64: + int64_reader = static_cast(m_ColumnReader.get()); + try{ + rows_read = int64_reader->Skip(rows_to_skip); + } + catch(std::exception &e) + { + throw std::runtime_error(error_msg(e).str()); + } + break; + + case parquet::Type::type::FLOAT: + float_reader = static_cast(m_ColumnReader.get()); + try { + rows_read = float_reader->Skip(rows_to_skip); + } + catch(std::exception &e) + { + throw std::runtime_error(error_msg(e).str()); + } + break; + + case parquet::Type::type::DOUBLE: + double_reader = static_cast(m_ColumnReader.get()); + try { + rows_read = double_reader->Skip(rows_to_skip); + } + catch(std::exception &e) + { + throw std::runtime_error(error_msg(e).str()); + } + break; + + case parquet::Type::type::BYTE_ARRAY: + byte_array_reader = static_cast(m_ColumnReader.get()); + try{ + rows_read = byte_array_reader->Skip(rows_to_skip); + } + catch(std::exception &e) + { + throw std::runtime_error(error_msg(e).str()); + } + break; + + default: + { + std::stringstream err; + err << "wrong type" << std::endl; + throw std::runtime_error(err.str()); + } + } + + return rows_read; + } + + + column_reader_wrap::parquet_column_read_state column_reader_wrap::Read(const uint64_t rownum,parquet_value_t & value) + { + int64_t values_read = 0; + + if (m_rownum < (int64_t)rownum) + { //should skip + m_read_last_value = false; + + //TODO what about Skip(0) + uint64_t skipped_rows = Skip(rownum - m_rownum -1); + m_rownum += skipped_rows; + + while (((m_rownum+1) < (int64_t)rownum) || HasNext() == false) + { + uint64_t skipped_rows = Skip(rownum - m_rownum -1); + m_rownum += skipped_rows; + + if (HasNext() == false) + { + if ((m_row_grouop_id + 1) >= m_parquet_reader->metadata()->num_row_groups()) + { + m_end_of_stream = true; + return column_reader_wrap::parquet_column_read_state::PARQUET_OUT_OF_RANGE;//end-of-stream + } + else + { + m_row_grouop_id++; + m_row_group_reader = m_parquet_reader->RowGroup(m_row_grouop_id); + m_ColumnReader = m_row_group_reader->Column(m_col_id); + } + } + } //end-while + + ReadBatch(1, nullptr, nullptr, &m_last_value, &values_read); + m_read_last_value = true; + m_rownum++; + value = m_last_value; + } + else + { + if (m_read_last_value == false) + { + ReadBatch(1, nullptr, nullptr, &m_last_value, &values_read); + m_read_last_value = true; + m_rownum++; + } + + value = m_last_value; + } + + return column_reader_wrap::parquet_column_read_state::PARQUET_READ_OK; + } + +#endif + diff --git a/src/s3select/parquet_mix_types.parquet b/src/s3select/parquet_mix_types.parquet new file mode 100644 index 000000000..bc34b0788 Binary files /dev/null and b/src/s3select/parquet_mix_types.parquet differ diff --git a/src/s3select/rapidjson/.gitattributes b/src/s3select/rapidjson/.gitattributes new file mode 100644 index 000000000..6f598bb7f --- /dev/null +++ b/src/s3select/rapidjson/.gitattributes @@ -0,0 +1,22 @@ +# Set the default behavior, in case people don't have core.autocrlf set. +* text=auto + +# Explicitly declare text files you want to always be normalized and converted +# to native line endings on checkout. 
+*.cpp text +*.h text +*.txt text +*.md text +*.cmake text +*.svg text +*.dot text +*.yml text +*.in text +*.sh text +*.autopkg text +Dockerfile text + +# Denote all files that are truly binary and should not be modified. +*.png binary +*.jpg binary +*.json binary \ No newline at end of file diff --git a/src/s3select/rapidjson/.gitignore b/src/s3select/rapidjson/.gitignore new file mode 100644 index 000000000..5932e82c2 --- /dev/null +++ b/src/s3select/rapidjson/.gitignore @@ -0,0 +1,29 @@ +/bin/* +!/bin/data +!/bin/encodings +!/bin/jsonchecker +!/bin/types +!/bin/unittestschema +/build +/doc/html +/doc/doxygen_*.db +*.a + +# Temporary files created during CMake build +CMakeCache.txt +CMakeFiles +cmake_install.cmake +CTestTestfile.cmake +Makefile +RapidJSON*.cmake +RapidJSON.pc +Testing +/googletest +install_manifest.txt +Doxyfile +Doxyfile.zh-cn +DartConfiguration.tcl +*.nupkg + +# Files created by OS +*.DS_Store diff --git a/src/s3select/rapidjson/.gitmodules b/src/s3select/rapidjson/.gitmodules new file mode 100644 index 000000000..5e41f7c97 --- /dev/null +++ b/src/s3select/rapidjson/.gitmodules @@ -0,0 +1,3 @@ +[submodule "thirdparty/gtest"] + path = thirdparty/gtest + url = https://github.com/google/googletest.git diff --git a/src/s3select/rapidjson/.travis.yml b/src/s3select/rapidjson/.travis.yml new file mode 100644 index 000000000..17d8f03d6 --- /dev/null +++ b/src/s3select/rapidjson/.travis.yml @@ -0,0 +1,166 @@ +sudo: required +dist: xenial + +language: cpp +cache: + - ccache + +addons: + apt: + sources: + - ubuntu-toolchain-r-test + packages: + - cmake + - valgrind + - clang-8 +env: + global: + - USE_CCACHE=1 + - CCACHE_SLOPPINESS=pch_defines,time_macros + - CCACHE_COMPRESS=1 + - CCACHE_MAXSIZE=100M + - ARCH_FLAGS_x86='-m32' # #266: don't use SSE on 32-bit + - ARCH_FLAGS_x86_64='-msse4.2' # use SSE4.2 on 64-bit + - ARCH_FLAGS_aarch64='-march=armv8-a' + - GITHUB_REPO='Tencent/rapidjson' + - secure: "HrsaCb+N66EG1HR+LWH1u51SjaJyRwJEDzqJGYMB7LJ/bfqb9mWKF1fLvZGk46W5t7TVaXRDD5KHFx9DPWvKn4gRUVkwTHEy262ah5ORh8M6n/6VVVajeV/AYt2C0sswdkDBDO4Xq+xy5gdw3G8s1A4Inbm73pUh+6vx+7ltBbk=" + +matrix: + include: + # gcc + - env: CONF=release ARCH=x86 CXX11=ON CXX17=OFF MEMBERSMAP=OFF + compiler: gcc + arch: amd64 + - env: CONF=release ARCH=x86_64 CXX11=ON CXX17=OFF MEMBERSMAP=OFF + compiler: gcc + arch: amd64 + - env: CONF=release ARCH=x86_64 CXX11=ON CXX17=OFF MEMBERSMAP=ON + compiler: gcc + arch: amd64 + - env: CONF=debug ARCH=x86 CXX11=OFF CXX17=OFF MEMBERSMAP=OFF + compiler: gcc + arch: amd64 + - env: CONF=debug ARCH=x86_64 CXX11=OFF CXX17=OFF MEMBERSMAP=OFF + compiler: gcc + arch: amd64 + - env: CONF=debug ARCH=x86 CXX11=OFF CXX17=ON MEMBERSMAP=ON CXX_FLAGS='-D_GLIBCXX_DEBUG' + compiler: gcc + arch: amd64 + - env: CONF=debug ARCH=x86_64 CXX11=OFF CXX17=ON MEMBERSMAP=ON CXX_FLAGS='-D_GLIBCXX_DEBUG' + compiler: gcc + arch: amd64 + - env: CONF=release ARCH=aarch64 CXX11=ON CXX17=OFF MEMBERSMAP=OFF + compiler: gcc + arch: arm64 + - env: CONF=release ARCH=aarch64 CXX11=OFF CXX17=OFF MEMBERSMAP=OFF + compiler: gcc + arch: arm64 + - env: CONF=release ARCH=aarch64 CXX11=OFF CXX17=ON MEMBERSMAP=ON + compiler: gcc + arch: arm64 + # clang + - env: CONF=release ARCH=x86 CXX11=ON CXX17=OFF MEMBERSMAP=ON CCACHE_CPP2=yes + compiler: clang + arch: amd64 + - env: CONF=release ARCH=x86_64 CXX11=ON CXX17=OFF MEMBERSMAP=ON CCACHE_CPP2=yes + compiler: clang + arch: amd64 + - env: CONF=release ARCH=x86_64 CXX11=ON CXX17=OFF MEMBERSMAP=OFF CCACHE_CPP2=yes + compiler: clang + arch: amd64 + - env: CONF=debug ARCH=x86 
CXX11=OFF CXX17=OFF MEMBERSMAP=ON CCACHE_CPP2=yes + compiler: clang + arch: amd64 + - env: CONF=debug ARCH=x86_64 CXX11=OFF CXX17=OFF MEMBERSMAP=ON CCACHE_CPP2=yes + compiler: clang + arch: amd64 + - env: CONF=debug ARCH=x86 CXX11=OFF CXX17=ON MEMBERSMAP=OFF CCACHE_CPP2=yes + compiler: clang + arch: amd64 + - env: CONF=debug ARCH=x86_64 CXX11=OFF CXX17=ON MEMBERSMAP=OFF CCACHE_CPP2=yes + compiler: clang + arch: amd64 + - env: CONF=debug ARCH=aarch64 CXX11=ON CXX17=OFF MEMBERSMAP=ON CCACHE_CPP2=yes + compiler: clang + arch: arm64 + - env: CONF=debug ARCH=aarch64 CXX11=OFF CXX17=OFF MEMBERSMAP=ON CCACHE_CPP2=yes + compiler: clang + arch: arm64 + - env: CONF=debug ARCH=aarch64 CXX11=OFF CXX17=ON MEMBERSMAP=OFF CCACHE_CPP2=yes + compiler: clang + arch: arm64 + # coverage report + - env: CONF=debug ARCH=x86 GCOV_FLAGS='--coverage' CXX_FLAGS='-O0' CXX11=OFF CXX17=OFF + compiler: gcc + arch: amd64 + cache: + - ccache + - pip + after_success: + - pip install --user cpp-coveralls + - coveralls -r .. --gcov-options '\-lp' -e thirdparty -e example -e test -e build/CMakeFiles -e include/rapidjson/msinttypes -e include/rapidjson/internal/meta.h -e include/rapidjson/error/en.h + - env: CONF=debug ARCH=x86_64 GCOV_FLAGS='--coverage' CXX_FLAGS='-O0' CXX11=ON CXX17=OFF MEMBERSMAP=ON + compiler: gcc + arch: amd64 + cache: + - ccache + - pip + after_success: + - pip install --user cpp-coveralls + - coveralls -r .. --gcov-options '\-lp' -e thirdparty -e example -e test -e build/CMakeFiles -e include/rapidjson/msinttypes -e include/rapidjson/internal/meta.h -e include/rapidjson/error/en.h + - env: CONF=debug ARCH=aarch64 GCOV_FLAGS='--coverage' CXX_FLAGS='-O0' CXX11=OFF CXX17=ON + compiler: gcc + arch: arm64 + cache: + - ccache + - pip + after_success: + - pip install --user cpp-coveralls + - coveralls -r .. --gcov-options '\-lp' -e thirdparty -e example -e test -e build/CMakeFiles -e include/rapidjson/msinttypes -e include/rapidjson/internal/meta.h -e include/rapidjson/error/en.h + - script: # Documentation task + - cd build + - cmake .. -DRAPIDJSON_HAS_STDSTRING=ON -DCMAKE_VERBOSE_MAKEFILE=ON + - make travis_doc + cache: false + addons: + apt: + packages: + - doxygen + +before_install: + - if [ "x86_64" = "$(arch)" ]; then sudo apt-get install -y g++-multilib libc6-dbg:i386 --allow-unauthenticated; fi + +before_script: + # travis provides clang-7 for amd64 and clang-3.8 for arm64 + # here use clang-8 to all architectures as clang-7 is not available for arm64 + - if [ -f /usr/bin/clang++-8 ]; then + sudo update-alternatives --install /usr/bin/clang++ clang++ /usr/bin/clang++-8 1000; + sudo update-alternatives --config clang++; + export PATH=/usr/bin:$PATH; + fi + - if [ "$CXX" = "clang++" ]; then export CCACHE_CPP2=yes; fi + - ccache -s + # hack to avoid Valgrind bug (https://bugs.kde.org/show_bug.cgi?id=326469), + # exposed by merging PR#163 (using -march=native) + # TODO: Since this bug is already fixed. Remove this when valgrind can be upgraded. + - sed -i "s/-march=native//" CMakeLists.txt + - mkdir build + +script: + - if [ "$CXX" = "clang++" ]; then export CXXFLAGS="-stdlib=libc++ ${CXXFLAGS}"; fi + - > + eval "ARCH_FLAGS=\${ARCH_FLAGS_${ARCH}}" ; + (cd build && cmake + -DRAPIDJSON_HAS_STDSTRING=ON + -DRAPIDJSON_USE_MEMBERSMAP=$MEMBERSMAP + -DRAPIDJSON_BUILD_CXX11=$CXX11 + -DRAPIDJSON_BUILD_CXX17=$CXX17 + -DCMAKE_VERBOSE_MAKEFILE=ON + -DCMAKE_BUILD_TYPE=$CONF + -DCMAKE_CXX_FLAGS="$ARCH_FLAGS $GCOV_FLAGS $CXX_FLAGS" + -DCMAKE_EXE_LINKER_FLAGS=$GCOV_FLAGS + ..) 
+ - cd build
+ - make tests -j 2
+ - make examples -j 2
+ - ctest -j 2 -V `[ "$CONF" = "release" ] || echo "-E perftest"`
diff --git a/src/s3select/rapidjson/CHANGELOG.md b/src/s3select/rapidjson/CHANGELOG.md
new file mode 100644
index 000000000..1c580bd14
--- /dev/null
+++ b/src/s3select/rapidjson/CHANGELOG.md
@@ -0,0 +1,158 @@
+# Change Log
+All notable changes to this project will be documented in this file.
+This project adheres to [Semantic Versioning](http://semver.org/).
+
+## [Unreleased]
+
+## 1.1.0 - 2016-08-25
+
+### Added
+* Add GenericDocument ctor overload to specify JSON type (#369)
+* Add FAQ (#372, #373, #374, #376)
+* Add forward declaration header `fwd.h`
+* Add @PlatformIO Library Registry manifest file (#400)
+* Implement assignment operator for BigInteger (#404)
+* Add comments support (#443)
+* Adding coapp definition (#460)
+* documenttest.cpp: EXPECT_THROW when checking empty allocator (470)
+* GenericDocument: add implicit conversion to ParseResult (#480)
+* Use `<wchar.h>` with C++ linkage on Windows ARM (#485)
+* Detect little endian for Microsoft ARM targets
+* Check Nan/Inf when writing a double (#510)
+* Add JSON Schema Implementation (#522)
+* Add iostream wrapper (#530)
+* Add Jsonx example for converting JSON into JSONx (a XML format) (#531)
+* Add optional unresolvedTokenIndex parameter to Pointer::Get() (#532)
+* Add encoding validation option for Writer/PrettyWriter (#534)
+* Add Writer::SetMaxDecimalPlaces() (#536)
+* Support {0, } and {0, m} in Regex (#539)
+* Add Value::Get/SetFloat(), Value::IsLossLessFloat/Double() (#540)
+* Add stream position check to reader unit tests (#541)
+* Add Templated accessors and range-based for (#542)
+* Add (Pretty)Writer::RawValue() (#543)
+* Add Document::Parse(std::string), Document::Parse(const char*, size_t length) and related APIs.
(#553) +* Add move constructor for GenericSchemaDocument (#554) +* Add VS2010 and VS2015 to AppVeyor CI (#555) +* Add parse-by-parts example (#556, #562) +* Support parse number as string (#564, #589) +* Add kFormatSingleLineArray for PrettyWriter (#577) +* Added optional support for trailing commas (#584) +* Added filterkey and filterkeydom examples (#615) +* Added npm docs (#639) +* Allow options for writing and parsing NaN/Infinity (#641) +* Add std::string overload to PrettyWriter::Key() when RAPIDJSON_HAS_STDSTRING is defined (#698) + +### Fixed +* Fix gcc/clang/vc warnings (#350, #394, #397, #444, #447, #473, #515, #582, #589, #595, #667) +* Fix documentation (#482, #511, #550, #557, #614, #635, #660) +* Fix emscripten alignment issue (#535) +* Fix missing allocator to uses of AddMember in document (#365) +* CMake will no longer complain that the minimum CMake version is not specified (#501) +* Make it usable with old VC8 (VS2005) (#383) +* Prohibit C++11 move from Document to Value (#391) +* Try to fix incorrect 64-bit alignment (#419) +* Check return of fwrite to avoid warn_unused_result build failures (#421) +* Fix UB in GenericDocument::ParseStream (#426) +* Keep Document value unchanged on parse error (#439) +* Add missing return statement (#450) +* Fix Document::Parse(const Ch*) for transcoding (#478) +* encodings.h: fix typo in preprocessor condition (#495) +* Custom Microsoft headers are necessary only for Visual Studio 2012 and lower (#559) +* Fix memory leak for invalid regex (26e69ffde95ba4773ab06db6457b78f308716f4b) +* Fix a bug in schema minimum/maximum keywords for 64-bit integer (e7149d665941068ccf8c565e77495521331cf390) +* Fix a crash bug in regex (#605) +* Fix schema "required" keyword cannot handle duplicated keys (#609) +* Fix cmake CMP0054 warning (#612) +* Added missing include guards in istreamwrapper.h and ostreamwrapper.h (#634) +* Fix undefined behaviour (#646) +* Fix buffer overrun using PutN (#673) +* Fix rapidjson::value::Get() may returns wrong data (#681) +* Add Flush() for all value types (#689) +* Handle malloc() fail in PoolAllocator (#691) +* Fix builds on x32 platform. #703 + +### Changed +* Clarify problematic JSON license (#392) +* Move Travis to container based infrastructure (#504, #558) +* Make whitespace array more compact (#513) +* Optimize Writer::WriteString() with SIMD (#544) +* x86-64 48-bit pointer optimization for GenericValue (#546) +* Define RAPIDJSON_HAS_CXX11_RVALUE_REFS directly in clang (#617) +* Make GenericSchemaDocument constructor explicit (#674) +* Optimize FindMember when use std::string (#690) + +## [1.0.2] - 2015-05-14 + +### Added +* Add Value::XXXMember(...) overloads for std::string (#335) + +### Fixed +* Include rapidjson.h for all internal/error headers. +* Parsing some numbers incorrectly in full-precision mode (`kFullPrecisionParseFlag`) (#342) +* Fix some numbers parsed incorrectly (#336) +* Fix alignment of 64bit platforms (#328) +* Fix MemoryPoolAllocator::Clear() to clear user-buffer (0691502573f1afd3341073dd24b12c3db20fbde4) + +### Changed +* CMakeLists for include as a thirdparty in projects (#334, #337) +* Change Document::ParseStream() to use stack allocator for Reader (ffbe38614732af8e0b3abdc8b50071f386a4a685) + +## [1.0.1] - 2015-04-25 + +### Added +* Changelog following [Keep a CHANGELOG](https://github.com/olivierlacan/keep-a-changelog) suggestions. + +### Fixed +* Parsing of some numbers (e.g. "1e-00011111111111") causing assertion (#314). +* Visual C++ 32-bit compilation error in `diyfp.h` (#317). 
+ +## [1.0.0] - 2015-04-22 + +### Added +* 100% [Coverall](https://coveralls.io/r/Tencent/rapidjson?branch=master) coverage. +* Version macros (#311) + +### Fixed +* A bug in trimming long number sequence (4824f12efbf01af72b8cb6fc96fae7b097b73015). +* Double quote in unicode escape (#288). +* Negative zero roundtrip (double only) (#289). +* Standardize behavior of `memcpy()` and `malloc()` (0c5c1538dcfc7f160e5a4aa208ddf092c787be5a, #305, 0e8bbe5e3ef375e7f052f556878be0bd79e9062d). + +### Removed +* Remove an invalid `Document::ParseInsitu()` API (e7f1c6dd08b522cfcf9aed58a333bd9a0c0ccbeb). + +## 1.0-beta - 2015-04-8 + +### Added +* RFC 7159 (#101) +* Optional Iterative Parser (#76) +* Deep-copy values (#20) +* Error code and message (#27) +* ASCII Encoding (#70) +* `kParseStopWhenDoneFlag` (#83) +* `kParseFullPrecisionFlag` (881c91d696f06b7f302af6d04ec14dd08db66ceb) +* Add `Key()` to handler concept (#134) +* C++11 compatibility and support (#128) +* Optimized number-to-string and vice versa conversions (#137, #80) +* Short-String Optimization (#131) +* Local stream optimization by traits (#32) +* Travis & Appveyor Continuous Integration, with Valgrind verification (#24, #242) +* Redo all documentation (English, Simplified Chinese) + +### Changed +* Copyright ownership transferred to THL A29 Limited (a Tencent company). +* Migrating from Premake to CMAKE (#192) +* Resolve all warning reports + +### Removed +* Remove other JSON libraries for performance comparison (#180) + +## 0.11 - 2012-11-16 + +## 0.1 - 2011-11-18 + +[Unreleased]: https://github.com/Tencent/rapidjson/compare/v1.1.0...HEAD +[1.1.0]: https://github.com/Tencent/rapidjson/compare/v1.0.2...v1.1.0 +[1.0.2]: https://github.com/Tencent/rapidjson/compare/v1.0.1...v1.0.2 +[1.0.1]: https://github.com/Tencent/rapidjson/compare/v1.0.0...v1.0.1 +[1.0.0]: https://github.com/Tencent/rapidjson/compare/v1.0-beta...v1.0.0 diff --git a/src/s3select/rapidjson/CMakeLists.txt b/src/s3select/rapidjson/CMakeLists.txt new file mode 100644 index 000000000..bdfdd6779 --- /dev/null +++ b/src/s3select/rapidjson/CMakeLists.txt @@ -0,0 +1,250 @@ +CMAKE_MINIMUM_REQUIRED(VERSION 2.8) +if(POLICY CMP0025) + # detect Apple's Clang + cmake_policy(SET CMP0025 NEW) +endif() +if(POLICY CMP0054) + cmake_policy(SET CMP0054 NEW) +endif() + +SET(CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/CMakeModules) + +set(LIB_MAJOR_VERSION "1") +set(LIB_MINOR_VERSION "1") +set(LIB_PATCH_VERSION "0") +set(LIB_VERSION_STRING "${LIB_MAJOR_VERSION}.${LIB_MINOR_VERSION}.${LIB_PATCH_VERSION}") + +if (CMAKE_VERSION VERSION_LESS 3.0) + PROJECT(RapidJSON CXX) +else() + cmake_policy(SET CMP0048 NEW) + PROJECT(RapidJSON VERSION "${LIB_VERSION_STRING}" LANGUAGES CXX) +endif() + +# compile in release with debug info mode by default +if(NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE "RelWithDebInfo" CACHE STRING "Choose the type of build, options are: Debug Release RelWithDebInfo MinSizeRel." FORCE) +endif() + +# Build all binaries in a separate directory +SET(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/bin) + +option(RAPIDJSON_BUILD_DOC "Build rapidjson documentation." ON) +option(RAPIDJSON_BUILD_EXAMPLES "Build rapidjson examples." ON) +option(RAPIDJSON_BUILD_TESTS "Build rapidjson perftests and unittests." 
ON) +option(RAPIDJSON_BUILD_THIRDPARTY_GTEST + "Use gtest installation in `thirdparty/gtest` by default if available" OFF) + +option(RAPIDJSON_BUILD_CXX11 "Build rapidjson with C++11" ON) +option(RAPIDJSON_BUILD_CXX17 "Build rapidjson with C++17" OFF) +if(RAPIDJSON_BUILD_CXX11) + set(CMAKE_CXX_STANDARD 11) + set(CMAKE_CXX_STANDARD_REQUIRED TRUE) +endif() + +option(RAPIDJSON_BUILD_ASAN "Build rapidjson with address sanitizer (gcc/clang)" OFF) +option(RAPIDJSON_BUILD_UBSAN "Build rapidjson with undefined behavior sanitizer (gcc/clang)" OFF) + +option(RAPIDJSON_ENABLE_INSTRUMENTATION_OPT "Build rapidjson with -march or -mcpu options" ON) + +option(RAPIDJSON_HAS_STDSTRING "" OFF) +if(RAPIDJSON_HAS_STDSTRING) + add_definitions(-DRAPIDJSON_HAS_STDSTRING) +endif() + +option(RAPIDJSON_USE_MEMBERSMAP "" OFF) +if(RAPIDJSON_USE_MEMBERSMAP) + add_definitions(-DRAPIDJSON_USE_MEMBERSMAP=1) +endif() + +find_program(CCACHE_FOUND ccache) +if(CCACHE_FOUND) + set_property(GLOBAL PROPERTY RULE_LAUNCH_COMPILE ccache) + set_property(GLOBAL PROPERTY RULE_LAUNCH_LINK ccache) + if (CMAKE_CXX_COMPILER_ID MATCHES "Clang") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Qunused-arguments -fcolor-diagnostics") + endif() +endif(CCACHE_FOUND) + +if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + if(RAPIDJSON_ENABLE_INSTRUMENTATION_OPT AND NOT CMAKE_CROSSCOMPILING) + if(CMAKE_SYSTEM_PROCESSOR STREQUAL "powerpc" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "ppc64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "ppc64le") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mcpu=native") + else() + #FIXME: x86 is -march=native, but doesn't mean every arch is this option. To keep original project's compatibility, I leave this except POWER. + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=native") + endif() + endif() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -Werror") + set(EXTRA_CXX_FLAGS -Weffc++ -Wswitch-default -Wfloat-equal -Wconversion -Wsign-conversion) + if (RAPIDJSON_BUILD_CXX11 AND CMAKE_VERSION VERSION_LESS 3.1) + if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS "4.7.0") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++0x") + else() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") + endif() + elseif (RAPIDJSON_BUILD_CXX17 AND NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS "5.0") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17") + endif() + if (RAPIDJSON_BUILD_ASAN) + if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS "4.8.0") + message(FATAL_ERROR "GCC < 4.8 doesn't support the address sanitizer") + else() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address") + endif() + endif() + if (RAPIDJSON_BUILD_UBSAN) + if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS "4.9.0") + message(FATAL_ERROR "GCC < 4.9 doesn't support the undefined behavior sanitizer") + else() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined") + endif() + endif() +elseif (CMAKE_CXX_COMPILER_ID MATCHES "Clang") + if(NOT CMAKE_CROSSCOMPILING) + if(CMAKE_SYSTEM_PROCESSOR STREQUAL "powerpc" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "ppc64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "ppc64le") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -mcpu=native") + else() + #FIXME: x86 is -march=native, but doesn't mean every arch is this option. To keep original project's compatibility, I leave this except POWER. 
+ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -march=native") + endif() + endif() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wall -Wextra -Werror -Wno-missing-field-initializers") + set(EXTRA_CXX_FLAGS -Weffc++ -Wswitch-default -Wfloat-equal -Wconversion -Wimplicit-fallthrough) + if (RAPIDJSON_BUILD_CXX11 AND CMAKE_VERSION VERSION_LESS 3.1) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++11") + elseif (RAPIDJSON_BUILD_CXX17 AND NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS "4.0") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -std=c++17") + endif() + if (RAPIDJSON_BUILD_ASAN) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address") + endif() + if (RAPIDJSON_BUILD_UBSAN) + if (CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined-trap -fsanitize-undefined-trap-on-error") + else() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=undefined") + endif() + endif() +elseif (CMAKE_CXX_COMPILER_ID STREQUAL "MSVC") + add_definitions(-D_CRT_SECURE_NO_WARNINGS=1) + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /EHsc") + # CMake >= 3.10 should handle the above CMAKE_CXX_STANDARD fine, otherwise use /std:c++XX with MSVC >= 19.10 + if (RAPIDJSON_BUILD_CXX11 AND NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS "19.10") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /std:c++11") + elseif (RAPIDJSON_BUILD_CXX17 AND NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS "19.14") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /std:c++17") + endif() + # Always compile with /WX + if(CMAKE_CXX_FLAGS MATCHES "/WX-") + string(REGEX REPLACE "/WX-" "/WX" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") + else() + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /WX") + endif() +elseif (CMAKE_CXX_COMPILER_ID MATCHES "XL") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -qarch=auto") +endif() + +#add extra search paths for libraries and includes +SET(INCLUDE_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/include" CACHE PATH "The directory the headers are installed in") +SET(LIB_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/lib" CACHE STRING "Directory where lib will install") +SET(DOC_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/share/doc/${PROJECT_NAME}" CACHE PATH "Path to the documentation") + +IF(UNIX OR CYGWIN) + SET(_CMAKE_INSTALL_DIR "${LIB_INSTALL_DIR}/cmake/${PROJECT_NAME}") +ELSEIF(WIN32) + SET(_CMAKE_INSTALL_DIR "${CMAKE_INSTALL_PREFIX}/cmake") +ENDIF() +SET(CMAKE_INSTALL_DIR "${_CMAKE_INSTALL_DIR}" CACHE PATH "The directory cmake files are installed in") + +include_directories(${CMAKE_CURRENT_SOURCE_DIR}/include) + +if(RAPIDJSON_BUILD_DOC) + add_subdirectory(doc) +endif() + +add_custom_target(travis_doc) +add_custom_command(TARGET travis_doc + COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/travis-doxygen.sh) + +if(RAPIDJSON_BUILD_EXAMPLES) + add_subdirectory(example) +endif() + +if(RAPIDJSON_BUILD_TESTS) + if(MSVC11) + # required for VS2012 due to missing support for variadic templates + add_definitions(-D_VARIADIC_MAX=10) + endif(MSVC11) + add_subdirectory(test) + include(CTest) +endif() + +# pkg-config +IF (UNIX OR CYGWIN) + CONFIGURE_FILE (${CMAKE_CURRENT_SOURCE_DIR}/${PROJECT_NAME}.pc.in + ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.pc + @ONLY) + INSTALL (FILES ${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}.pc + DESTINATION "${LIB_INSTALL_DIR}/pkgconfig" + COMPONENT pkgconfig) +ENDIF() + +install(FILES readme.md + DESTINATION "${DOC_INSTALL_DIR}" + COMPONENT doc) + +install(DIRECTORY include/rapidjson + DESTINATION "${INCLUDE_INSTALL_DIR}" + COMPONENT dev) + +install(DIRECTORY example/ + DESTINATION "${DOC_INSTALL_DIR}/examples" + COMPONENT 
diff --git a/src/s3select/rapidjson/CMakeModules/FindGTestSrc.cmake b/src/s3select/rapidjson/CMakeModules/FindGTestSrc.cmake
new file mode 100644
index 000000000..f3cb8c990
--- /dev/null
+++ b/src/s3select/rapidjson/CMakeModules/FindGTestSrc.cmake
@@ -0,0 +1,30 @@
+
+SET(GTEST_SEARCH_PATH
+    "${GTEST_SOURCE_DIR}"
+    "${CMAKE_CURRENT_LIST_DIR}/../thirdparty/gtest/googletest")
+
+IF(UNIX)
+    IF(RAPIDJSON_BUILD_THIRDPARTY_GTEST)
+        LIST(APPEND GTEST_SEARCH_PATH "/usr/src/gtest")
+    ELSE()
+        LIST(INSERT GTEST_SEARCH_PATH 1 "/usr/src/gtest")
+    ENDIF()
+ENDIF()
+
+FIND_PATH(GTEST_SOURCE_DIR
+    NAMES CMakeLists.txt src/gtest_main.cc
+    PATHS ${GTEST_SEARCH_PATH})
+
+
+# Debian installs the gtest include directory in /usr/include, so the include
+# directory has to be looked up separately from the source directory.
+FIND_PATH(GTEST_INCLUDE_DIR
+    NAMES gtest/gtest.h
+    PATH_SUFFIXES include
+    HINTS ${GTEST_SOURCE_DIR}
+    PATHS ${GTEST_SEARCH_PATH})
+
+INCLUDE(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(GTestSrc DEFAULT_MSG
+    GTEST_SOURCE_DIR
+    GTEST_INCLUDE_DIR)
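find_package_handle_standard_args() publishes the result of this module through the standard *_FOUND variable, so a test tree can build the located gtest sources in-tree. A minimal sketch, assuming the CMakeModules directory has been added to CMAKE_MODULE_PATH and that FPHSA exposes the result as GTESTSRC_FOUND:

    # GTESTSRC_FOUND is set by FindPackageHandleStandardArgs once both
    # GTEST_SOURCE_DIR and GTEST_INCLUDE_DIR have been resolved.
    list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/CMakeModules)
    find_package(GTestSrc)
    if(GTESTSRC_FOUND)
        enable_testing()
        add_subdirectory(${GTEST_SOURCE_DIR} ${CMAKE_BINARY_DIR}/googletest)
        include_directories(SYSTEM ${GTEST_INCLUDE_DIR})
    endif()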
diff --git a/src/s3select/rapidjson/RapidJSON.pc.in b/src/s3select/rapidjson/RapidJSON.pc.in
new file mode 100644
index 000000000..6afb079f8
--- /dev/null
+++ b/src/s3select/rapidjson/RapidJSON.pc.in
@@ -0,0 +1,7 @@
+includedir=@INCLUDE_INSTALL_DIR@
+
+Name: @PROJECT_NAME@
+Description: A fast JSON parser/generator for C++ with both SAX/DOM style API
+Version: @LIB_VERSION_STRING@
+URL: https://github.com/Tencent/rapidjson
+Cflags: -I${includedir}
diff --git a/src/s3select/rapidjson/RapidJSONConfig.cmake.in b/src/s3select/rapidjson/RapidJSONConfig.cmake.in
new file mode 100644
index 000000000..c25d31258
--- /dev/null
+++ b/src/s3select/rapidjson/RapidJSONConfig.cmake.in
@@ -0,0 +1,25 @@
+################################################################################
+# CMake minimum version required
+cmake_minimum_required(VERSION 3.0)
+
+################################################################################
+# RapidJSON source dir
+set( RapidJSON_SOURCE_DIR "@CONFIG_SOURCE_DIR@")
+
+################################################################################
+# RapidJSON build dir
+set( RapidJSON_DIR "@CONFIG_DIR@")
+
+################################################################################
+# Compute paths
+get_filename_component(RapidJSON_CMAKE_DIR "${CMAKE_CURRENT_LIST_FILE}" PATH)
+
+set( RapidJSON_INCLUDE_DIR "@RapidJSON_INCLUDE_DIR@" )
+set( RapidJSON_INCLUDE_DIRS "@RapidJSON_INCLUDE_DIR@" )
+message(STATUS "RapidJSON found. Headers: ${RapidJSON_INCLUDE_DIRS}")
+
+if(NOT TARGET rapidjson)
+  add_library(rapidjson INTERFACE IMPORTED)
+  set_property(TARGET rapidjson PROPERTY
+    INTERFACE_INCLUDE_DIRECTORIES ${RapidJSON_INCLUDE_DIRS})
+endif()
diff --git a/src/s3select/rapidjson/RapidJSONConfigVersion.cmake.in b/src/s3select/rapidjson/RapidJSONConfigVersion.cmake.in
new file mode 100644
index 000000000..25741fc09
--- /dev/null
+++ b/src/s3select/rapidjson/RapidJSONConfigVersion.cmake.in
@@ -0,0 +1,10 @@
+SET(PACKAGE_VERSION "@LIB_VERSION_STRING@")
+
+IF (PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+    SET(PACKAGE_VERSION_EXACT "true")
+ENDIF (PACKAGE_FIND_VERSION VERSION_EQUAL PACKAGE_VERSION)
+IF (NOT PACKAGE_FIND_VERSION VERSION_GREATER PACKAGE_VERSION)
+    SET(PACKAGE_VERSION_COMPATIBLE "true")
+ELSE (NOT PACKAGE_FIND_VERSION VERSION_GREATER PACKAGE_VERSION)
+    SET(PACKAGE_VERSION_UNSUITABLE "true")
+ENDIF (NOT PACKAGE_FIND_VERSION VERSION_GREATER PACKAGE_VERSION)
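The version file accepts any requested version at or below the configured @LIB_VERSION_STRING@ (PACKAGE_VERSION_COMPATIBLE), marks an exact match separately (PACKAGE_VERSION_EXACT), and rejects newer requests (PACKAGE_VERSION_UNSUITABLE). A sketch of the resulting consumer behaviour; the 1.1 minimum is illustrative:

    # find_package() evaluates RapidJSONConfigVersion.cmake with
    # PACKAGE_FIND_VERSION set to the requested version; the package is
    # only accepted when PACKAGE_VERSION_COMPATIBLE comes back true.
    find_package(RapidJSON 1.1)
    if(RapidJSON_FOUND)
        message(STATUS "RapidJSON accepted at the requested version")
    endif()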
diff --git a/src/s3select/rapidjson/appveyor.yml b/src/s3select/rapidjson/appveyor.yml
new file mode 100644
index 000000000..4044ba664
--- /dev/null
+++ b/src/s3select/rapidjson/appveyor.yml
@@ -0,0 +1,102 @@
+version: 1.1.0.{build}
+
+configuration:
+- Debug
+- Release
+
+environment:
+  matrix:
+    # - VS_VERSION: 9 2008
+    #   VS_PLATFORM: win32
+    # - VS_VERSION: 9 2008
+    #   VS_PLATFORM: x64
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2013
+      VS_VERSION: 10 2010
+      VS_PLATFORM: win32
+      CXX11: OFF
+      CXX17: OFF
+      MEMBERSMAP: OFF
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2013
+      VS_VERSION: 10 2010
+      VS_PLATFORM: x64
+      CXX11: OFF
+      CXX17: OFF
+      MEMBERSMAP: ON
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2013
+      VS_VERSION: 11 2012
+      VS_PLATFORM: win32
+      CXX11: OFF
+      CXX17: OFF
+      MEMBERSMAP: ON
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2013
+      VS_VERSION: 11 2012
+      VS_PLATFORM: x64
+      CXX11: OFF
+      CXX17: OFF
+      MEMBERSMAP: OFF
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2013
+      VS_VERSION: 12 2013
+      VS_PLATFORM: win32
+      CXX11: OFF
+      CXX17: OFF
+      MEMBERSMAP: OFF
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2013
+      VS_VERSION: 12 2013
+      VS_PLATFORM: x64
+      CXX11: OFF
+      CXX17: OFF
+      MEMBERSMAP: ON
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
+      VS_VERSION: 14 2015
+      VS_PLATFORM: win32
+      CXX11: OFF
+      CXX17: OFF
+      MEMBERSMAP: ON
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2015
+      VS_VERSION: 14 2015
+      VS_PLATFORM: x64
+      CXX11: OFF
+      CXX17: OFF
+      MEMBERSMAP: OFF
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
+      VS_VERSION: 15 2017
+      VS_PLATFORM: win32
+      CXX11: OFF
+      CXX17: OFF
+      MEMBERSMAP: OFF
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
+      VS_VERSION: 15 2017
+      VS_PLATFORM: x64
+      CXX11: OFF
+      CXX17: OFF
+      MEMBERSMAP: ON
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
+      VS_VERSION: 15 2017
+      VS_PLATFORM: x64
+      CXX11: ON
+      CXX17: OFF
+      MEMBERSMAP: OFF
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2017
+      VS_VERSION: 15 2017
+      VS_PLATFORM: x64
+      CXX11: OFF
+      CXX17: ON
+      MEMBERSMAP: OFF
+    - APPVEYOR_BUILD_WORKER_IMAGE: Visual Studio 2019
+      VS_VERSION: 16 2019
+      VS_PLATFORM: x64
+      CXX11: OFF
+      CXX17: ON
+      MEMBERSMAP: ON
+
+before_build:
+- git submodule update --init --recursive
+- cmake -H. -BBuild/VS -G "Visual Studio %VS_VERSION%" -DCMAKE_GENERATOR_PLATFORM=%VS_PLATFORM% -DCMAKE_VERBOSE_MAKEFILE=ON -DBUILD_SHARED_LIBS=true -DRAPIDJSON_BUILD_CXX11=%CXX11% -DRAPIDJSON_BUILD_CXX17=%CXX17% -DRAPIDJSON_USE_MEMBERSMAP=%MEMBERSMAP% -Wno-dev
+
+build:
+  project: Build\VS\RapidJSON.sln
+  parallel: true
+  verbosity: minimal
+
+test_script:
+- cd Build\VS && if %CONFIGURATION%==Debug (ctest --verbose -E perftest --build-config %CONFIGURATION%) else (ctest --verbose --build-config %CONFIGURATION%)
diff --git a/src/s3select/rapidjson/bin/data/abcde.txt b/src/s3select/rapidjson/bin/data/abcde.txt
new file mode 100644
index 000000000..6a8165460
--- /dev/null
+++ b/src/s3select/rapidjson/bin/data/abcde.txt
@@ -0,0 +1 @@
+abcde
\ No newline at end of file
diff --git a/src/s3select/rapidjson/bin/data/glossary.json b/src/s3select/rapidjson/bin/data/glossary.json
new file mode 100644
index 000000000..d6e6ca150
--- /dev/null
+++ b/src/s3select/rapidjson/bin/data/glossary.json
@@ -0,0 +1,22 @@
+{
+    "glossary": {
+        "title": "example glossary",
+        "GlossDiv": {
+            "title": "S",
+            "GlossList": {
+                "GlossEntry": {
+                    "ID": "SGML",
+                    "SortAs": "SGML",
+                    "GlossTerm": "Standard Generalized Markup Language",
+                    "Acronym": "SGML",
+                    "Abbrev": "ISO 8879:1986",
+                    "GlossDef": {
+                        "para": "A meta-markup language, used to create markup languages such as DocBook.",
+                        "GlossSeeAlso": ["GML", "XML"]
+                    },
+                    "GlossSee": "markup"
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/s3select/rapidjson/bin/data/menu.json b/src/s3select/rapidjson/bin/data/menu.json
new file mode 100644
index 000000000..539c3af20
--- /dev/null
+++ b/src/s3select/rapidjson/bin/data/menu.json
@@ -0,0 +1,27 @@
+{"menu": {
+    "header": "SVG Viewer",
+    "items": [
+        {"id": "Open"},
+        {"id": "OpenNew", "label": "Open New"},
+        null,
+        {"id": "ZoomIn", "label": "Zoom In"},
+        {"id": "ZoomOut", "label": "Zoom Out"},
+        {"id": "OriginalView", "label": "Original View"},
+        null,
+        {"id": "Quality"},
+        {"id": "Pause"},
+        {"id": "Mute"},
+        null,
+        {"id": "Find", "label": "Find..."},
+        {"id": "FindAgain", "label": "Find Again"},
+        {"id": "Copy"},
+        {"id": "CopyAgain",
"label": "Copy Again"}, + {"id": "CopySVG", "label": "Copy SVG"}, + {"id": "ViewSVG", "label": "View SVG"}, + {"id": "ViewSource", "label": "View Source"}, + {"id": "SaveAs", "label": "Save As"}, + null, + {"id": "Help"}, + {"id": "About", "label": "About Adobe CVG Viewer..."} + ] +}} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/data/readme.txt b/src/s3select/rapidjson/bin/data/readme.txt new file mode 100644 index 000000000..c53bfb8b7 --- /dev/null +++ b/src/s3select/rapidjson/bin/data/readme.txt @@ -0,0 +1 @@ +sample.json is obtained from http://code.google.com/p/json-test-suite/downloads/detail?name=sample.zip diff --git a/src/s3select/rapidjson/bin/data/sample.json b/src/s3select/rapidjson/bin/data/sample.json new file mode 100644 index 000000000..30930e765 --- /dev/null +++ b/src/s3select/rapidjson/bin/data/sample.json @@ -0,0 +1,3315 @@ +{ + "a": { + "6U閆崬밺뀫颒myj츥휘:$薈mY햚#rz飏+玭V㭢뾿愴YꖚX亥ᮉ푊\u0006垡㐭룝\"厓ᔧḅ^Sqpv媫\"⤽걒\"˽Ἆ?ꇆ䬔未tv{DV鯀Tἆl凸g\\㈭ĭ즿UH㽤": null, + "b茤z\\.N": [[ + "ZL:ᅣዎ*Y|猫劁櫕荾Oj为1糕쪥泏S룂w࡛Ᏺ⸥蚙)", + { + "\"䬰ỐwD捾V`邀⠕VD㺝sH6[칑.:醥葹*뻵倻aD\"": true, + "e浱up蔽Cr෠JK軵xCʨ<뜡癙Y獩ケ齈X/螗唻?<蘡+뷄㩤쳖3偑犾&\\첊xz坍崦ݻ鍴\"嵥B3㰃詤豺嚼aqJ⑆∥韼@\u000b㢊\u0015L臯.샥": false, + "l?Ǩ喳e6㔡$M꼄I,(3᝝縢,䊀疅뉲B㴔傳䂴\u0088㮰钘ꜵ!ᅛ韽>": -5514085325291784739, + "o㮚?\"춛㵉<\/﬊ࠃ䃪䝣wp6ἀ䱄[s*S嬈貒pᛥ㰉'돀": [{ + "(QP윤懊FI<ꃣ『䕷[\"珒嶮?%Ḭ壍಻䇟0荤!藲끹bd浶tl\u2049#쯀@僞": {"i妾8홫": { + ",M맃䞛K5nAㆴVN㒊햬$n꩑&ꎝ椞阫?/ṏ세뉪1x쥼㻤㪙`\"$쟒薟B煌܀쨝ଢ଼2掳7㙟鴙X婢\u0002": "Vዉ菈᧷⦌kﮞఈnz*﷜FM\"荭7ꍀ-VR<\/';䁙E9$䩉\f @s?퍪o3^衴cඎ䧪aK鼟q䆨c{䳠5mᒲՙ蘹ᮩ": { + "F㲷JGo⯍P덵x뒳p䘧☔\"+ꨲ吿JfR㔹)4n紬G练Q፞!C|": true, + "p^㫮솎oc.೚A㤠??r\u000f)⾽⌲們M2.䴘䩳:⫭胃\\፾@Fᭌ\\K": false, + "蟌Tk愙潦伩": { + "a<\/@ᾛ慂侇瘎": -7271305752851720826, + "艓藬/>၄ṯ,XW~㲆w": {"E痧郶)㜓ha朗!N赻瞉駠uC\u20ad辠x퓮⣫P1ࠫLMMX'M刼唳됤": null, + "P쓫晥%k覛ዩIUᇸ滨:噐혲lMR5䋈V梗>%幽u頖\\)쟟": null, + "eg+昉~矠䧞难\b?gQ쭷筝\\eꮠNl{ಢ哭|]Mn銌╥zꖘzⱷ⭤ᮜ^": [ + -1.30142114406914976E17, + -1.7555215491128452E-19, + null, + "渾㨝ߏ牄귛r?돌?w[⚞ӻ~廩輫㼧/", + -4.5737191805302129E18, + null, + "xy࿑M[oc셒竓Ⓔx?뜓y䊦>-D켍(&&?XKkc꩖ﺸᏋ뵞K伕6ী)딀P朁yW揙?훻魢傎EG碸9類៌g踲C⟌aEX舲:z꒸许", + 3808159498143417627, + null, + {"m試\u20df1{G8&뚈h홯J<\/": { + "3ஸ厠zs#1K7:rᥞoꅔꯧ&띇鵼鞫6跜#赿5l'8{7㕳(b/j\"厢aq籀ꏚ\u0015厼稥": [ + -2226135764510113982, + true, + null, + { + "h%'맞S싅Hs&dl슾W0j鿏MםD놯L~S-㇡R쭬%": null, + "⟓咔謡칲\u0000孺ꛭx旑檉㶆?": null, + "恇I転;￸B2Y`z\\獓w,놏濐撐埵䂄)!䶢D=ഭ㴟jyY": { + "$ࡘt厛毣ൢI芁<겿骫⫦6tr惺a": [ + 6.385779736989334E-20, + false, + true, + true, + [ + -6.891946211462334E-19, + null, + { + "]-\\Ꟑ1/薓❧Ὂ\\l牑\u0007A郃)阜ᇒᓌ-塯`W峬G}SDb㬨Q臉⮻빌O鞟톴첂B㺱<ƈmu챑J㴹㷳픷Oㆩs": { + "\"◉B\"pᶉt骔J꩸ᄇᛐi╰栛K쉷㉯鐩!㈐n칍䟅難>盥y铿e୔蒏M貹ヅ8嘋퀯䉶ጥ㏢殊뻳\"絧╿ꉑ䠥?∃蓊{}㣣Gk긔H1哵峱": false, + "6.瀫cN䇮F㧺?\\椯=ڈT䘆4␘8qv": -3.5687501019676885E-19, + "Q?yऴr혴{஀䳘p惭f1ﹸ䅷䕋贲<ྃᄊ繲hq\\b|#QSTs1c-7(䵢\u2069匏絘ꯉ:l毴汞t戀oෟᵶ뮱፣-醇Jx䙬䐁햢0࣫ᡁgrㄛ": "\u0011_xM/蘇Chv;dhA5.嗀绱V爤ﰦi뵲M", + "⏑[\"ugoy^儣횎~U\\섯겜論l2jw஌yD腅̂\u0019": true, + "ⵯɇ䐲᫿࢚!㯢l샅笶戮1꣖0Xe": null, + "劅f넀識b宁焊E찓橵G!ʱ獓뭔雩괛": [{"p⹣켙[q>燣䍃㞽ᩲx:쓤삘7玑퇼0<\/q璂ᑁ[Z\\3䅵䧳\u0011㤧|妱緒C['췓Yꞟ3Z鳱雼P錻BU씧U`ᢶg蓱>.1ӧ譫'L_5V䏵Ц": [ + false, + false, + {"22䂍盥N霂얢躰e9⑩_뵜斌n@B}$괻Yᐱ@䧋V\"☒-諯cV돯ʠ": true, + "Ű螧ᔼ檍鍎땒딜qꄃH뜣<獧ूCY吓⸏>XQ㵡趌o끬k픀빯a(ܵ甏끆୯/6Nᪧ}搚ᆚ짌P牰泱鈷^d꣟#L삀\"㕹襻;k㸊\\f+": true, + "쎣\",|⫝̸阊x庿k잣v庅$鈏괎炔k쬪O_": [ + "잩AzZGz3v愠ꉈⵎ?㊱}S尳௏p\r2>췝IP䘈M)w|\u000eE", + -9222726055990423201, + null, + [ + false, + {"´킮'뮤쯽Wx讐V,6ᩪ1紲aႈ\u205czD": [ + -930994432421097536, + 3157232031581030121, + "l貚PY䃛5@䭄귻m㎮琸f": 1.0318894506812084E-19, + "࢜⩢Ш䧔1肽씮+༎ᣰ闺馺窃䕨8Mƶq腽xc(៯夐J5굄䕁Qj_훨/~価.䢵慯틠퇱豠㼇Qﵘ$DuSp(8Uญ<\/ಟ룴𥳐ݩ$": 8350772684161555590, + "ㆎQ䄾\u001bpᩭ${[諟^^骴᤮b^ㅥI┧T㉇⾞\"绦r䰂f矩'-7䡭桥Dz兔V9谶居㺍ᔊ䩯덲.\u001eL0ὅㅷ釣": [{ + "<쯬J卷^숞u࠯䌗艞R9닪g㐾볎a䂈歖意:%鐔|ﵤ|y}>;2,覂⶚啵tb*仛8乒㓶B࿠㯉戩oX 貘5V嗆렽낁߼4h䧛ꍺM空\\b꿋貼": 8478577078537189402, + "VD*|吝z~h譺aᯒ": { + 
"YI췢K<\/濳xNne玗rJo쾘3핰鴊\"↱AR:ࢷ\"9?\"臁說)?誚ꊏe)_D翾W?&F6J@뺾ꍰNZ醊Z쾈വH嶿?炫㷱鬰M겈᭨b,⻁鈵P䕡䀠८ⱄ홎鄣": { + "@?k2鶖㋮\"Oರ K㨇廪儲\u0017䍾J?);\b*묀㗠섳햭1MC V": null, + "UIICP!BUA`ᢈ㋸~袩㗪⾒=fB﮴l1ꡛ죘R辂여ҳ7쮡<䩲`熕8頁": 4481809488267626463, + "Y?+8먙ᚔ鋳蜩럶1㥔y璜౩`": [ + null, + 1.2850335807501874E-19, + "~V2", + 2035406654801997866, + { + "<숻1>\"": -8062468865199390827, + "M㿣E]}qwG莎Gn᝶(ꔙ\\D⬲iꇲs寢t駇S뀡ꢜ": false, + "pꝤ㎏9W%>M;-U璏f(^j1?&RB隧 忓b똊E": "#G?C8.躬ꥯ'?냪#< 渟&헿란zpo왓Kj}鷧XﻘMツb䕖;㪻", + "vE풤幉xz뱕쫥Ug㦲aH} ᣟp:鬼YᰟH3镔ᴚ斦\\鏑r*2橱G⼔F/.j": true, + "RK좬뎂a홠f*f㱉ᮍ⦋潙㨋Gu곌SGI3I뿐\\F',)t`荁蘯囯ﮉ裲뇟쥼_ገ驪▵撏ᕤV": 1.52738225997956557E18, + "^k굲䪿꠹B逤%F㱢漥O披M㽯镞竇霒i꼂焅륓\u00059=皫之눃\u2047娤閍銤唫ၕb<\/w踲䔼u솆맚,䝒ᝳ'/it": "B餹饴is権ꖪ怯ꦂẉဎt\"!凢谵⧿0\\<=(uL䷍刨쑪>俆揓Cy襸Q힆䆭涷<\/ᐱ0ɧ䗾䚹\\ኜ?ꄢᇘ`䴢{囇}᠈䴥X4퓪檄]ꥷ/3謒ሴn+g騍X", + "GgG꽬[(嫓몍6\u0004궍宩㙻/>\u0011^辍dT腪hxǑ%ꊇk,8(W⧂結P鬜O": [{ + "M㴾c>\\ᓲ\u0019V{>ꤩ혙넪㭪躂TS-痴໸闓⍵/徯O.M㏥ʷD囎⧔쁳휤T??鉬뇙=#ꢫ숣BX䭼<\/d똬졬g榿)eꨋﯪ좇첻\u001a\u0011\";~쓆BH4坋攊7힪", + "iT:L闞椕윚*滛gI≀Wਟඊ'ꢆ縺뱹鮚Nꩁ᧬蕼21줧\\䋯``⍐\\㏱鳨": 1927052677739832894, + "쮁缦腃g]礿Y㬙 fヺSɪ꾾N㞈": [ + null, + null, + { + "!t,灝Y 1䗉罵?c饃호䉂Cᐭ쒘z(즽sZG㬣sഖE4뢜㓕䏞丮Qp簍6EZឪ겛fx'ꩱQ0罣i{k锩*㤴㯞r迎jTⲤ渔m炅肳": [ + -3.3325685522591933E18, + [{"㓁5]A䢕1룥BC?Ꙍ`r룔Ⳛ䙡u伲+\u0001്o": [ + null, + 4975309147809803991, + null, + null, + {"T팘8Dﯲ稟MM☻㧚䥧/8ﻥ⥯aXLaH\"顾S☟耲ît7fS෉놁뮔/ꕼ䓈쁺4\\霶䠴ᩢ<\/t4?죵>uD5➶༆쉌럮⢀秙䘥\u20972ETR3濡恆vB? ~鸆\u0005": { + "`閖m璝㥉b뜴?Wf;?DV콜\u2020퍉౓擝宏ZMj3mJ먡-傷뱙yח㸷꥿ ໘u=M읝!5吭L4v\\?ǎ7C홫": null, + "|": false, + "~Ztᛋ䚘\\擭㗝傪W陖+㗶qᵿ蘥ᙄp%䫎)}=⠔6ᮢS湟-螾-mXH?cp": 448751162044282216, + "\u209fad놹j檋䇌ᶾ梕㉝bוּ": {"?苴ꩠD䋓帘5騱qﱖPF?☸珗顒yU ᡫcb䫎 S@㥚gꮒ쎘泴멖\\:I鮱TZ듒ᶨQ3+f7캙\"?\f풾\\o杞紟﻽M.⏎靑OP": [ + -2.6990368911551596E18, + [{"䒖@<᰿<\/⽬tTr腞&G%᳊秩蜰擻f㎳?S㵧\r*k뎾-乢겹隷j軛겷0룁鮁": {")DO0腦:춍逿:1㥨่!蛍樋2": [{ + ",ꌣf侴笾m๫ꆽ?1?U?\u0011ꌈꂇ": { + "x捗甠nVq䅦w`CD⦂惺嘴0I#vỵ} \\귂S끴D얾?Ԓj溯\"v餄a": { + "@翙c⢃趚痋i\u0015OQ⍝lq돆Y0pࢥ3쉨䜩^<8g懥0w)]䊑n洺o5쭝QL댊랖L镈Qnt⪟㒅십q헎鳒⮤眉ᔹ梠@O縠u泌ㄘb榚癸XޔFtj;iC": false, + "I&뱋゘|蓔䔕측瓯%6ᗻHW\\N1貇#?僐ᗜgh᭪o'䗈꽹Rc욏/蔳迄༝!0邔䨷푪8疩)[쭶緄㇈୧ፐ": { + "B+:ꉰ`s쾭)빼C羍A䫊pMgjdx䐝Hf9᥸W0!C樃'蘿f䫤סи\u0017Jve? 
覝f둀⬣퓉Whk\"஼=չﳐ皆笁BIW虨쫓F廰饞": -642906201042308791, + "sb,XcZ<\/m㉹ ;䑷@c䵀s奤⬷7`ꘖ蕘戚?Feb#輜}p4nH⬮eKL트}": [ + "RK鳗z=袤Pf|[,u욺", + "Ẏᏻ罯뉋⺖锅젯㷻{H䰞쬙-쩓D]~\u0013O㳢gb@揶蔉|kᦂ❗!\u001ebM褐sca쨜襒y⺉룓", + null, + null, + true, + -1.650777344339075E-19, + false, + "☑lꄆs힨꤇]'uTന⌳농].1⋔괁沰\"IWഩ\u0019氜8쟇䔻;3衲恋,窌z펏喁횗?4?C넁问?ᥙ橭{稻Ⴗ_썔", + "n?]讇빽嗁}1孅9#ꭨ靶v\u0014喈)vw祔}룼쮿I", + -2.7033457331882025E18, + { + ";⚃^㱋x:饬ኡj'꧵T☽O㔬RO婎?향ᒭ搩$渣y4i;(Q>꿘e8q": "j~錘}0g;L萺*;ᕭꄮ0l潛烢5H▄쳂ꏒוֹꙶT犘≫x閦웧v", + "~揯\u2018c4職렁E~ᑅቚꈂ?nq뎤.:慹`F햘+%鉎O瀜쟏敛菮⍌浢<\/㮺紿P鳆ࠉ8I-o?#jﮨ7v3Dt赻J9": null, + "ࣝW䌈0ꍎqC逖,횅c၃swj;jJS櫍5槗OaB>D踾Y": {"㒰䵝F%?59.㍈cᕨ흕틎ḏ㋩B=9IېⓌ{:9.yw}呰ㆮ肒᎒tI㾴62\"ዃ抡C﹬B<\/촋jo朣", + [ + -7675533242647793366, + {"ᙧ呃:[㒺쳀쌡쏂H稈㢤\u001dᶗGG-{GHྻຊꡃ哸䵬;$?&d\\⥬こN圴됤挨-'ꕮ$PU%?冕눖i魁q騎Q": [ + false, + [[ + 7929823049157504248, + [[ + true, + "Z菙\u0017'eꕤ᱕l,0\\X\u001c[=雿8蠬L<\/낲긯W99g톉4ퟋb㝺\u0007劁'!麕Q궈oW:@X၎z蘻m絙璩귓죉+3柚怫tS捇蒣䝠-擶D[0=퉿8)q0ٟ", + "唉\nFA椭穒巯\\䥴䅺鿤S#b迅獘 ﶗ꬘\\?q1qN犠pX꜅^䤊⛤㢌[⬛휖岺q唻ⳡ틍\"㙙Eh@oA賑㗠y必Nꊑᗘ", + -2154220236962890773, + -3.2442003245397908E18, + "Wᄿ筠:瘫퀩?o貸q⊻(᎞KWf宛尨h^残3[U(='橄", + -7857990034281549164, + 1.44283696979059942E18, + null, + {"ꫯAw跭喀 ?_9\"Aty背F=9缉ྦྷ@;?^鞀w:uN㘢Rỏ": [ + 7.393662029337442E15, + 3564680942654233068, + [ + false, + -5253931502642112194, + "煉\\辎ೆ罍5⒭1䪁䃑s䎢:[e5}峳ﴱn騎3?腳Hyꏃ膼N潭錖,Yᝋ˜YAၓ㬠bG렣䰣:", + true, + null, + { + "⒛'P&%죮|:⫶춞": -3818336746965687085, + "钖m<\/0ݎMtF2Pk=瓰୮洽겎.": [[ + -8757574841556350607, + -3045234949333270161, + null, + { + "Ꮬr輳>⫇9hU##w@귪A\\C 鋺㘓ꖐ梒뒬묹㹻+郸嬏윤'+g<\/碴,}ꙫ>손;情d齆J䬁ຩ撛챝탹/R澡7剌tꤼ?ặ!`⏲睤\u00002똥଴⟏": null, + "\u20f2ܹe\\tAꥍư\\x当뿖렉禛;G檳ﯪS૰3~㘠#[J<}{奲 5箉⨔{놁<\/釿抋,嚠/曳m&WaOvT赋皺璑텁": [[ + false, + null, + true, + -5.7131445659795661E18, + "萭m䓪D5|3婁ఞ>蠇晼6nﴺPp禽羱DS<睓닫屚삏姿", + true, + [ + -8759747687917306831, + { + ">ⓛ\t,odKr{䘠?b퓸C嶈=DyEᙬ@ᴔ쨺芛髿UT퓻春<\/yꏸ>豚W釺N뜨^?꽴﨟5殺ᗃ翐%>퍂ဿ䄸沂Ea;A_\u0005閹殀W+窊?Ꭼd\u0013P汴G5썓揘": 4.342729067882445E-18, + "Q^즾眆@AN\u0011Kb榰냎Y#䝀ꀒᳺ'q暇睵s\"!3#I⊆畼寤@HxJ9": false, + "⿾D[)袨㇩i]웪䀤ᛰMvR<蟏㣨": {"v퇓L㪱ꖣ豛톤\\곱#kDTN": [{ + "(쾴䡣,寴ph(C\"㳶w\"憳2s馆E!n!&柄<\/0Pꈗſ?㿳Qd鵔": {"娇堰孹L錮h嵅⛤躏顒?CglN束+쨣ﺜ\\MrH": {"獞䎇둃ቲ弭팭^ꄞ踦涟XK錆쳞ឌ`;੶S炥騞ଋ褂B៎{ڒ䭷ᶼ靜pI荗虶K$": [{"◖S~躘蒉꫿輜譝Q㽙闐@ᢗ¥E榁iء5┄^B[絮跉ᰥ遙PWi3wㄾⵀDJ9!w㞣ᄎ{듒ꓓb6\\篴??c⼰鶹⟧\\鮇ꮇ": [[ + 654120831325413520, + -1.9562073916357608E-19, + { + "DC(昐衵ἡ긙갵姭|֛[t": 7.6979110359897907E18, + "J␅))嫼❳9Xfd飉j7猬ᩉ+⤻眗벎E鰉Zᄊ63zၝ69}ZᶐL崭ᦥ⡦靚⋛ꎨ~i㨃咊ꧭo䰠阀3C(": -3.5844809362512589E17, + "p꣑팱쒬ꎑ뛡Ꙩ挴恍胔&7ᔈ묒4Hd硶훐㎖zꢼ豍㿢aሃ=<\/湉鵲EӅ%$F!퍶棌孼{O駍਺geu+": ")\u001b잓kŀX쩫A밁®ڣ癦狢)扔弒p}k縕ꩋ,䃉tࣼi", + "ァF肿輸<솄G-䢹䛸ꊏl`Tqꕗ蒞a氷⸅ᴉ蠰]S/{J왲m5{9.uέ~㕚㣹u>x8U讁B덺襪盎QhVS맅킃i识{벂磄Iහ䙅xZy/抍૭Z鲁-霳V据挦ℒ": null, + "㯛|Nꐸb7ⵐb?拠O\u0014ކ?-(EꞨ4ꕷᄤYᯕOW瞺~螸\"욿ќe㺰\"'㌢ƐW\u0004瞕>0?V鷵엳": true, + "뤥G\\迋䠿[庩'꼡\u001aiᩮV쯁ᳪ䦪Ô;倱ନ뛁誈": null, + "쥹䄆䚟Q榁䎐᢭<\/2㕣p}HW蟔|䃏꿈ꚉ锳2Pb7㙑Tⅹᵅ": { + "Y?֭$>#cVBꩨ:>eL蒁務": { + "86柡0po 䏚&-捑Ћ祌<\/휃-G*㶢הּ쩍s㶟餇c걺yu꽎還5*턧簕Og婥SꝐ": null, + "a+葞h٥ࠆ裈嗫ﵢ5輙퀟ᛜ,QDﹼ⟶Y騠锪E_|x죗j侵;m蜫轘趥?븅w5+mi콛L": { + ";⯭ﱢ!买F⽍柤鶂n䵣V㫚墱2렾ELEl⣆": [ + true, + -3.6479311868339015E-18, + -7270785619461995400, + 3.334081886177621E18, + 2.581457786298155E18, + -6.605252412954115E-20, + -3.9232347037744167E-20, + { + "B6㊕.k1": null, + "ZAꄮJ鮷ᳱo갘硥鈠䠒츼": { + "ᕅ}럡}.@y陪鶁r業'援퀉x䉴ﵴl퍘):씭脴ᥞhiꃰblﲂ䡲엕8߇M㶭0燋標挝-?PCwe⾕J碻Ᾱ䬈䈥뷰憵賣뵓痬+": {"a췩v礗X⋈耓ፊf罅靮!㔽YYᣓw澍33⎔芲F|\"䜏T↮輦挑6ᓘL侘?ᅥ]덆1R௯✎餘6ꏽ<\/௨\\?q喷ꁫj~@ulq": {"嗫欆뾔Xꆹ4H㌋F嵧]ࠎ]㠖1ꞤT<$m뫏O i댳0䲝i": {"?෩?\u20cd슮|ꯆjs{?d7?eNs⢚嫥氂䡮쎱:鑵롟2hJꎒﯭ鱢3춲亄:뼣v䊭諱Yj択cVmR䩃㘬T\"N홝*ै%x^F\\_s9보zz4淗?q": [ + null, + "?", + 2941869570821073737, + "{5{殇0䝾g6밖퍋臩綹R$䖭j紋釰7sXI繳漪행y", + false, + "aH磂?뛡#惇d婅?Fe,쐘+늵䍘\"3r瘆唊勐j⳧࠴ꇓ<\/唕윈x⬌讣䋵%拗ᛆⰿ妴᝔M2㳗必꧂淲?ゥ젯檢<8끒MidX䏒3᳻Q▮佐UT|⤪봦靏⊏", + [[{ + "颉(&뜸귙{y^\"P퟉춝Ჟ䮭D顡9=?}Y誱<$b뱣RvO8cH煉@tk~4ǂ⤧⩝屋SS;J{vV#剤餓ᯅc?#a6D,s": [ + -7.8781018564821536E16, + true, + [ + 
-2.28770899315832371E18, + false, + -1.0863912140143876E-20, + -6282721572097446995, + 6767121921199223078, + -2545487755405567831, + false, + null, + -9065970397975641765, + [ + -5.928721243413937E-20, + {"6촊\u001a홯kB0w撨燠룉{绎6⳹!턍贑y▾鱧ժ[;7ᨷ∀*땒䪮1x霆Hᩭ☔\"r䝐7毟ᝰr惃3ꉭE+>僒澐": [ + "Ta쎩aƝt쵯ⰪVb", + [ + -5222472249213580702, + null, + -2851641861541559595, + null, + 4808804630502809099, + 5657671602244269874, + "5犲﨣4mᥣ?yf젫꾯|䋬잁$`Iⳉﴷ扳兝,'c", + false, + [ + null, + { + "DyUIN쎾M仼惀⮥裎岶泭lh扠\u001e礼.tEC癯튻@_Qd4c5S熯A<\/\6U윲蹴Q=%푫汹\\\u20614b[௒C⒥Xe⊇囙b,服3ss땊뢍i~逇PA쇸1": -2.63273619193485312E17, + "Mq꺋貘k휕=nK硍뫞輩>㾆~἞ࡹ긐榵l⋙Hw뮢帋M엳뢯v⅃^": 1877913476688465125, + "ᶴ뻗`~筗免⚽টW˃⽝b犳䓺Iz篤p;乨A\u20ef쩏?疊m㝀컩뫡b탔鄃ᾈV(遢珳=뎲ିeF仢䆡谨8t0醄7㭧瘵⻰컆r厡궥d)a阄፷Ed&c﯄伮1p": null, + "⯁w4曢\"(欷輡": "\"M᭫]䣒頳B\\燧ࠃN㡇j姈g⊸⺌忉ꡥF矉স%^", + "㣡Oᄦ昵⫮Y祎S쐐級㭻撥>{I$": -378474210562741663, + "䛒掷留Q%쓗1*1J*끓헩ᦢ﫫哉쩧EↅIcꅡ\\?ⴊl귛顮4": false, + "寔愆샠5]䗄IH贈=d﯊/偶?ॊn%晥D視N򗘈'᫂⚦|X쵩넽z질tskxDQ莮Aoﱻ뛓": true, + "钣xp?&\u001e侉/y䴼~?U篔蘚缣/I畚?Q绊": -3034854258736382234, + "꺲໣眀)⿷J暘pИfAV삕쳭Nꯗ4々'唄ⶑ伻㷯騑倭D*Ok꧁3b␽_<\/챣Xm톰ၕ䆄`*fl㭀暮滠毡?": [ + "D男p`V뙸擨忝븪9c麺`淂⢦Yw⡢+kzܖ\fY1䬡H歁)벾Z♤溊-혰셢?1<-\u0005;搢Tᐁle\\ᛵߓﭩ榩訝-xJ;巡8깊蠝ﻓU$K": { + "Vꕡ諅搓W=斸s︪vﲜ츧$)iꡟ싉e寳?ጭムVથ嵬i楝Fg<\/Z|៪ꩆ-5'@ꃱ80!燱R쇤t糳]罛逇dṌ֣XHiͦ{": true, + "Ya矲C멗Q9膲墅携휻c\\딶G甔<\/.齵휴": -1.1456247877031811E-19, + "z#.OO￝J": -8263224695871959017, + "崍_3夼ᮟ1F븍뽯ᦓ鴭V豈Ь": [{ + "N蒬74": null, + "yuB?厅vK笗!ᔸcXQ旦컶P-녫mᄉ麟_": "1R@ 톘xa_|﩯遘s槞d!d껀筤⬫薐焵먑D{\\6k共倌☀G~AS_D\"딟쬚뮥馲렓쓠攥WTMܭ8nX㩴䕅檹E\u0007ﭨN 2 ℆涐ꥏ꠵3▙玽|됨_\u2048", + "恐A C䧩G": {":M큣5e들\\ꍀ恼ᔄ靸|I﨏$)n": { + "|U䬫㟯SKV6ꛤ㗮\bn봻䲄fXT:㾯쳤'笓0b/ೢC쳖?2浓uO.䰴": "ཐ꼋e?``,ᚇ慐^8ꜙNM䂱\u0001IᖙꝧM'vKdꌊH牮r\\O@䊷ᓵ쀆(fy聻i툺\"?<\/峧ࣞ⓺ᤤ쵒߯ꎺ騬?)刦\u2072l慪y꺜ﲖTj+u", + "뽫hh䈵w>1ⲏ쐭V[ⅎ\\헑벑F_㖝⠗㫇h恽;῝汰ᱼ瀖J옆9RR셏vsZ柺鶶툤r뢱橾/ꉇ囦FGm\"謗ꉦ⨶쒿⥡%]鵩#ᖣ_蹎 u5|祥?O", + null, + 2.0150326776036215E-19, + null, + true, + false, + true, + {"\fa᭶P捤WWc᠟f뚉ᬏ퓗ⳀW睹5:HXH=q7x찙X$)모r뚥ᆟ!Jﳸf": [ + -2995806398034583407, + [ + 6441377066589744683, + "Mﶒ醹i)Gἦ廃s6몞 KJ౹礎VZ螺费힀\u0000冺업{谥'꡾뱻:.ꘘ굄奉攼Di᷑K鶲y繈욊阓v㻘}枭캗e矮1c?휐\"4\u0005厑莔뀾墓낝⽴洗ṹ䇃糞@b1\u0016즽Y轹", + { + "1⽕⌰鉟픏M㤭n⧴ỼD#%鐘⊯쿼稁븣몐紧ᅇ㓕ᛖcw嬀~ഌ㖓(0r⧦Q䑕髍ര铂㓻R儮\"@ꇱm❈௿᦯頌8}㿹犴?xn잆꥽R": 2.07321075750427366E18, + "˳b18㗈䃟柵Z曆VTAu7+㛂cb0﯑Wp執<\/臋뭡뚋刼틮荋벲TLP预庰܈G\\O@VD'鱃#乖끺*鑪ꬳ?Mޞdﭹ{␇圯쇜㼞顄︖Y홡g": [{ + "0a,FZ": true, + "2z̬蝣ꧦ驸\u0006L↛Ḣ4๚뿀'?lcwᄧ㐮!蓚䃦-|7.飑挴.樵*+1ﮊ\u0010ꛌ%貨啺/JdM:똍!FBe?鰴㨗0O财I藻ʔWA᫓G쳛u`<\/I": [{ + "$τ5V鴐a뾆両環iZp頻යn븃v": -4869131188151215571, + "*즢[⦃b礞R◚nΰꕢH=귰燙[yc誘g䆌?ଜ臛": { + "洤湌鲒)⟻\\䥳va}PeAMnN[": "㐳ɪ/(軆lZR,Cp殍ȮN啷\"3B婴?i=r$펽ᤐ쀸", + "阄R4㒿㯔ڀ69ZᲦ2癁핌噗P崜#\\-쭍袛&鐑/$4童V꩑_ZHA澢fZ3": {"x;P{긳:G閉:9?活H": [ + "繺漮6?z犞焃슳\">ỏ[Ⳛ䌜녏䂹>聵⼶煜Y桥[泥뚩MvK$4jtロ", + "E#갶霠좭㦻ୗ먵F+䪀o蝒ba쮎4X㣵 h", + -335836610224228782, + null, + null, + [ + "r1᫩0>danjY짿bs{", + [ + -9.594464059325631E-23, + 1.0456894622831624E-20, + null, + 5.803973284253454E-20, + -8141787905188892123, + true, + -4735305442504973382, + 9.513150514479281E-20, + "7넳$螔忷㶪}䪪l짴\u0007鹁P鰚HF銏ZJﳴ/⍎1ᷓ忉睇ᜋ쓈x뵠m䷐窥Ꮤ^\u0019ᶌ偭#ヂt☆၃pᎍ臶䟱5$䰵&๵分숝]䝈뉍♂坎\u0011<>", + "C蒑貑藁lﰰ}X喇몛;t밿O7/᯹f\u0015kI嘦<ዴ㟮ᗎZ`GWퟩ瑹࡮ᅴB꿊칈??R校s脚", + { + "9珵戬+AU^洘拻ቒy柭床'粙XG鞕᠜繀伪%]hC,$輙?Ut乖Qm떚W8઼}~q⠪rU䤶CQ痗ig@#≲t샌f㈥酧l;y闥ZH斦e⸬]j⸗?ঢ拻퀆滌": null, + "畯}㧢J罚帐VX㨑>1ꢶkT⿄蘥㝑o|<嗸層沈挄GEOM@-䞚䧰$만峬輏䠱V✩5宸-揂D'㗪yP掶7b⠟J㕻SfP?d}v㼂Ꮕ'猘": { + "陓y잀v>╪": null, + "鬿L+7:됑Y=焠U;킻䯌잫!韎ஔ\f": { + "駫WmGጶ": { + "\\~m6狩K": -2586304199791962143, + "ႜࠀ%͑l⿅D.瑢Dk%0紪dḨTI픸%뗜☓s榗኉\"?V籄7w髄♲쟗翛歂E䤓皹t ?)ᄟ鬲鐜6C": { + "_췤a圷1\u000eB-XOy缿請∎$`쳌eZ~杁튻/蜞`塣৙\"⪰\"沒l}蕌\\롃荫氌.望wZ|o!)Hn獝qg}": null, + "kOSܧ䖨钨:಼鉝ꭝO醧S`십`ꓭ쭁ﯢN&Et㺪馻㍢ⅳ㢺崡ຊ蜚锫\\%ahx켨|ż劻ꎄ㢄쐟A躊᰹p譞綨Ir쿯\u0016ﵚOd럂*僨郀N*b㕷63z": { + ":L5r+T㡲": [{ + "VK泓돲ᮙRy㓤➙Ⱗ38oi}LJቨ7Ó㹡৘*q)1豢⛃e᫛뙪壥镇枝7G藯g㨛oI䄽 孂L缊ꋕ'EN`": -2148138481412096818, + "`⛝ᘑ$(खꊲ⤖ᄁꤒ䦦3=)]Y㢌跨NĴ驳줟秠++d孳>8ᎊ떩EꡣSv룃 쯫أ?#E|᭙㎐?zv:5祉^⋑V": [ + 
-1.4691944435285607E-19, + 3.4128661569395795E17, + "㐃촗^G9佭龶n募8R厞eEw⺡_ㆱ%⼨D뉄퉠2ꩵᛅⳍ搿L팹Lවn=\"慉념ᛮy>!`g!풲晴[/;?[v겁軇}⤳⤁핏∌T㽲R홓遉㓥", + "愰_⮹T䓒妒閤둥?0aB@㈧g焻-#~跬x<\/舁P݄ꐡ=\\׳P\u0015jᳪᢁq;㯏l%᭗;砢觨▝,謁ꍰGy?躤O黩퍋Y㒝a擯\n7覌똟_䔡]fJ晋IAS", + 4367930106786121250, + -4.9421193149720582E17, + null, + { + ";ᄌ똾柉곟ⰺKpፇ䱻ฺ䖝{o~h!eꁿ઻욄ښ\u0002y?xUd\u207c悜ꌭ": [ + 1.6010824122815255E-19, + [ + "宨︩9앉檥pr쇷?WxLb", + "氇9】J玚\u000f옛呲~ 輠1D嬛,*mW3?n휂糊γ虻*ᴫ꾠?q凐趗Ko↦GT铮", + "㶢ថmO㍔k'诔栀Z蛟}GZ钹D", + false, + -6.366995517736813E-20, + -4894479530745302899, + null, + "V%᫡II璅䅛䓎풹ﱢ/pU9se되뛞x梔~C)䨧䩻蜺(g㘚R?/Ự[忓C뾠ࢤc왈邠买?嫥挤풜隊枕", + ",v碍喔㌲쟚蔚톬៓ꭶ", + 3.9625444752577524E-19, + null, + [ + "kO8란뿒䱕馔b臻⍟隨\"㜮鲣Yq5m퐔K#ꢘug㼈ᝦ=P^6탲@䧔%$CqSw铜랊0&m⟭<\/a逎ym\u0013vᯗ": true, + "洫`|XN뤮\u0018詞=紩鴘_sX)㯅鿻Ố싹": 7.168252736947373E-20, + "ꛊ饤ﴏ袁(逊+~⽫얢鈮艬O힉7D筗S곯w操I斞᠈븘蓷x": [[[[ + -7.3136069426336952E18, + -2.13572396712722688E18, + { + "硢3㇩R:o칢行E<=\u0018ၬYuH!\u00044U%卝炼2>\u001eSi$⓷ꒈ'렢gᙫ番ꯒ㛹럥嶀澈v;葷鄕x蓎\\惩+稘UEᖸﳊ㊈壋N嫿⏾挎,袯苷ኢ\\x|3c": 7540762493381776411, + "?!*^ᢏ窯?\u0001ڔꙃw虜돳FgJ?&⨫*uo籤:?}ꃹ=ٴ惨瓜Z媊@ત戹㔏똩Ԛ耦Wt轁\\枒^\\ꩵ}}}ꀣD\\]6M_⌫)H豣:36섘㑜": { + ";홗ᰰU஋㙛`D왔ཿЃS회爁\u001b-㢈`봆?盂㛣듿ᦾ蒽_AD~EEຆ㊋(eNwk=Rɠ峭q\"5Ἠ婾^>'ls\n8QAK)- Q䲌mo펹L_칍樖庫9꩝쪹ᘹ䑖瀍aK ?*趤f뭓廝p=磕", + "哑z懅ᤏ-ꍹux쀭", + [ + true, + 3998739591332339511, + "ጻ㙙?᳸aK<\/囩U`B3袗ﱱ?\"/k鏔䍧2l@쿎VZ쨎/6ꃭ脥|B?31+on颼-ꮧ,O嫚m ࡭`KH葦:粘i]aSU쓙$쐂f+詛頖b", + [{"^<9<箝&絡;%i﫡2攑紴\\켉h쓙-柂䚝ven\u20f7浯-Ꮏ\r^훁䓚헬\u000e?\\ㅡֺJ떷VOt": [{ + "-௄卶k㘆혐஽y⎱㢬sS઄+^瞥h;ᾷj;抭\u0003밫f<\/5Ⱗ裏_朻%*[-撵䷮彈-芈": { + "㩩p3篊G|宮hz䑊o곥j^Co0": [ + 653239109285256503, + {"궲?|\":N1ۿ氃NZ#깩:쇡o8킗ࡊ[\"됸Po핇1(6鰏$膓}⽐*)渽J'DN<썙긘毦끲Ys칖": { + "2Pr?Xjㆠ?搮/?㓦柖馃5뚣Nᦼ|铢r衴㩖\"甝湗ܝ憍": "\"뾯i띇筝牻$珲/4ka $匝휴译zbAᩁꇸ瑅&뵲衯ꎀᆿ7@ꈋ'ᶨH@ᠴl+", + "7뢽뚐v?4^ꊥ_⪛.>pởr渲<\/⢕疻c\"g䇘vU剺dஔ鮥꒚(dv祴X⼹\\a8y5坆": true, + "o뼄B욞羁hr﷔폘뒚⿛U5pꪴfg!6\\\"爑쏍䢱W<ﶕ\\텣珇oI/BK뺡'谑♟[Ut븷亮g(\"t⡎有?ꬊ躺翁艩nl F⤿蠜": 1695826030502619742, + "ۊ깖>ࡹ햹^ⵕ쌾BnN〳2C䌕tʬ]찠?ݾ2饺蹳ぶꌭ訍\"◹ᬁD鯎4e滨T輀ﵣ੃3\u20f3킙D瘮g\\擦+泙ၧ 鬹ﯨַ肋7놷郟lP冝{ߒhড়r5,꓋": null, + "ΉN$y{}2\\N﹯ⱙK'8ɜͣwt,.钟廣䎘ꆚk媄_": null, + "䎥eᾆᝦ읉,Jުn岪㥐s搖謽䚔5t㯏㰳㱊ZhD䃭f絕s鋡篟a`Q鬃┦鸳n_靂(E4迠_觅뷝_宪D(NL疶hL追V熑%]v肫=惂!㇫5⬒\u001f喺4랪옑": { + "2a輍85먙R㮧㚪Sm}E2yꆣꫨrRym㐱膶ᔨ\\t綾A☰.焄뙗9<쫷챻䒵셴᭛䮜.<\/慌꽒9叻Ok䰊Z㥪幸k": [ + null, + true, + {"쌞쐍": { + "▟GL K2i뛱iQ\"̠.옛1X$}涺]靎懠ڦ늷?tf灟ݞゟ{": 1.227740268699265E-19, + "꒶]퓚%ฬK❅": [{ + "(ෛ@Ǯっ䧼䵤[aテൖvEnAdU렖뗈@볓yꈪ,mԴ|꟢캁(而첸죕CX4Y믅": "2⯩㳿ꢚ훀~迯?᪑\\啚;4X\u20c2襏B箹)俣eỻw䇄", + "75༂f詳䅫ꐧ鏿 }3\u20b5'∓䝱虀f菼Iq鈆﨤g퍩)BFa왢d0뮪痮M鋡nw∵謊;ꝧf美箈ḋ*\u001c`퇚퐋䳫$!V#N㹲抗ⱉ珎(V嵟鬒_b㳅\u0019": null, + "e_m@(i㜀3ꦗ䕯䭰Oc+-련0뭦⢹苿蟰ꂏSV䰭勢덥.ྈ爑Vd,ᕥ=퀍)vz뱊ꈊB_6듯\"?{㒲&㵞뵫疝돡믈%Qw限,?\r枮\"? 
N~癃ruࡗdn&": null, + "㉹&'Pfs䑜공j<\/?|8oc᧨L7\\pXᭁ 9᪘": -2.423073789014103E18, + "䝄瑄䢸穊f盈᥸,B뾧푗횵B1쟢f\u001f凄": "魖⚝2儉j꼂긾껢嗎0ࢇ纬xI4](੓`蕞;픬\fC\"斒\")2櫷I﹥迧", + "ퟯ詔x悝령+T?Bg⥄섅kOeQ큼㻴*{E靼6氿L缋\u001c둌๶-㥂2==-츫I즃㠐Lg踞ꙂEG貨鞠\"\u0014d'.缗gI-lIb䋱ᎂDy缦?": null, + "紝M㦁犿w浴詟棓쵫G:䜁?V2ힽ7N*n&㖊Nd-'ຊ?-樹DIv⊜)g䑜9뉂ㄹ푍阉~ꅐ쵃#R^\u000bB䌎䦾]p.䀳": [{"ϒ爛\"ꄱ︗竒G䃓-ま帳あ.j)qgu扐徣ਁZ鼗A9A鸦甈!k蔁喙:3T%&㠘+,䷞|챽v䚞문H<\/醯r셓㶾\\a볜卺zE䝷_죤ဵ뿰᎟CB": [ + 6233512720017661219, + null, + -1638543730522713294, + false, + -8901187771615024724, + [ + 3891351109509829590, + true, + false, + -1.03836679125188032E18, + { + "j랎:g曞ѕᘼ}链N", + -1.1103819473845426E-19, + true, + [ + true, + null, + -7.9091791735309888E17, + true, + {"}蔰鋈+ꐨ啵0?g*사%`J?*": [{ + "\"2wG?yn,癷BK\\龞䑞x?蠢": -3.7220345009853505E-19, + ";饹়❀)皋`噿焒j(3⿏w>偍5X薙婏聿3aFÆÝ": "2,ꓴg?_섦_>Y쪥션钺;=趘F~?D㨫\bX?㹤+>/믟kᠪ멅쬂Uzỵ]$珧`m雁瑊ඖ鯬cꙉ梢f묛bB", + "♽n$YjKiXX*GO贩鏃豮祴遞K醞眡}ꗨv嵎꼷0୸+M菋eH徸J꣆:⼐悥B켽迚㯃b諂\u000bjꠜ碱逮m8": [ + "푷᣺ﻯd8ﱖ嬇ភH鹎⡱᱅0g:果6$GQ췎{vᷧYy-脕x偹砡館⮸C蓼ꏚ=軄H犠G谖ES詤Z蠂3l봟hᅭ7䦹1GPQG癸숟~[#駥8zQ뛣J소obg,", + null, + 1513751096373485652, + null, + -6.851466660824754E-19, + {"䩂-⴮2ٰK솖풄꾚ႻP앳1H鷛wmR䗂皎칄?醜<\/&ࠧ㬍X濬䵈K`vJ륒Q/IC묛!;$vϑ": { + "@-ꚗxྐྵ@m瘬\u0010U絨ﮌ驐\\켑寛넆T=tQ㭤L연@脸삯e-:⩼u㎳VQ㋱襗ຓ<Ⅶ䌸cML3+\u001e_C)r\\9+Jn\\Pﺔ8蠱檾萅Pq鐳话T䄐I": -1.80683891195530061E18, + "ᷭዻU~ཷsgSJ`᪅'%㖔n5픆桪砳峣3獮枾䌷⊰呀": { + "Ş੉䓰邟自~X耤pl7间懑徛s첦5ਕXexh⬖鎥᐀nNr(J컗|ૃF\"Q겮葲놔엞^겄+㈆话〾희紐G'E?飕1f❼텬悚泬먐U睬훶Qs": false, + "(\u20dag8큽튣>^Y{뤋.袊䂓;_g]S\u202a꽬L;^'#땏bႌ?C緡<䝲䲝断ꏏ6\u001asD7IK5Wxo8\u0006p弊⼂ꯍ扵\u0003`뵂픋%ꄰ⫙됶l囏尛+䗅E쟇\\": [ + true, + { + "\n鱿aK㝡␒㼙2촹f;`쾏qIࡔG}㝷䐍瓰w늮*粅9뒪ㄊCj倡翑閳R渚MiUO~仨䜶RꙀA僈㉋⦋n{㖥0딿벑逦⥻0h薓쯴Ꝼ": [ + 5188716534221998369, + 2579413015347802508, + 9.010794400256652E-21, + -6.5327297761238093E17, + 1.11635352494065523E18, + -6656281618760253655, + { + "": ")?", + "TWKLꑙ裑꺔UE俸塑炌Ũ᜕-o\"徚#": {"M/癟6!oI51ni퐚=댡>xꍨ\u0004 ?": { + "皭": {"⢫䋖>u%w잼<䕏꘍P䋵$魋拝U䮎緧皇Y훂&|羋ꋕ잿cJ䨈跓齳5\u001a삱籷I꿾뤔S8㌷繖_Yឯ䲱B턼O歵F\\l醴o_欬6籏=D": [ + false, + true, + {"Mt|ꏞD|F궣MQ뵕T,띺k+?㍵i": [ + 7828094884540988137, + false, + { + "!༦鯠,&aﳑ>[euJꏽ綷搐B.h": -7648546591767075632, + "-n켧嘰{7挐毄Y,>❏螵煫乌pv醑Q嶚!|⌝責0왾덢ꏅ蛨S\\)竰'舓Q}A釡5#v": 3344849660672723988, + "8閪麁V=鈢1녈幬6棉⪮둌\u207d᚛驉ꛃ'r䆉惏ै|bἧﺢᒙ<=穊强s혧eꮿ慩⌡ \\槳W븧J檀C,ᘉ의0俯퀉M;筷ࣴ瓿{늊埂鄧_4揸Nn阼Jੵ˥(社": true, + "o뼀vw)4A뢵(a䵢)p姃뛸\u000fK#KiQp\u0005ꅍ芅쏅": null, + "砥$ꥸ┇耽u斮Gc{z빔깎밇\\숰\u001e괷各㶇쵿_ᴄ+h穢p촀Ნ䃬z䝁酳ӂ31xꔄ1_砚W렘G#2葊P ": [ + -3709692921720865059, + null, + [ + 6669892810652602379, + -135535375466621127, + "뎴iO}Z? 
馢녱稹ᄾ䐩rSt帤넆&7i騏멗畖9誧鄜'w{Ͻ^2窭외b㑎粖i矪ꦨ탪跣)KEㆹ\u0015V8[W?⽉>'kc$䨘ᮛ뉻٬M5", + 1.10439588726055846E18, + false, + -4349729830749729097, + null, + [ + false, + "_蠢㠝^䟪/D녒㡋ỎC䒈판\u0006એq@O펢%;鹐쏌o戥~A[ꡉ濽ỳ&虃᩾荣唙藍茨Ig楡꒻M窓冉?", + true, + 2.17220752996421728E17, + -5079714907315156164, + -9.960375974658589E-20, + "ᾎ戞༒", + true, + false, + [[ + "ⶉᖌX⧕홇)g엃⹪x뚐癟\u0002", + -5185853871623955469, + { + "L㜤9ợㇶK鐰⋓V뽋˖!斫as|9"፬䆪?7胜&n薑~": -2.11545634977136992E17, + "O8뀩D}캖q萂6༣㏗䈓煮吽ਆᎼDᣘ폛;": false, + "YTᡅ^L㗎cbY$pᣞ縿#fh!ꘂb삵玊颟샞ဢ$䁗鼒몁~rkH^:닮먖츸륈⪺쒉砉?㙓扫㆕꣒`R䢱B酂?C뇞<5Iޚ讳騕S瞦z": null, + "\\RB?`mG댵鉡幐物䵎有5*e骄T㌓ᛪ琾駒Ku\u001a[柆jUq8⋈5鿋츿myﻗ?雍ux঴?": 5828963951918205428, + "n0晅:黯 xu씪^퓞cB㎊ᬍ⺘٤փ~B岚3㥕擄vᲂ~F?C䶖@$m~忔S왖㲚?챴⊟W#벌{'㰝I䝠縁s樘\\X뢻9핡I6菍ㄛ8쯶]wॽ0L\"q": null, + "x增줖j⦦t䏢᎙㛿Yf鼘~꫓恄4惊\u209c": "oOhbᤃ᛽z&Bi犑\\3B㩬劇䄑oŁ쨅孥멁ຖacA㖫借㞝vg싰샂㐜#譞⢤@k]鋰嘘䜾L熶塥_<\/⍾屈ﮊ_mY菹t뙺}Ox=w鮮4S1ꐩמּ'巑", + "㗓蟵ꂾe蠅匳(JP䗏෸\u0089耀왲": [{ + "ᤃ㵥韎뤽\r?挥O쯡⇔㞚3伖\u0005P⋪\"D궣QLn(⚘罩䩢Ŏv䤘尗뼤됛O淽鋋闚r崩a{4箙{煷m6〈": { + "l곺1L": { + "T'ਤ?砅|੬Km]䄩\"(࿶<\/6U爢䫈倔郴l2㴱^줣k'L浖L鰄Rp今鎗⒗C얨M훁㡧ΘX粜뫈N꤇輊㌻켑#㮮샶-䍗룲蠝癜㱐V>=\\I尬癤t=": 7648082845323511446, + "鋞EP:<\/_`ၧe混ㇹBd⯢㮂驋\\q碽饩跓྿ᴜ+j箿렏㗑yK毢宸p謹h䦹乕U媣\\炤": [[ + "3", + [ + true, + 3.4058271399411134E-20, + true, + "揀+憱f逮@먻BpW曉\u001a㣐⎊$n劈D枤㡞좾\u001aᛁ苔౩闝1B䷒Ṋ݋➐ꀞꐃ磍$t੤_:蘺⮼(#N", + 697483894874368636, + [ + "vᘯ锴)0訶}䳅⩚0O壱韈ߜ\u0018*U鍾䏖=䧉뽑单휻ID쿇嘗?ꌸῬ07", + -5.4858784319382006E18, + 7.5467775182251151E18, + -8911128589670029195, + -7531052386005780140, + null, + [ + null, + true, + [[{ + "1欯twG<\/Q:0怯押殃탷聫사<ỗꕧ蚨䡁nDꌕ\u001c녬~蓩鲃g儊>ꏡl㻿/⑷*챳6㻜W毤緛ﹺᨪ4\u0013뺚J髬e3쳸䘦伧?恪&{L掾p+꬜M䏊d娘6": { + "2p첼양棜h䜢﮶aQ*c扦v︥뮓kC寵횂S銩&ǝ{O*य़iH`U큅ࡓr䩕5ꄸ?`\\᧫?ᮼ?t〟崾훈k薐ì/iy꤃뵰z1<\/AQ#뿩8jJ1z@u䕥": 1.82135747285215155E18, + "ZdN &=d년ᅆ'쑏ⅉ:烋5&៏ᄂ汎来L㯄固{钧u\\㊏튚e摑&t嗄ꖄUb❌?m䴘熚9EW": [{ + "ଛ{i*a(": -8.0314147546006822E17, + "⫾ꃆY\u000e+W`௸ \"M뒶+\\뷐lKE}(NT킶Yj選篒쁶'jNQ硾(똡\\\"逌ⴍy? IRꜘ὞鄬﨧:M\\f⠋Cꚜ쫊ᚴNV^D䕗ㅖἔIao꿬C⍏8": [ + 287156137829026547, + { + "H丞N逕⯲": {"": { + "7-;枮阕梒9ᑄZ": [[[[ + null, + { + "": [[[[ + -7.365909561486078E-19, + 2948694324944243408, + null, + [ + true, + "荒\"并孷䂡쵼9o䀘F\u0002龬7⮹Wz%厖/*? 
a*R枈㌦됾g뒠䤈q딄㺿$쮸tᶎ릑弣^鏎<\/Y鷇驜L鿽<\/춋9Mᲆឨ^<\/庲3'l낢", + "c鮦\u001b두\\~?眾ಢu݆綑෪蘛轋◜gȃ<\/ⴃcpkDt誩܅\"Y", + [[ + null, + null, + [ + 3113744396744005402, + true, + "v(y", + { + "AQ幆h쾜O+꺷铀ꛉ練A蚗⼺螔j㌍3꽂楎䥯뎸먩?": null, + "蠗渗iz鱖w]擪E": 1.2927828494783804E-17, + "튷|䀭n*曎b✿~杤U]Gz鄭kW|㴚#㟗ഠ8u擨": [[ + true, + null, + null, + {"⾪壯톽g7?㥜ώQꑐ㦀恃㧽伓\\*᧰閖樧뢇赸N휶䎈pI氇镊maᬠ탷#X?A+kНM ༑᩟؝?5꧎鰜ṚY즫궔 =ঈ;ﳈ?*s|켦蜌wM笙莔": [ + null, + -3808207793125626469, + [ + -469910450345251234, + 7852761921290328872, + -2.7979740127017492E18, + 1.4458504352519893E-20, + true, + "㽙깹?먏䆢:䴎ۻg殠JBTU⇞}ꄹꗣi#I뵣鉍r혯~脀쏃#釯:场:䔁>䰮o'㼽HZ擓௧nd", + [ + 974441101787238751, + null, + -2.1647718292441327E-19, + 1.03602824249831488E18, + [ + null, + 1.0311977941822604E-17, + false, + true, + { + "": -3.7019778830816707E18, + "E峾恆茍6xLIm縂0n2视֯J-ᤜz+ᨣ跐mYD豍繹⹺䊓몓ﴀE(@詮(!Y膽#᎙2䟓섣A䈀㟎,囪QbK插wcG湎ꤧtG엝x⥏俎j'A一ᯥ뛙6ㅑ鬀": 8999803005418087004, + "よ殳\\zD⧅%Y泥簳Uꈩ*wRL{3#3FYHା[d岀䉯T稉駅䞘礄P:闈W怏ElB㤍喬赔bG䠼U଄Nw鰯闀楈ePsDꥷ꭬⊊": [ + 6.77723657904486E-20, + null, + [ + "ཚ_뷎꾑蹝q'㾱ꂓ钚蘞慵렜떆`ⴹ⎼櫯]J?[t9Ⓢ !컶躔I᮸uz>3a㠕i,錃L$氰텰@7녫W㸮?羧W뇧ꃞ,N鋮숪2ɼ콏┍䁲6", + "&y?뢶=킕올Za惻HZk>c\u20b58i?ꦶcfBv잉ET9j䡡", + "im珊Ճb칧校\\뼾쯀", + 9.555715121193197E-20, + true, + { + "<㫚v6腓㨭e1㕔&&V∌ᗈT奄5Lጥ>탤?튣瑦㳆ꉰ!(ᙪ㿬擇_n쌯IMΉ㕨␰櫈ᱷ5풔蟹&L.첽e鰷쯃劼﫭b#ﭶ퓀7뷄Wr㢈๧Tʴશ㶑澕鍍%": -1810142373373748101, + "fg晌o?߲ꗄ;>C>?=鑰監侯Kt굅": true, + "䫡蓺ꑷ]C蒹㦘\"1ః@呫\u0014NL䏾eg呮፳,r$裢k>/\\?ㄤᇰﻛ쉕1஥'Ċ\" \\_?쨔\"ʾr: 9S䘏禺ᪧꄂ㲄", + [[{ + "*硙^+E쌺I1䀖ju?:⦈Ꞓl๴竣迃xKC/饉:\fl\"XTFᄄ蟭,芢<\/骡軺띜hꏘ\u001f銿<棔햳▨(궆*=乥b8\\媦䷀뫝}닶ꇭ(Kej䤑M": [{ + "1Ꮼ?>옿I╅C<ގ?ꊌ冉SV5A㢊㶆z-๎玶绢2F뵨@㉌뀌o嶔f9-庒茪珓뷳4": null, + ";lᰳ": "CbB+肻a䄷苝*/볳+/4fq=㰁h6瘉샴4铢Y骐.⌖@哼猎㦞+'gꋸ㒕ߤ㞑(䶒跲ti⑴a硂#No볔", + "t?/jE幸YHT셵⩎K!Eq糦ꗣv刴w\"l$ο:=6:移": { + "z]鑪醊嫗J-Xm銌翁絨c里됏炙Ep㣋鏣똼嚌䀓GP﹖cmf4鹭T䅿꣭姧␸wy6ꦶ;S&(}ᎧKxᾂQ|t뻳k\"d6\"|Ml췆hwLt꼼4$&8Պ褵婶鯀9": {"嵃닢ᒯ'd᧫䳳#NXe3-붋鸿ଢ떓%dK\u0013䲎ꖍYV.裸R⍉rR3蟛\\:젯:南ĺLʆ넕>|텩鴷矔ꋅⒹ{t孶㓑4_": [ + true, + null, + [ + false, + "l怨콈lᏒ", + { + "0w䲏嬧-:`䉅쉇漧\\܂yㄨb%㽄j7ᦶ涶<": 3.7899452730383747E-19, + "ꯛTẀq纤q嶏V⿣?\"g}ი艹(쥯B T騠I=仵및X": {"KX6颠+&ᅃ^f畒y[": { + "H?뱜^?꤂-⦲1a㋞&ꍃ精Ii᤾챪咽쬘唂쫷<땡劈훫놡o㥂\\ KⴙD秼F氮[{'좴:례晰Iq+I쭥_T綺砸GO煝䟪ᚪ`↹l羉q쐼D꽁ᜅ훦: vUV": true, + "u^yﳍ0㱓#[y뜌앸ꊬL㷩?蕶蘾⻍KӼ": -7931695755102841701, + "䤬轉車>\u001c鴵惋\"$쯃྆⇻n뽀G氠S坪]ಲꨍ捇Qxኻ椕駔\\9ࣼ﫻읜磡煮뺪ᶚ볝l㕆t+sζ": [[[ + true, + false, + [ + null, + 3363739578828074923, + true, + { + "\"鸣詩 볰㑵gL㯦῅춝旫}ED辗ﮈI쀤-ꧤ|㠦Z\"娑ᕸ4爏騍㣐\"]쳝Af]茛⬻싦o蚁k䢯䩐菽3廇喑ޅ": 4.5017999150704666E17, + "TYႇ7ʠ值4챳唤~Zo&ݛ": false, + "`塄J袛㭆끺㳀N㺣`꽐嶥KﯝSVᶔ∲퀠獾N딂X\"ᤏhNﬨvI": {"\u20bb㭘I䖵䰼?sw䂷쇪](泒f\"~;꼪Fԝsᝦ": {"p,'ꉂ軿=A蚶?bƉ㏵䅰諬'LYKL6B깯⋩겦뎙(ᜭ\u0006噣d꾆㗼Z;䄝䚔cd<情@䞂3苼㸲U{)<6&ꩻ钛\u001au〷N숨囖愙j=BXW욕^x芜堏Ῑ爂뛷꒻t✘Q\b": [[ + "籛&ଃ䩹.ꃩ㦔\\C颫#暪&!勹ꇶ놽攺J堬镙~軌C'꾖䣹㮅岃ᙴ鵣", + 4.317829988264744E15, + 6.013585322002147E-20, + false, + true, + null, + null, + -3.084633632357326E-20, + false, + null, + { + "\"짫愔昻 X\"藣j\"\"먁ཅѻ㘤㬯0晲DU꟒㸃d벀윒l䦾c੻*3": null, + "谈Wm陧阦咟ฯ歖擓N喴㋐銭rCCnVࢥ^♼Ⅾ젲씗刊S༝+_t赔\\b䚍뉨ꬫ6펛cL䊘᜼<\/澤pF懽&H": [ + null, + { + "W\"HDUuΌ퀟M'P4࿰H똆ⰱﮯ<\/凐蘲\"C鴫ﭒж}ꭩ쥾t5yd诪ﮡ퍉ⴰ@?氐醳rj4I6Qt": 6.9090159359219891E17, + "絛ﳛ⺂": {"諰P㗮聦`ZQ?ꫦh*റcb⧱}埌茥h{棩렛툽o3钛5鮁l7Q榛6_g)ὄ\u0013kj뤬^爖eO4Ⱈ槞鉨ͺ订%qX0T썗嫷$?\\\"봅늆'%": [ + -2.348150870600346E-19, + [[ + true, + -6619392047819511778, + false, + [[ + -1.2929189982356161E-20, + 1.7417192219309838E-19, + {"?嵲2࿐2\u0001啑㷳c縯": [ + null, + [ + false, + true, + 2578060295690793218, + { + "?\"殃呎#㑑F": true, + "}F炊_殛oU헢兔Ꝉ,赭9703.B数gTz3⏬": { + "5&t3,햓Mݸᵣ㴵;꣫䩍↳#@뫷䠅+W-ࣇzᓃ鿕ಔ梭?T䮑ꥬ旴]u뫵막bB讍:왳둛lEh=숾鱠p咐$짏#?g⹷ᗊv㷵.斈u頻\u0018-G.": "뽙m-ouࣤ஫牷\"`Ksꕞ筼3HlȨvC堈\"I]㖡玎r먞#'W賜鴇k'c룼髋䆿飉㗆xg巤9;芔cጐ/ax䊨♢큓r吓㸫೼䢗da᩾\"]屣`", + ":M딪<䢥喠\u0013㖅x9蕐㑂XO]f*Q呰瞊吭VP@9,㨣 D\\穎vˤƩs㜂-曱唅L걬/롬j㈹EB8g<\/섩o渀\"u0y&룣": ">氍緩L/䕑돯Ꟙ蕞^aB뒣+0jK⪄瑨痜LXK^힦1qK{淚t츔X:Vm{2r獁B뾄H첚7氥?쉟䨗ꠂv팳圎踁齀\\", + "D彤5㢷Gꪻ[lㄆ@὜⓰絳[ଃ獽쮹☒[*0ꑚ㜳": 9022717159376231865, + 
"ҖaV銣tW+$魿\u20c3亜~뫡ᙰ禿쨽㏡fṼzE/h": "5臐㋇Ჯ쮺? 昨탰Wム밎#'\"崲钅U?幫뺀⍾@4kh>騧\\0ҾEV=爐͌U捀%ꉼ 㮋<{j]{R>:gԩL\u001c瀈锌ﯲﳡꚒ'⫿E4暍㌗뵉X\"H᝜", + "ᱚגּ;s醒}犍SἿ㦣&{T$jkB\\\tḮ앾䤹o<避(tW": "vb⯽䴪䮢@|)", + "⥒퐁껉%惀뗌+녣迺顀q條g⚯i⤭룐M琹j̈́⽜A": -8385214638503106917, + "逨ꊶZ<\/W⫟솪㎮ᘇb?ꠔi\"H㧺x෷韒Xꫨฟ|]窽\u001a熑}Agn?Mᶖa9韲4$3Ỵ^=쏍煤ፐ돷2䣃%鷠/eQ9頸쥎", + 2398360204813891033, + false, + 3.2658897259932633E-19, + null, + "?ꚃ8Nn㞷幵d䲳䱲뀙ꪛQ瑓鎴]䩋-鰾捡䳡??掊", + false, + -1309779089385483661, + "ᦲxu_/yecR.6芏.ᜇ過 ~", + -5658779764160586501, + "쒌:曠=l썜䢜wk#s蕚\"互㮉m䉤~0듐䋙#G;h숄옥顇෤勹(C7㢅雚㐯L⠅VV簅<", + null, + -4.664877097240962E18, + -4.1931322262828017E18, + { + ",": { + "v㮟麑䄠뤵g{M띮.\u001bzt뢜뵡0Ǥ龍떟Ᾰ怷ϓRT@Lꀌ樂U㏠⾕e扉|bJg(뵒㠶唺~ꂿ(땉x⻫싉쁊;%0鎻V(o\f,N鏊%nk郼螺": -1.73631993428376141E18, + "쟧摑繮Q@Rᕾ㭚㾣4隅待㓎3蒟": [ + 4971487283312058201, + 8973067552274458613, + { + "`a揙ᣗ\u0015iBo¸": 4.3236479112537999E18, + "HW&퉡ぁ圍Y?瑡Qy훍q!帰敏s舠㫸zꚗaS歲v`G株巷Jp6킼 (귶鍔⾏⡈>M汐㞍ቴ꙲dv@i㳓ᇆ?黍": [ + null, + 4997607199327183467, + "E㻎蠫ᐾ高䙟蘬洼旾﫠텛㇛?'M$㣒蔸=A_亀绉앭rN帮", + null, + [{ + "Eᑞ)8餧A5u&㗾q?": [ + -1.969987519306507E-19, + null, + [ + 3.42437673373841E-20, + true, + "e걷M墁\"割P␛퍧厀R䱜3ﻴO퓫r﹉⹊", + [ + -8164221302779285367, + [ + true, + null, + "爘y^-?蘞Ⲽꪓa␅ꍨ}I", + 1.4645984996724427E-19, + [{ + "tY좗⧑mrzﺝ㿥ⴖ᥷j諅\u0000q賋譁Ꞅ⮱S\nࡣB/큃굪3Zɑ复o<\/;롋": null, + "彟h浠_|V4䦭Dᙣ♞u쿻=삮㍦\u001e哀鬌": [{"6횣楠,qʎꗇ鎆빙]㱭R굋鈌%栲j分僅ペ䇰w폦p蛃N溈ꡐꏀ?@(GI뉬$ﮄ9誁ꓚ2e甸ڋ[䁺,\u0011\u001cࢃ=\\+衪䷨ᯕ鬸K": [[ + "ㅩ拏鈩勥\u000etgWVXs陂規p狵w퓼{뮵_i\u0002ퟑႢ⬐d6鋫F~챿搟\u0096䚼1ۼ칥0꣯儏=鋷牋ⅈꍞ龐", + -7283717290969427831, + true, + [ + 4911644391234541055, + { + "I鈒첽P릜朸W徨觘-Hᎄ퐟⓺>8kr1{겵䍃〛ᬡ̨O귑o䝕'쿡鉕p5": "fv粖RN瞖蛐a?q꤄\u001d⸥}'ꣴ犿ꦼ?뤋?鵆쥴덋䡫s矷̄?ඣ/;괱絢oWfV<\/\u202cC,㖦0䑾%n賹g&T;|lj_欂N4w", + "짨䠗;䌕u i+r๏0": [{"9䥁\\఩8\"馇z䇔<\/ႡY3e狚쐡\"ุ6ﰆZ遖c\"Ll:ꮾ疣<\/᭙O◌납୕湞9⡳Und㫜\u0018^4pj1;䧐儂䗷ୗ>@e톬": { + "a⑂F鋻Q螰'<퇽Q贝瀧{ᘪ,cP&~䮃Z?gI彃": [ + -1.69158726118025933E18, + [ + "궂z簽㔛㮨瘥⤜䛖Gℤ逆Y⪾j08Sn昞ꘔ캻禀鴚P謦b{ꓮmN靐Mᥙ5\"睏2냑I\u0011.L&=?6ᄠ뻷X鸌t刑\"#z)o꫚n쳟줋", + null, + 7517598198523963704, + "ኑQp襟`uᩄr方]*F48ꔵn俺ሙ9뇒", + null, + null, + 6645782462773449868, + 1219168146640438184, + null, + { + ")ယ넌竀Sd䰾zq⫣⏌ʥ\u0010ΐ' |磪&p牢蔑mV蘸૰짬꺵;K": [ + -7.539062290108008E-20, + [ + true, + false, + null, + true, + 6574577753576444630, + [[ + 1.2760162530699766E-19, + [ + null, + [ + "顊\\憎zXB,", + [{ + "㇆{CVC9-MN㜋ઘR눽#{h@ퟨ!鼚׼XOvXS\u0017ᝣ=cS+梽៲綆16s덽휐y屬?ᇳG2ᴭ\u00054쫖y룇nKcW̭炦s/鰘ᬽ?J|퓀髣n勌\u0010홠P>j": false, + "箴": [ + false, + "鍞j\"ꮾ*엇칬瘫xṬ⭽쩁䃳\"-⋵?ᦽ댎Ĝ": true, + "Pg帯佃籛n㔠⭹࠳뷏≻࿟3㞱!-쒾!}쭪䃕!籿n涻J5ਲ਼yvy;Rኂ%ᔡጀ裃;M⣼)쵂쑈": 1.80447711803435366E18, + "ꈑC⡂ᑆ㤉壂뎃Xub<\/쀆༈憓ق쨐ק\\": [ + 7706977185172797197, + {"": {"K╥踮砆NWࡆFy韣7ä밥{|紒︧䃀榫rᩛꦡTSy잺iH8}ퟴ,M?Ʂ勺ᴹ@T@~꾂=I㙕뾰_涀쑜嫴曣8IY?ҿo줫fऒ}\\S\"ᦨ뵼#nDX": { + "♘k6?଱癫d68?㽚乳䬳-V顷\u0005蝕?\u0018䞊V{邾zじl]雏k臤~ൖH뒐iꢥ]g?.G碄懺䔛pR$䅒X觨l봜A刊8R梒',}u邩퉕?;91Ea䈈믁G⊶芔h袪&廣㺄j;㡏綽\u001bN頸쳘橆": -2272208444812560733, + "拑Wﵚj鵼駳Oࣿ)#㾅顂N傓纝y僱栜'Bꐍ-!KF*ꭇK¦?䈴^:啤wG逭w᧯": "xᣱmYe1ۏ@霄F$ě꧘푫O䤕퀐Pq52憬ꀜ兴㑗ᡚ?L鷝ퟐ뭐zJꑙ}╆ᅨJB]\"袌㺲u8䯆f", + "꿽၅㔂긱Ǧ?SI": -1669030251960539193, + "쇝ɨ`!葎>瞺瘡驷錶❤ﻮ酜=": -6961311505642101651, + "?f7♄꫄Jᡔ훮e읇퍾፣䭴KhखT;Qty}O\\|뫁IῒNe(5惁ꥶㆷY9ﮡ\\ oy⭖-䆩婁m#x봉>Y鈕E疣s驇↙ᙰm<": {"퉻:dꂁ&efᅫ쫢[\"돈늖꺙|Ô剐1͖-K:ʚ᭕/;쏖㷛]I痐职4gZ4⍜kเꛘZ⥺\\Bʫᇩ鄨魢弞&幟ᓮ2̊盜", + -9006004849098116748, + -3118404930403695681, + { + "_彃Y艘-\"Xx㤩㳷瑃?%2䐡鵛o귵옔夘v*탋职&㳈챗|O钧": [ + false, + "daꧺdᗹ羞쯧H㍤鄳頳<型孒ン냆㹀f4㹰\u000f|C*ሟ鰠(O<ꨭ峹ipຠ*y೧4VQ蔔hV淬{?ᵌEfrI_", + "j;ꗣ밷邍副]ᗓ", + -4299029053086432759, + -5610837526958786727, + [ + null, + [ + -1.3958390678662759E-19, + { + "lh좈T_믝Y\"伨\u001cꔌG爔겕ꫳ晚踍⿻읐T䯎]~e#฽燇\"5hٔ嶰`泯r;ᗜ쮪Q):/t筑,榄&5懶뎫狝(": [{ + "2ፁⓛ]r3C攟וּ9賵s⛔6'ஂ|\"ⵈ鶆䐹禝3\"痰ࢤ霏䵩옆䌀?栕r7O簂Isd?K᫜`^讶}z8?z얰T:X倫⨎ꑹ": -6731128077618251511, + "|︦僰~m漿햭\\Y1'Vvخ굇ቍ챢c趖": [null] + }], + "虌魿閆5⛔煊뎰㞤ᗴꥰF䮥蘦䂪樳-K᝷-(^\u20dd_": 2.11318679791770592E17 + } + ] 
+ ] + ]}, + "묗E䀳㧯᳀逞GMc\b墹㓄끖Ơ&U??펌鑍 媋k))ᄊ": null, + "묥7콽벼諌J_DɯﮪM殴䣏,煚ྼ`Y:씧<\/⩫%yf䦀!1Ჶk춎Q米W∠WC跉鬽*ᛱi㴕L꘻ꀏ쓪\"_g鿄'#t⽙?,Wg㥖|D鑆e⥏쪸僬h鯔咼ඡ;4TK聎졠嫞" + } + ] + ] + } + ] + ] + ]}} + } + ]} + }, + "뿋뀾淣截䔲踀&XJ펖꙯^Xb訅ꫥgᬐ>棟S\"혧騾밫겁7-": "擹8C憎W\"쵮yR뢩浗絆䠣簿9䏈引Wcy䤶孖ꯥ;퐌]輩䍐3@{叝 뽸0ᡈ쵡Ⲇ\u001dL匁꧐2F~ݕ㪂@W^靽L襒ᦘ~沦zZ棸!꒲栬R" + } + ] + ], + "Z:덃൛5Iz찇䅄駠㭧蓡K1": "e8᧤좱U%?ⵇ䯿鿝\u0013縮R∱骒EO\u000fg?幤@֗퉙vU`", + "䐃쪈埽້=Ij,쭗쓇చ": false + }]}} + ] + } + ]} + } + ] + ] + ], + "咰긖VM]᝼6䓑쇎琺etDҌ?㞏ꩄ퇫밉gj8蠃\"⩐5䛹1ࣚ㵪": "ക蹊?⎲⧘⾚̀I#\"䈈⦞돷`wo窭戕෱휾䃼)앷嵃꾞稧,Ⴆ윧9S?೗EMk3Მ3+e{⹔Te驨7䵒?타Ulg悳o43" + } + ], + "zQᤚ纂땺6#ٽ﹧v￿#ࠫ휊冟蹧텈ꃊʆ?&a䥯De潝|쿓pt瓞㭻啹^盚2Ꝋf醪,얏T窧\\Di䕎谄nn父ꋊE": -2914269627845628872, + "䉩跐|㨻ᷢ㝉B{蓧瞸`I!℄욃힕#ೲᙾ竛ᔺCjk췒늕貭词\u0017署?W딚%(pꍁ⤼띳^=on뺲l䆼bzrﳨ[&j狸䠠=ᜑꦦ\u2061յnj=牲攑)M\\龏": false, + "뎕y絬᫡⥮Ϙᯑ㌔/NF*˓.,QEzvK!Iwz?|쥾\"ꩻL꼗Bꔧ賴緜s뉣隤茛>ロ?(?^`>冺飒=噸泥⺭Ᲊ婓鎔븜z^坷裮êⓅ໗jM7ﶕ找\\O": 1.376745434746303E-19 + }, + "䐛r滖w㏤,|Nዜ": false + } + ]], + "@꿙?薕尬 gd晆(띄5躕ﻫS蔺4)떒錸瓍?~": 1665108992286702624, + "w믍nᏠ=`঺ᅥC>'從됐槷䤝眷螄㎻揰扰XᅧC贽uჍ낟jKD03T!lDV쀉Ӊy뢖,袛!终캨G?鉮Q)⑗1쾅庅O4ꁉH7?d\u0010蠈줘월ސ粯Q!낇껉6텝|{": null, + "~˷jg쿤촖쉯y": -5.5527605669177098E18, + "펅Wᶺzꐆと푭e?4j仪열[D<鈑皶婆䵽ehS?袪;HꍨM뗎ば[(嗏M3q퍟g4y╸鰧茀[Bi盤~﫝唎鋆彺⦊q?B4쉓癚O洙킋툈䶯_?ퟲ": null + } + ] + ]] + ]], + "꟱Ԕ㍤7曁聯ಃ錐V䷰?v㪃૦~K\"$%请|ꇹn\"k䫛㏨鲨\u2023䄢\u0004[︊VJ?䶟ាꮈ䗱=깘U빩": -4863152493797013264 + } + ]}]} + ] + }}} + ], + "쏷쐲۹퉃~aE唙a챑,9㮹gLHd'䔏|킗㍞䎥&KZYT맵7䥺Nⱳ同莞鿧w\\༌疣n/+ꎥU\"封랾○ퟙAJᭌ?9䛝$?驔9讐짘魡T֯c藳`虉C읇쐦T" + } + ], + "谶개gTR￐>ၵ͚dt晑䉇陏滺}9㉸P漄": -3350307268584339381 + }] + ] + ] + ]] + ] + ], + "0y꟭馋X뱔瑇:䌚￐廿jg-懲鸭䷭垤㒬茭u賚찶ಽ+\\mT땱\u20821殑㐄J쩩䭛ꬿNS潔*d\\X,壠뒦e殟%LxG9:摸": 3737064585881894882, + "풵O^-⧧ⅶvѪ8廸鉵㈉ר↝Q㿴뺟EႳvNM:磇>w/៻唎뷭୥!냹D䯙i뵱貁C#⼉NH6`柴ʗ#\\!2䂗Ⱨf?諳.P덈-返I꘶6?8ꐘ": -8934657287877777844, + "溎-蘍寃i诖ര\"汵\"\ftl,?d⼡쾪⺋h匱[,෩I8MҧF{k瓿PA'橸ꩯ綷퉲翓": null + } + ] + ], + "ោ係؁<元": 1.7926963090826924E-18 + }}] + } + ] + ]]}] + }] + ] + ] + ] + ], + "ጩV<\"ڸsOᤘ": 2.0527167903723048E-19 + }] + ]} + ] + ]], + "∳㙰3젴p᧗䱙?`yZA8Ez0,^ᙛ4_0븢\u001ft:~䎼s.bb룦明yNP8弆C偯;⪾짍'蕴뮛": -6976654157771105701, + "큵ꦀ\\㇑:nv+뒤燻䀪ﴣ﷍9ᚈ኷K㚊誦撪䚛,ꮪxሲ쳊\u0005HSf?asg昱dqꬌVꙇ㼺'k*'㈈": -5.937042203633044E-20 + } + ] + }], + "?}\u20e0],s嶳菋@#2u쒴sQS䩗=ꥮ;烌,|ꘔ䘆": "ᅩ영N璠kZ먕眻?2ቲ芋眑D륟渂⸑ﴃIRE]啗`K'" + }}, + "쨀jmV賂ﰊ姐䂦玞㬙ᏪM᪟Վ씜~`uOn*ॠ8\u000ef6??\\@/?9見d筜ﳋB|S䝬葫㽁o": true + }, + "즛ꄤ酳艚␂㺘봿㎨iG৕ࡿ?1\"䘓您\u001fSኝ⺿溏zៀ뻤B\u0019?윐a䳵᭱䉺膷d:<\/": 3935553551038864272 + } + ] + ]} + ]] + ]] + ]} + } + ] + } + ]]}}, + "᥺3h↛!ꋰy\"攜(ெl䪕oUkc1A㘞ᡲ촾ᣫ<\/䒌E㛝潨i{v?W౾H\\RჅpz蝬R脾;v:碽✘↯삞鷱o㸧瑠jcmK7㶧뾥찲n": true, + "ⶸ?x䊺⬝-䰅≁!e쩆2ꎿ准G踌XXᩯ1߁}0?.헀Z馟;稄\baDꟹ{-寪⚈ꉷ鮸_L7ƽᾚ<\u001bጨA䧆송뇵⨔\\礍뗔d设룱㶉cq{HyぱR㥽吢ſtp": -7985372423148569301, + "緫#콮IB6<\/=5Eh礹\t8럭@饹韠r㰛斣$甝LV췐a갵'请o0g:^": "䔨(.", + "띳℡圤pン௄ĝ倧訜B쁟G䙔\"Sb⓮;$$▏S1J뢙SF|赡g*\"Vu䲌y": "䪈&틐),\\kT鬜1풥;뷴'Zေ䩹@J鞽NぼM?坥eWb6榀ƩZڮ淽⺞삳煳xჿ絯8eⶍ羷V}ჿ쎱䄫R뱃9Z>'\u20f1ⓕ䏜齮" + } + ] + ]]] + }} + } + ] + ]}, + "펮b.h粔폯2npX詫g錰鷇㇒<쐙S値bBi@?镬矉`剔}c2壧ଭfhY깨R()痩⺃a\\⍔?M&ﯟ<劜꺄멊ᄟA\"_=": null + }, + "~潹Rqn榢㆓aR鬨侅?䜑亡V_翅㭔(䓷w劸ၳDp䀅<\/ﰎ鶊m䵱팱긽ꆘ긓准D3掱;o:_ќ)껚콥8곤d矦8nP倥ꃸI": null, + "뾎/Q㣩㫸벯➡㠦◕挮a鶧⋓偼\u00001뱓fm覞n?㛅\"": 2.8515592202045408E17 + }], + ",": -5426918750465854828, + "2櫫@0柡g䢻/gꆑ6演&D稒肩Y?艘/놘p{f투`飷ᒉ챻돎<늛䘍ﴡ줰쫄": false, + "8(鸑嵀⵹ퟡ<9㣎Tߗ┘d슒ل蘯&㠦뮮eࠍk砝g 엻": false, + "d-\u208b?0ﳮ嵙'(J`蔿d^踅⤔榥\\J⵲v7": 6.8002426206715341E17, + "ཎ耰큓ꐕ㱷\u0013y=詽I\"盈xm{0쾽倻䉚ષso#鰑/8㸴짯%ꀄ떸b츟*\\鲷礬ZQ兩?np㋄椂榨kc᡹醅3": false, + "싊j20": false + }]] + ]], + "俛\u0017n緽Tu뫉蜍鼟烬.ꭠIⰓ\"Ἀ᜾uC쎆J@古%ꛍm뻨ᾀ画蛐휃T:錖㑸ዚ9죡$": true + } + ] + ], + "㍵⇘ꦖ辈s}㱮慀밒s`\"㞟j:`i픻Z섫^諎0Ok{켿歁෣胰a2﨤[탳뚬쎼嫭뉮m": 409440660915023105, + "w墄#*ᢄ峠밮jLa`ㆪ꺊漓Lで끎!Agk'ꁛ뢃㯐岬D#㒦": false, + "ଦPGI䕺L몥罭ꃑ궩﮶#⮈ᢓӢ䚬p7웼臧%~S菠␌힀6&t䳙y㪘냏\\*;鉏ᅧ鿵'嗕pa\"oL쇿꬈Cg": "㶽1灸D⟸䴅ᆤ뉎﷛渤csx 䝔цꬃ锚捬?ຽ+x~꘩uI࡞\u0007栲5呚ẓem?袝\")=㥴䨃pac!/揎Y", + 
"ᷱo\\||뎂몷r篙|#X䦜I#딌媸픕叞RD斳X4t⯩夬=[뭲r=绥jh뷱츝⪘%]⚋܈㖴スH텹m(WO曝劉0~K3c柢Ր㏉着逳~": false, + "煽_qb[첑\\륌wE❽ZtCNﭝ+餌ᕜOꛭ": "{ﳾ쉌&s惧ᭁⵆ3䢫;䨞팑꒪흘褀࢖Q䠿V5뭀䎂澻%받u5텸oA⮥U㎦;B䳌wz䕙$ឿ\\௅婺돵⪾퐆\\`Kyौꋟ._\u0006L챯l뇠Hi䧈偒5", + "艊佁ࣃ롇䱠爬!*;⨣捎慓q靓|儑ᨋL+迥=6㒺딉6弄3辅J-㕎뛄듘SG㆛(\noAzQꝱ䰩X*ぢO퀌%펠낌mo틮a^<\/F&_눊ᾉ㨦ы4\"8H": 2974648459619059400, + "鬙@뎣䫳ၮ끡?){y?5K;TA*k溱䫜J汃ꂯ싔썍\u001dA}룖(<\/^,": false, + "몏@QꋦFꊩᒐ뎶lXl垨4^郣|ꮇ;䝴ᝓ}쵲z珖": null + } + ]]]], + ":_=닧弗D䙋暨鏛. 㱻붘䂍J儒&ZK/녩䪜r囁⽯D喠죥7⹌䪥c\u001a\u2076￞妈朹oLk菮F౟覛쐧㮏7T;}蛙2{9\"崓bB<\/⡷룀;즮鿹)丒툃୤뷠5W⊢嶜(fb뭳갣": "E{响1WM" + }}, + "䘨tjJ驳豨?y輊M*᳑梵瞻઻ofQG瑮e": 2.222802939724948E-19, + "䮴=❑➶T෋w䞜\"垦ꃼUt\u001dx;B$뵣䙶E↌艣ᡥ!᧟;䱀[䔯k쬃`੍8饙른熏'2_'袻tGf蒭J땟as꯳╖&啒zWࡇᒫYSᏬ\u0014ℑ첥鈤|cG~Pᓮ\">\"": "ႆl\f7V儊㦬nHꄬꨧC{쐢~C⮃⛓嶦vꄎ1w鰠嘩뿠魄&\"_qMⵖ釔녮ꝇ 㝚{糍J哋 cv?-jkﻯྌ鹑L舟r", + "龧葆yB✱H盋夔ﶉ?n*0(": "ꧣኆ㢓氥qZZ酒ຜ)鮢樛)X䣆gTSґG텞k.J圬疝롫쯭z L:\\ྤ@w炋塜쿖ᾳy뢀䶃뱝N䥨㚔勇겁#p", + "도畎Q娡\"@S/뼋:䵏!P衅촚fVHQs✜ᐫi㻑殡B䜇%믚k*U#濨낄~": "ꍟዕ쳸ꍈ敋&l妏\u0005憡멗瘌uPgᅪm<\/To쯬锩h뒓k" + } + ] + }], + "墥홞r绚<\/⸹ⰃB}<躅\\Y;๑@䔸>韫䜲뱀X뗩鿥쩗SI%ﴞ㳕䛇?<\/\u00018x\\&侂9鋙a[LR㋭W胕)⡿8㞙0JF,}?허d1cDMᐃ␛鄝ⱕ%X)!XQ": "ⳍꗳ=橇a;3t⦾꼑仈ူaᚯ⯋ꕃAs鴷N⍕_䎃ꙎAz\u0016䯷\\<࿫>8q{}キ?ᣰ}'0ᴕ펓B┦lF#趤厃T?㕊#撹圂䆲" + }, + "܋닐龫論c웑": false, + "ㇿ/q\"6-co髨휝C큦#\u001b4~?3䐹E삇<<": 7.600917488140322E-20, + "䁝E6?㣖ꃁ间t祗*鑠{ḣV(浾h逇큞=W?ૉ?nꇽ8ꅉຉj으쮺@Ꚅ㰤u]Oyr": "v≁᫸_*όAඤԆl)ۓᦇQ}폠z༏q滚", + "ソ᥊/넺I": true + }]] + ] + ] + ] + ]] + }, + "䭑Ik攑\u0002QV烄:芩.麑㟴㘨≕": true, + "坄꿕C쇻풉~崍%碼\\8\"䬦꣙": null, + "欌L圬䅘Y8c(♺2?ON}o椳s宥2䉀eJ%闹r冁O^K諭%凞⺉⡻,掜?$ꥉ?略焕찳㯊艼誜4?\"﯎<゛XፈINT:詓 +": -1.0750456770694562E-19, + "獒àc뜭싼ﺳ뎤K`]p隨LtE": null, + "甙8䵊神EIꩤ鐯ᢀ,ﵮU䝑u疒ử驺䚿≚ഋ梶秓F`覤譐#짾蔀묊4<媍쬦靪_Yzgcࡶ4k紥`kc[Lﮗ簐*I瀑[⾰L殽鑥_mGȠ<\/|囹灠g桰iri": true, + "챓ꖙꟻ좝菇ou,嗠0\\jK핻뜠qwQ?ഩ㼕3Y彦b\u009bJ榶N棨f?됦鏖綃6鳵M[OE봨u햏.Ꮁ癜蟳뽲ꩌ뻾rM豈R嗀羫 uDꎚ%": null + }, + "V傜2<": 7175127699521359521 + }], + "铫aG切<\/\"ী⊆e<^g࢛)D顝nאַ饼\u008c猪繩嵿ﱚCꡬ㻊g엺A엦\u000f暿_f꿤볝㦕桦`蒦䎔j甬%岝rj 糏": "䚢偎눴Au<4箞7礦Iﱔ坠eȧ䪸u䵁p|逹$嗫쨘ꖾ﷐!胠z寓팢^㨔|u8Nሇe텔ꅦ抷]،鹎㳁#༔繁 ", + "낂乕ꃻ볨ϱ-ꇋ㖍fs⿫)zꜦ/K?솞♞ꑌ宭hJ᤭瑥Fu": false, + "쟰ぜ魛G\u0003u?`㾕ℾ㣭5螠烶這趩ꖢ:@咕ꐶx뒘느m䰨b痃렐0鳊喵熬딃$摉_~7*ⱦ녯1錾GKhJ惎秴6'H妈Tᧅ窹㺒疄矤铟wላ": null, + "쯆q4!3錕㲏ⵆ㇛꘷Z瑩뭆\\◪NH\u001d\\㽰U~㯶<\"쑣낞3ᵤ'峉eꢬ;鬹o꣒木X*長PXᘱu\"䠹n惞": null, + "ᅸ祊\"&ꥴCjࢼ﴿?䡉`U效5殼㮞V昽ꏪ#ﺸ\\&t6x꠹盥꣰a[\u001aꪍSpe鎿蠹": -1.1564713893659811E-19 + } + ]] + ] + ] + ], + "羵䥳H,6ⱎ겾|@t\"#햊1|稃 섭)띜=뻔ꡜ???櫎~*ῡ꫌/繣ﻠq": null + } + ]} + ]}, + "츤": false + }}, + "s": 3.7339341963399598E18 + } + ], + "N,I?1+㢓|ࣱ嶃쩥V2\u0012(4EE虪朶$|w颇v步": "~읢~_,Mzr㐫YB溓E淚\"ⅹ䈔ᏺ抙 b,nt5V㐒J檶ꏨ⻔?", + "Q껑ꡡ}$넎qH煔惍/ez^!ẳF댙䝌馻剁8": "梲;yt钰$i冄}AL%a j뜐奷걳뚾d꿽*ሬuDY3?뮟鼯뮟w㍪틱V", + "o{Q/K O胟㍏zUdꀐm&⨺J舕⾏魸訟㌥[T籨櫉唐킝 aṭ뱫촙莛>碶覆⧬짙쭰ׯdAiH໥벤퐥_恸[ 0e:죃TC弼荎뵁DA:w唵ꣁ": null, + "὏樎䵮軧|?౗aWH쩃1 ꅭsu": null + } + ] + }, + "勂\\&m鰈J釮=Ⲽ鳋+䂡郑": null, + "殣b綊倶5㥗惢⳷萢ᑀ䬄镧M^ﱴ3⣢翣n櫻1㨵}ኯ뗙顖Z.Q➷ꮨ뗇\u0004": "ꔙ䁼>n^[GीA䨟AM琢ᒊS쨲w?d㶣젊嘶纝麓+愣a%気ྞSc됓ᔘ:8bM7Xd8㶑臌]Ꙥ0ꐭ쒙䫣挵C薽Dfⵃ떼᷸", + "?紡.셪_෨j\u0013Ox┠$Xᶨ-ᅇo薹-}軫;y毝㪜K㣁?.EV쮱4둽⛻䤜'2盡\u001f60(|e쐰㼎ᦀ㒧-$l@ﻑ坳\u0003䭱响巗WFo5c㧆T턁Y맸♤(": -2.50917882560589088E17 + }} + ], + "侸\\릩.᳠뎠狣살cs项䭩畳H1s瀉븇19?.w骴崖㤊h痠볭㞳㞳䁮Ql怠㦵": "@䟴-=7f", + "鹟1x௢+d ;vi䭴FSDS\u0004hꎹ㚍?⒍⦏ў6u,扩@됷Su)Pag휛TᒗV痩!瞏釀ꖞ蘥&ೞ蘐ꭰꞇᝎ": "ah懱Ժ&\u20f7䵅♎඀䞧鿪굛ౕ湚粎蚵ᯋ幌YOE)५襦㊝Y*^\"R+ඈ咷蝶9ꥂ榨艦멎헦閝돶v좛咊E)K㓷ྭr", + "搆q쮦4綱켙셁.f4<\/g<籽늷?#蚴픘:fF\u00051㹉뀭.ᰖ풎f֦Hv蔎㧤.!䭽=鞽]음H:?\"-4": 8.740133984938656E-20 + }]} + } + ], + "tVKn딩꘥⊾蹓᤹{\u0003lR꼽ᄲQFᅏ傅ﱋ猢⤊ᔁ,E㓒秤nTතv`♛I\u0000]꫔ṞD\"麵c踝杰X&濿또꣹깳౥葂鿎\\aꡨ?": 3900062609292104525 + } + ], + "ਉ샒⊩Lu@S䧰^g": -1.1487677090371648E18, + "⎢k⑊꬗yᏫ7^err糎Dt\u000bJ礯확ㆍ沑サꋽe赔㝢^J\u0004笲㿋idra剰-᪉C錇/Ĝ䂾ညS지?~콮gR敉⬹'䧭": 1901472137232418266, + "灗k䶥:?촽贍쓉꓈㒸g獘[뵎\\胕?\u0014_榙p.j稶,$`糉妋0>Fᡰly㘽$?": "]ꙛO赎&#㠃돱剳\"<◆>0誉齐_|z|裵씪>ᐌ㼍\"Z[琕}O?G뚇諦cs⠜撺5cu痑U圲\u001c?鴴計l춥/╓哼䄗茏ꮅ뫈댽A돌롖뤫V窗讬sHd&\nOi;_u" + } + ], + "Uﺗ\\Y\\梷䄬~\u0002": null, + "k\"Y磓ᗔ휎@U冈<\/w컑)[": false, + "曏J蝷⌻덦\u001f㙳s꥓⍟邫P늮쥄c∬ྡྷ舆렮칤Z趣5콡넛A쳨\\뀙骫(棻.*&輛LiIfi{@EA婳KᬰTXT": 
-4.3088230431977587E17 + }]} + ] + ], + "곃㲧<\/dఓꂟs其ࡧ&N葶=?c㠤Ჴ'횠숄臼#\u001a~": false + } + ] + ]}] + }] + }} + ], + "2f`⽰E쵟>J笂裭!〛觬囀ۺ쟰#桊l鹛ⲋ|RA_Vx፭gE됓h﵀mfỐ|?juTU档[d⢼⺻p濚7E峿": 5613688852456817133 + }, + "濘끶g忮7㏵殬W팕Q曁 뫰)惃廊5%-蹚zYZ樭ﴷQ锘쯤崫gg": true, + "絥ᇑ⦏쒓븣爚H.㗊߄o蘵貆ꂚ(쎔O᥉ﮓ]姨Wꁓ!RMA|o퉢THx轮7M껁U즨'i뾘舯o": "跥f꜃?" + }} + ], + "鷰鹮K-9k;ﰰ?_ݦѷ-ꅣ䩨Zꥱ\"mꠟ屎/콑Y╘2&鸞脇㏢ꀇ࠺ⰼ拾喭틮L꽩bt俸墶 [l/웄\"꾦\u20d3iও-&+\u000fQ+໱뵞": -1.296494662286671E-19 + }, + "HX੹/⨇୕붷Uﮘ旧\\쾜͔3l鄈磣糂̖䟎Eᐳw橖b῀_딕hu葰窳闹вU颵|染H죶.fP䗮:j䫢\\b뎖i燕ꜚG⮠W-≚뉗l趕": "ଊ칭Oa᡺$IV㷧L\u0019脴셀붿餲햪$迳向쐯켂PqfT\" ?I屉鴼쿕@硙z^鏕㊵M}㚛T젣쓌-W⩐-g%⺵<뮱~빅╴瑿浂脬\u0005왦燲4Ⴭb|D堧 <\/oEQh", + "䘶#㥘੐캔f巋ἡAJ䢚쭈ࣨ뫒*mᇊK,ࣺAꑱ\u000bR<\/A\"1a6鵌㯀bh곿w(\"$ꘁ*rಐ趣.d࿩k/抶면䒎9W⊃9": "漩b挋Sw藎\u0000", + "畀e㨼mK꙼HglKb,\"'䤜": null + }]}] + ] + ] + }] + ]} + ] + ]} + ], + "歙>駿ꣂ숰Q`J΋方樛(d鱾뼣(뫖턭\u20f9lচ9歌8o]8윶l얶?镖G摄탗6폋폵+g:䱫홊<멀뀿/س|ꭺs걐跶稚W々c㫣⎖": "㣮蔊깚Cꓔ舊|XRf遻㆚︆'쾉췝\\&言", + "殭\"cށɨꝙ䞘:嬮e潽Y펪㳅/\"O@ࠗ겴]췖YǞ(t>R\"N?梳LD恭=n氯T豰2R諸#N}*灧4}㶊G䍣b얚": null, + "襞<\/啧 B|싞W瓇)6簭鼡艆lN쩝`|펭佡\\間邝[z릶&쭟愱ꅅ\\T᰽1鯯偐栈4̸s윜R7⒝/똽?치X": "⏊躖Cﱰ2Qẫ脐&இ?%냝悊", + ",鰧偵셣싹xᎹ힨᯳EṬH㹖9": -4604276727380542356 + } + } + ]]]], + "웺㚑xs}q䭵䪠馯8?LB犯zK'os䚛HZ\"L?셎s^㿧㴘Cv2": null + }] + ] + ] + ], + "Kd2Kv+|z": 7367845130646124107, + "ᦂⶨ?ᝢ 祂些ഷ牢㋇操\"腭䙾㖪\\(y4cE뽺ㆷ쫺ᔖ%zfۻ$ў1柦,㶢9r漢": -3.133230960444846E-20, + "琘M焀q%㢟f鸯O⣏蓑맕鯊$O噷|)z褫^㢦⠮ꚯ꫞`毕1qꢚ{ĭ䎀বώT\"뱘3G൴?^^of": null + } + ], + "a8V᯺?:ﺃ/8ꉿBq|9啓댚;*i2": null, + "cpT瀇H珰Ừpೃi鎪Rr␣숬-鹸ҩ䠚z脚цGoN8入y%趌I┽2ឪЀiJNcN)槣/▟6S숆牟\"箑X僛G殱娇葱T%杻:J諹昰qV쨰": 8331037591040855245 + }], + "G5ᩜ䄗巢껳": true + } + }, + "Ồ巢ゕ@_譙A`碫鄐㡥砄㠓(^K": "?܃B혢▦@犑ὺD~T⧁|醁;o=J牌9냚⢽㨘{4觍蚔9#$∺\u0016p囅\\3Xk阖⪚\"UzA穕롬✎➁㭒춺C㣌ဉ\"2瓑员ᅽꝶ뫍}꽚ꞇ鶂舟彺]ꍽJC蝧銉", + "␆Ě膝\"b-퉐ACR言J謈53~V튥x䜢?ꃽɄY뮩ꚜ": "K/↾e萃}]Bs⾿q룅鷦-膋?m+死^魊镲6", + "粡霦c枋AHퟁo礼Ke?qWcA趸㡔ꂏ?\u000e춂8iতᦜ婪\u0015㢼nﵿꍻ!ᐴ関\u001d5j㨻gfῩUK5Ju丝tかTI'?㓏t>⼟o a>i}ᰗ;뤕ܝ": false, + "ꄮ匴껢ꂰ涽+䜨B蛹H䛓-k蕞fu7kL谖,'涃V~챳逋穞cT\"vQ쓕ObaCRQ㓡Ⲯ?轭⫦輢墳?vA餽=h䮇킵n폲퉅喙?\"'1疬V嬗Qd灗'Lự": "6v!s믁㭟㣯獃!磸餠ቂh0C뿯봗F鷭gꖶ~コkK<ᦈTt\\跓w㭣횋钘ᆹ듡䑚W䟾X'ꅔ4FL勉Vܴ邨y)2'〚쭉⽵-鞣E,Q.?块", + "?(˧쩯@崟吋歄K": null + }, + "Gc럃녧>?2DYI鴿\\륨)澔0ᔬlx'觔7젘⤡縷螩%Sv׫묈/]↱&S h\u0006歋ᑛxi̘}ひY蔯_醨鯘煑橾8?䵎쨋z儬ꁏ*@츾:": null + } + } + } + ] + ] + ]} + }, + "HO츧G": 3.694949578823609E17, + "QC\u0012(翻曇Tf㷟bGBJ옉53\\嚇ᛎD/\u001b夾၉4\"핀@祎)쫆yD\"i먎Vn㿿V1W᨝䶀": -6150931500380982286, + "Z㓮P翸鍱鉼K䋞꘺튿⭁Y": -7704503411315138850, + "]모开ꬖP븣c霤<[3aΠ\"黁䖖䰑뮋ꤦ秽∼㑷冹T+YUt\"싳F↭䖏&鋌": -2.7231911483181824E18, + "tꎖ": -4.9517948741799555E-19, + "䋘즊.⬅IꬃۣQ챢ꄑ黐|f?C⾺|兕읯sC鬸섾整腨솷V": "旆柩l쪦sᖸMy㦅울썉瘗㎜檵9ꍂ駓ૉᚿ/u3씅徐拉[Z䞸ࡗ1ꆱ&Q풘?ǂ8\u0011BCDY2볨;鸏": null, + "幫 n煥s쁇펇 왊-$C\"衝:\u0014㣯舼.3뙗Yl⋇\"K迎멎[꽵s}9鉳UK8쐥\"掄㹖h㙈!얄સ?Ꜳ봺R伕UTD媚I䜘W鏨蔮": -4.150842714188901E-17, + "ﺯ^㄄\b죵@fྉkf颡팋Ꞧ{/Pm0V둳⻿/落韒ꊔᚬ@5螺G\\咸a谆⊪ቧ慷绖?财(鷇u錝F=r၍橢ឳn:^iᴵtD볠覅N赴": null + }] + }] + } + ] + ]} + ]}, + "謯?w厓奰T李헗聝ឍ貖o⪇弒L!캶$ᆅ": -4299324168507841322, + "뺊奉_垐浸延몏孄Z舰2i$q붿좾껇d▵餏\"v暜Ҭ섁m￴g>": -1.60911932510533427E18 + } + ] + } + ] + ]], + "퉝꺔㠦楶Pꅱ": 7517896876489142899, + "": false + } + ]}, + "是u&I狻餼|谖j\"7c됮sסּ-踳鉷`䣷쉄_A艣鳞凃*m⯾☦椿q㎭N溔铉tlㆈ^": 1.93547720203604352E18, + "kⲨ\\%vr#\u000bⒺY\\t<\/3﬌R訤='﹠8蝤Ꞵ렴曔r": false + } + ]}, + "阨{c?C\u001d~K?鎌Ԭ8烫#뙣P초遗t㭱E­돒䆺}甗[R*1!\\~h㕅᰺@<9JꏏષI䳖栭6綘걹ᅩM\"▯是∔v鬽顭⋊譬": "운ﶁK敂(欖C취پ℄爦賾" + } + }} + }], + "鷨赼鸙+\\䭣t圙ڹx᜾ČN<\/踘\"S_맶a鷺漇T彚⎲i㈥LT-xA캔$\u001cUH=a0츺l릦": "溣㣂0濕=鉵氬駘>Pꌢpb솇쬤h힊줎獪㪬CrQ矠a&脍꼬爼M茴/΅\u0017弝轼y#Ꞡc6둴=?R崏뷠麖w?" 
+ }, + "閕ᘜ]CT)䵞l9z'xZF{:ؐI/躅匽졁:䟇AGF૸\u001cퟗ9)駬慟ꡒꆒRS״툋A<>\u0010\"ꂔ炃7g덚E৏bꅰ輤]o㱏_뷕ܘ暂\"u": "芢+U^+㢩^鱆8*1鈶鮀\u0002뺰9⬳ꪮlL䃣괟,G8\u20a8DF㉪錖0ㄤ瓶8Nଷd?眡GLc陓\\_죌V쁰ल二?c띦捱 \u0019JC\u0011b⤉zẒT볕\"绣蘨뚋cꡉkI\u001e鳴", + "ꃣI'{6u^㡃#཰Kq4逹y൒䧠䵮!㱙/n??{L풓ZET㙠퍿X2᩟綳跠葿㚙w཮x캽扳B唕S|尾}촕%N?o䪨": null, + "ⰴFjෟ셈[\u0018辷px?椯\\1<ﲻ栘ᣁ봢憠뉴p": -5263694954586507640 + } + ] + ]] + ]} + ]}] + ] + ], + "?#癘82禩鋆ꊝty?&": -1.9419029518535086E-19 + } + ] + ] + ]} + ] + ] + ], + "훊榲.|῕戄&.㚏Zꛦ2\"䢥ሆ⤢fV_摕婔?≍Fji冀탆꜕i㏬_ẑKᅢ꫄蔻XWc|饡Siẘ^㲦?羡2ぴ1縁ᙅ?쐉Ou": false + }]] + ]}}}, + "慂뗄卓蓔ᐓ匐嚖/颹蘯/翻ㆼL?뇊,텵<\\獷ごCボ": null + }, + "p溉ᑟi짣z:䒤棇r^٫%G9缑r砌롧.물农g?0׼ሩ4ƸO㣥㯄쩞ጩ": null, + "껎繥YxK\"F젷쨹뤤1wq轫o?鱑뜀瘊?뎃h灑\\ꛣ}K峐^ኖ⤐林ꉓhy": null + } + ], + "᱀n肓ㄛ\"堻2>m殮'1橌%Ꞵ군=Ӳ鯨9耛<\/n據0u彘8㬇៩f᏿诙]嚊": "䋯쪦S럶匏ㅛ#)O`ሀX_鐪渲⛀㨻宅闩➈ꢙஶDR⪍" + }, + "tA썓龇 ⋥bj왎录r땽✒롰;羋^\\?툳*┎?썀ma䵳넅U䳆૘〹䆀LQ0\b疀U~u$M}(鵸g⳾i抦뛹?䤈땚검.鹆?ꩡtⶥGĒ;!ቹHS峻B츪켏f5≺": 2366175040075384032, + "전pJjleb]ួ": -7.5418493141528422E18, + "n.鎖ጲ\n?,$䪘": true + }, + "欈Ar㉣螵᪚茩?O)": null + }, + "쫸M#x}D秱欐K=侫们丐.KꕾxẠ\u001e㿯䣛F܍캗qq8꟞ṢFD훎⵳簕꭛^鳜\u205c٫~⑟~冫ऊ2쫰<\/戲윱o<\"": true + }, + "㷝聥/T뱂\u0010锕|内䞇x侁≦㭖:M?iM᣿IJe煜dG࣯尃⚩gPt*辂.{磼럾䝪@a\\袛?}ᓺB珼": true + } + } + ]]}]}}, + "tn\"6ꫤ샾䄄;銞^%VBPwu묪`Y僑N.↺Ws?3C⤻9唩S䠮ᐴm;sᇷ냞඘B/;툥B?lB∤)G+O9m裢0kC햪䪤": -4.5941249382502277E18, + "ᚔt'\\愫?鵀@\\びꂕP큠<<]煹G-b!S?\nꖽ鼫,ݛ&頺y踦?E揆릱H}햧캡b@手.p탻>췽㣬ꒅ`qe佭P>ᓂ&?u}毚ᜉ蟶頳졪ᎏzl2wO": -2.53561440423275936E17 + }]} + } + ] + ]], + "潈촒⿂叡": 5495738871964062986 + } + ]] + } + ] + ]} + ]] + ]] + ]} + ] + ]}, + "ႁq킍蓅R`謈蟐ᦏ儂槐僻ﹶ9婌櫞釈~\"%匹躾ɢ뤥>࢟瀴愅?殕节/냔O✬H鲽엢?ᮈੁ⋧d␽㫐zCe*": 2.15062231586689536E17, + "㶵Ui曚珰鋪ᾼ臧P{䍏䷪쨑̟A뼿T渠誈䏚D1!잶<\/㡍7?)2l≣穷᛾稝{:;㡹nemיּ訊`G": null, + "䀕\"飕辭p圁f#뫆䶷뛮;⛴ᩍ3灚덏ᰝ쎓⦷詵%᜖Մfs⇫(\u001e~P|ﭗCⲾផv湟W첋(텪બT<บSꏉ੗⋲X婵i ӵ⇮?L䬇|ꈏ?졸": 1.548341247351782E-19 + } + ] + }, + "t;:N\u0015q鐦Rt缆{ꮐC?஛㷱敪\\+鲊㉫㓪몗릙竏(氵kYS": "XᰂT?൮ô", + "碕飦幑|+ 㚦鏶`镥ꁩ B<\/加륙": -4314053432419755959, + "秌孳(p!G?V傫%8ሽ8w;5鲗㦙LI檸\u2098": "zG N볞䆭鎍흘\\ONK3횙<\/樚立圌Q튅k쩎Ff쁋aׂJK銆ઘ즐狩6༥✙䩜篥CzP(聻駇HHퟲ讃%,ά{렍p而刲vy䦅ክ^톺M楒鍢㹳]Mdg2>䤉洞", + "踛M젧>忔芿㌜Zk": 2215369545966507819, + "씐A`$槭頰퍻^U覒\bG毲aᣴU;8!팲f꜇E⸃_卵{嫏羃X쀳C7뗮m(嚼u N܁谟D劯9]#": true, + "ﻩ!뵸-筚P᭛}ἰ履lPh?౮ⶹꆛ穉뎃g萑㑓溢CX뾇G㖬A錟]RKaꄘ]Yo+@䘁's섎襠$^홰}F": null + }, + "粘ꪒ4HXᕘ蹵.$區\r\u001d묁77pPc^y笲Q<\/ꖶ 訍䃍ᨕG?*": 1.73773035935040224E17 + }, + "婅拳?bkU;#D矠❴vVN쩆t㜷A풃갮娪a%鮏絪3dAv룒#tm쑬⌛qYwc4|L8KZ;xU⓭㳔밆拓EZ7襨eD|隰ऌ䧼u9Ԣ+]贴P荿": 2.9628516456987075E18 + }]}}] + ]} + }} + ]}] + ], + "|g翉F*湹̶\u0005⏐1脉̀eI쩓ᖂ㫱0碞l䴨ꑅ㵽7AtἈ턧yq䳥塑:z:遀ᄐX눔擉)`N3昛oQ셖y-ڨ⾶恢ꈵq^<\/": null, + "菹\\랓G^璬x৴뭸ゆUS겧﮷Bꮤ ┉銜᯻0%N7}~f洋坄Xꔼ<\/4妟Vꄟ9:౟곡t킅冩䧉笭裟炂4봋ⱳ叺怊t+怯涗\"0㖈Hq": false, + "졬믟'ﺇফ圪쓬멤m邸QLব䗁愍4jvs翙 ྍ꧀艳H-|": null, + "컮襱⣱뗠 R毪/鹙꾀%헳8&": -5770986448525107020 + } + ], + "B䔚bꐻ뙏姓展槰T-똌鷺tc灿᫽^㓟䏀o3o$꘭趙萬I顩)뇭Ἑ䓝\f@{ᣨ`x3蔛": null + } + ] + ] + }], + "⦖扚vWꃱ꥙㾠壢輓{-⎳鹷贏璿䜑bG倛⋐磎c皇皩7a~ﳫU╣Q࠭ꎉS摅姽OW.홌ೞ.": null, + "蚪eVlH献r}ᮏ믠ﰩꔄ@瑄ⲱ": null, + "퀭$JWoꩢg역쁍䖔㑺h&ୢtXX愰㱇?㾫I_6 OaB瑈q裿": null, + "꽦ﲼLyr纛Zdu珍B絟쬴糔?㕂짹䏵e": "ḱ\u2009cX9멀i䶛簆㳀k" + } + ]]]], + "(_ꏮg່澮?ᩑyM<艷\u001aꪽ\\庼뙭Z맷㰩Vm\\lY筺]3㋲2㌩㄀Eਟ䝵⨄쐨ᔟgङHn鐖⤇놋瓇Q탚單oY\"♆臾jHᶈ征ቄ??uㇰA?#1侓": null + }, + "觓^~ሢ&iI띆g륎ḱ캀.ᓡꀮ胙鈉": 1.0664523593012836E-19, + "y詭Gbᔶऽs댁U:杜⤎ϲ쁗⮼D醄诿q뙰I#즧v蔎xHᵿt᡽[**?崮耖p缫쿃L菝,봬ꤦC쯵#=X1瞻@OZc鱗CQTx": null + } + ] + }}], + "剘紁\u0004\\Xn⊠6,တױ;嵣崇}讃iႽ)d1\\䔓": null + }, + "脨z\"{X,1u찜<'k&@?1}Yn$\u0015Rd輲ーa쮂굄+B$l": true, + "諳>*쭮괐䵟Ґ+<箁}빀䅱⡔檏臒hIH脟ꩪC핝ଗP좕\"0i<\/C褻D۞恗+^5?'ꂱ䚫^7}㡠cq6\\쨪ꔞꥢ?纖䫀氮蒫侲빦敶q{A煲G": -6880961710038544266 + }}] + }, + "5s⨲JvಽῶꭂᄢI.a৊": null, + "?1q꽏쿻ꛋDR%U娝>DgN乭G": -1.2105047302732358E-19 + } + ] + ]}, + "qZz`撋뙹둣j碇쁏\\ꆥ\u0018@藴疰Wz)O{F䶛l᷂绘訥$]뮍夻䢋䩇萿獰樧猵⣭j萶q)$꬚⵷0馢W:Ⱍ!Qoe": -1666634370862219540, + "t": "=wp|~碎Q鬳Ӎ\\l-<\/^ﳊhn퐖}䍔t碵ḛ혷?靻䊗", + "邙쇡㯇%#=,E4勃驆V繚q[Y댻XV㡸[逹ᰏ葢B@u=JS5?bLRn얮㍉⏅ﰳ?a6[&큟!藈": 1.2722786745736667E-19 + }, + "X블땨4{ph鵋ꉯ웸 
5p簂䦭s_E徔濧d稝~No穔噕뽲)뉈c5M윅>⚋[岦䲟懷恁?鎐꓆ฬ爋獠䜔s{\u001bm鐚儸煛%bﯿXT>ꗘ@8G": 1157841540507770724, + "媤娪Q杸\u0011SAyᡈ쿯": true, + "灚^ಸ%걁<\/蛯?\"祴坓\\\\'흍": -3.4614808555942579E18, + "釴U:O湛㴑䀣렑縓\ta)(j:숾却䗌gCiB뽬Oyuq輥厁/7)?今hY︺Q": null + } + ] + ]]]}] + ], + "I笔趠Ph!<ཛྷ㸞诘X$畉F\u0005笷菟.Esr릙!W☆䲖뗷莾뒭U\"䀸犜Uo3Gꯌx4r蔇᡹㧪쨢準<䂀%ࡡꟼ瑍8炝Xs0䀝销?fi쥱ꆝલBB": -8571484181158525797, + "L⦁o#J|\"⽩-㱢d㌛8d\\㶤傩儻E[Y熯)r噤὘勇 }": "e(濨쓌K䧚僒㘍蠤Vᛸ\"络QJL2,嬓왍伢㋒䴿考澰@(㏾`kX$끑эE斡,蜍&~y", + "vj.|统圪ᵮPL?2oŶ`밧\"勃+0ue%⿥绬췈체$6:qa렐Q;~晘3㙘鹑": true, + "ශؙ4獄⶿c︋i⚅:ん閝Ⳙ苆籦kw{䙞셕pC췃ꍬ␜꟯ꚓ酄b힝hwk꭭M鬋8B耳쑘WQ\\偙ac'唀x᪌\u2048*h짎#ፇ鮠뾏ឿ뀌": false, + "⎀jꄒ牺3Ⓝ컴~?親ꕽぼܓ喏瘘!@<튋㐌꿱⩦{a?Yv%⪧笯Uܱ栅E搚i뚬:ꄃx7䙳ꦋ&䓹vq☶I䁘ᾘ涜\\썉뺌Lr%Bc㍜3?ꝭ砿裞]": null, + "⭤뙓z(㡂%亳K䌽꫿AԾ岺㦦㼴輞낚Vꦴw냟鬓㹈뽈+o3譻K1잞": 2091209026076965894, + "ㇲ\t⋇轑ꠤ룫X긒\"zoY읇희wj梐쐑l侸`e%s": -9.9240075473576563E17, + "啸ꮑ㉰!ᚓ}銏": -4.0694813896301194E18, + ">]囋੽EK뇜>_ꀣ緳碖{쐐裔[<ನ\"䇅\"5L?#xTwv#罐\u0005래t应\\N?빗;": "v쮽瞭p뭃" + } + ]], + "斴槾?Z翁\"~慍弞ﻆ=꜡o5鐋dw\"?K蠡i샾ogDﲰ_C*⬟iㇷ4nય蟏[㟉U꽌娛苸 ঢ়操贻洞펻)쿗૊許X⨪VY츚Z䍾㶭~튃ᵦ<\/E臭tve猑x嚢": null, + "锡⛩<\/칥ꈙᬙ蝀&Ꚑ籬■865?_>L詏쿨䈌浿弥爫̫lj&zx<\/C쉾?覯n?": null, + "꾳鑤/꼩d=ᘈn挫ᑩ䰬ZC": "3錢爋6Ƹ䴗v⪿Wr益G韠[\u0010屗9쁡钁u?殢c䳀蓃樄욂NAq赟c튒瘁렶Aૡɚ捍" + } + ] + ] + ]} + ] + ] + }]]]}} + ]}], + "Ej䗳U<\/Q=灒샎䞦,堰頠@褙g_\u0003ꤾfⶽ?퇋!łB〙ד3CC䌴鈌U:뭔咎(Qો臃䡬荋BO7㢝䟸\"Yb": 2.36010731779814E-20, + "逸'0岔j\u000e눘먷翌C츊秦=ꭣ棭ှ;鳸=麱$XP⩉駚橄A\\좱⛌jqv䰞3Ь踌v㳆¹gT┌gvLB賖烡m?@E঳i": null + }, + "曺v찘ׁ?&绫O័": 9107241066550187880 + } + ] + ], + "(e屄\u0019昜훕琖b蓘ᬄ0/۲묇Z蘮ဏ⨏蛘胯뢃@㘉8ሪWᨮ⦬ᅳ䅴HI၇쨳z囕陻엣1赳o": true, + ",b刈Z,ၠ晐T솝ŕB⩆ou'퐼≃绗雗d譊": null, + "a唥KB\"ﳝ肕$u\n^⅄P䟼냉䞸⩪u윗瀱ꔨ#yşs꒬=1|ﲤ爢`t౐튼쳫_Az(Ṋ擬㦷좕耈6": 2099309172767331582, + "?㴸U<\/䢔ꯡ阽扆㐤q鐋?f㔫wM嬙-;UV죫嚔픞G&\"Cᗍ䪏풊Q": "VM7疹+陕枡툩窲}翡䖶8欞čsT뮐}璤:jﺋ鎴}HfA൝⧻Zd#Qu茅J髒皣Y-︴[?-~쉜v딏璮㹚䅊﩯<-#\u000e걀h\u0004u抱﵊㼃U<㱷⊱IC進" + }, + "숌dee節鏽邺p넱蹓+e罕U": true + } + ], + "b⧴룏??ᔠ3ぱ>%郿劃翐ꏬꠛW瞳᫏누躨狀ໄy੽\"ីuS=㨞馸k乆E": "トz݈^9R䬑<ﮛGRꨳ\u000fTT泠纷꽀MRᴱ纊:㠭볮?%N56%鈕1䗍䜁a䲗j陇=뿻偂衋࿘ᓸ?ᕵZ+<\/}H耢b䀁z^f$&㝒LkꢳI脚뙛u": 5.694374481577558E-20 + }] + } + ]], + "obj": {"key": "wrong value"}, + "퓲꽪m{㶩/뇿#⼢&᭙硞㪔E嚉c樱㬇1a綑᝖DḾ䝩": null + }, + "key": "6.908319653520691E8", + "z": { + "6U閆崬밺뀫颒myj츥휘:$薈mY햚#rz飏+玭V㭢뾿愴YꖚX亥ᮉ푊\u0006垡㐭룝\"厓ᔧḅ^Sqpv媫\"⤽걒\"˽Ἆ?ꇆ䬔未tv{DV鯀Tἆl凸g\\㈭ĭ즿UH㽤": null, + "b茤z\\.N": [[ + "ZL:ᅣዎ*Y|猫劁櫕荾Oj为1糕쪥泏S룂w࡛Ᏺ⸥蚙)", + { + "\"䬰ỐwD捾V`邀⠕VD㺝sH6[칑.:醥葹*뻵倻aD\"": true, + "e浱up蔽Cr෠JK軵xCʨ<뜡癙Y獩ケ齈X/螗唻?<蘡+뷄㩤쳖3偑犾&\\첊xz坍崦ݻ鍴\"嵥B3㰃詤豺嚼aqJ⑆∥韼@\u000b㢊\u0015L臯.샥": false, + "l?Ǩ喳e6㔡$M꼄I,(3᝝縢,䊀疅뉲B㴔傳䂴\u0088㮰钘ꜵ!ᅛ韽>": -5514085325291784739, + "o㮚?\"춛㵉<\/﬊ࠃ䃪䝣wp6ἀ䱄[s*S嬈貒pᛥ㰉'돀": [{ + "(QP윤懊FI<ꃣ『䕷[\"珒嶮?%Ḭ壍಻䇟0荤!藲끹bd浶tl\u2049#쯀@僞": {"i妾8홫": { + ",M맃䞛K5nAㆴVN㒊햬$n꩑&ꎝ椞阫?/ṏ세뉪1x쥼㻤㪙`\"$쟒薟B煌܀쨝ଢ଼2掳7㙟鴙X婢\u0002": "Vዉ菈᧷⦌kﮞఈnz*﷜FM\"荭7ꍀ-VR<\/';䁙E9$䩉\f @s?퍪o3^衴cඎ䧪aK鼟q䆨c{䳠5mᒲՙ蘹ᮩ": { + "F㲷JGo⯍P덵x뒳p䘧☔\"+ꨲ吿JfR㔹)4n紬G练Q፞!C|": true, + "p^㫮솎oc.೚A㤠??r\u000f)⾽⌲們M2.䴘䩳:⫭胃\\፾@Fᭌ\\K": false, + "蟌Tk愙潦伩": { + "a<\/@ᾛ慂侇瘎": -7271305752851720826, + "艓藬/>၄ṯ,XW~㲆w": {"E痧郶)㜓ha朗!N赻瞉駠uC\u20ad辠x퓮⣫P1ࠫLMMX'M刼唳됤": null, + "P쓫晥%k覛ዩIUᇸ滨:噐혲lMR5䋈V梗>%幽u頖\\)쟟": null, + "eg+昉~矠䧞难\b?gQ쭷筝\\eꮠNl{ಢ哭|]Mn銌╥zꖘzⱷ⭤ᮜ^": [ + -1.30142114406914976E17, + -1.7555215491128452E-19, + null, + "渾㨝ߏ牄귛r?돌?w[⚞ӻ~廩輫㼧/", + -4.5737191805302129E18, + null, + "xy࿑M[oc셒竓Ⓔx?뜓y䊦>-D켍(&&?XKkc꩖ﺸᏋ뵞K伕6ী)딀P朁yW揙?훻魢傎EG碸9類៌g踲C⟌aEX舲:z꒸许", + 3808159498143417627, + null, + {"m試\u20df1{G8&뚈h홯J<\/": { + "3ஸ厠zs#1K7:rᥞoꅔꯧ&띇鵼鞫6跜#赿5l'8{7㕳(b/j\"厢aq籀ꏚ\u0015厼稥": [ + -2226135764510113982, + true, + null, + { + "h%'맞S싅Hs&dl슾W0j鿏MםD놯L~S-㇡R쭬%": null, + "⟓咔謡칲\u0000孺ꛭx旑檉㶆?": null, + "恇I転;￸B2Y`z\\獓w,놏濐撐埵䂄)!䶢D=ഭ㴟jyY": { + "$ࡘt厛毣ൢI芁<겿骫⫦6tr惺a": [ + 6.385779736989334E-20, + false, + true, + true, + [ + -6.891946211462334E-19, + null, + { + "]-\\Ꟑ1/薓❧Ὂ\\l牑\u0007A郃)阜ᇒᓌ-塯`W峬G}SDb㬨Q臉⮻빌O鞟톴첂B㺱<ƈmu챑J㴹㷳픷Oㆩs": { + 
"\"◉B\"pᶉt骔J꩸ᄇᛐi╰栛K쉷㉯鐩!㈐n칍䟅難>盥y铿e୔蒏M貹ヅ8嘋퀯䉶ጥ㏢殊뻳\"絧╿ꉑ䠥?∃蓊{}㣣Gk긔H1哵峱": false, + "6.瀫cN䇮F㧺?\\椯=ڈT䘆4␘8qv": -3.5687501019676885E-19, + "Q?yऴr혴{஀䳘p惭f1ﹸ䅷䕋贲<ྃᄊ繲hq\\b|#QSTs1c-7(䵢\u2069匏絘ꯉ:l毴汞t戀oෟᵶ뮱፣-醇Jx䙬䐁햢0࣫ᡁgrㄛ": "\u0011_xM/蘇Chv;dhA5.嗀绱V爤ﰦi뵲M", + "⏑[\"ugoy^儣횎~U\\섯겜論l2jw஌yD腅̂\u0019": true, + "ⵯɇ䐲᫿࢚!㯢l샅笶戮1꣖0Xe": null, + "劅f넀識b宁焊E찓橵G!ʱ獓뭔雩괛": [{"p⹣켙[q>燣䍃㞽ᩲx:쓤삘7玑퇼0<\/q璂ᑁ[Z\\3䅵䧳\u0011㤧|妱緒C['췓Yꞟ3Z鳱雼P錻BU씧U`ᢶg蓱>.1ӧ譫'L_5V䏵Ц": [ + false, + false, + {"22䂍盥N霂얢躰e9⑩_뵜斌n@B}$괻Yᐱ@䧋V\"☒-諯cV돯ʠ": true, + "Ű螧ᔼ檍鍎땒딜qꄃH뜣<獧ूCY吓⸏>XQ㵡趌o끬k픀빯a(ܵ甏끆୯/6Nᪧ}搚ᆚ짌P牰泱鈷^d꣟#L삀\"㕹襻;k㸊\\f+": true, + "쎣\",|⫝̸阊x庿k잣v庅$鈏괎炔k쬪O_": [ + "잩AzZGz3v愠ꉈⵎ?㊱}S尳௏p\r2>췝IP䘈M)w|\u000eE", + -9222726055990423201, + null, + [ + false, + {"´킮'뮤쯽Wx讐V,6ᩪ1紲aႈ\u205czD": [ + -930994432421097536, + 3157232031581030121, + "l貚PY䃛5@䭄귻m㎮琸f": 1.0318894506812084E-19, + "࢜⩢Ш䧔1肽씮+༎ᣰ闺馺窃䕨8Mƶq腽xc(៯夐J5굄䕁Qj_훨/~価.䢵慯틠퇱豠㼇Qﵘ$DuSp(8Uญ<\/ಟ룴𥳐ݩ$": 8350772684161555590, + "ㆎQ䄾\u001bpᩭ${[諟^^骴᤮b^ㅥI┧T㉇⾞\"绦r䰂f矩'-7䡭桥Dz兔V9谶居㺍ᔊ䩯덲.\u001eL0ὅㅷ釣": [{ + "<쯬J卷^숞u࠯䌗艞R9닪g㐾볎a䂈歖意:%鐔|ﵤ|y}>;2,覂⶚啵tb*仛8乒㓶B࿠㯉戩oX 貘5V嗆렽낁߼4h䧛ꍺM空\\b꿋貼": 8478577078537189402, + "VD*|吝z~h譺aᯒ": { + "YI췢K<\/濳xNne玗rJo쾘3핰鴊\"↱AR:ࢷ\"9?\"臁說)?誚ꊏe)_D翾W?&F6J@뺾ꍰNZ醊Z쾈വH嶿?炫㷱鬰M겈᭨b,⻁鈵P䕡䀠८ⱄ홎鄣": { + "@?k2鶖㋮\"Oರ K㨇廪儲\u0017䍾J?);\b*묀㗠섳햭1MC V": null, + "UIICP!BUA`ᢈ㋸~袩㗪⾒=fB﮴l1ꡛ죘R辂여ҳ7쮡<䩲`熕8頁": 4481809488267626463, + "Y?+8먙ᚔ鋳蜩럶1㥔y璜౩`": [ + null, + 1.2850335807501874E-19, + "~V2", + 2035406654801997866, + { + "<숻1>\"": -8062468865199390827, + "M㿣E]}qwG莎Gn᝶(ꔙ\\D⬲iꇲs寢t駇S뀡ꢜ": false, + "pꝤ㎏9W%>M;-U璏f(^j1?&RB隧 忓b똊E": "#G?C8.躬ꥯ'?냪#< 渟&헿란zpo왓Kj}鷧XﻘMツb䕖;㪻", + "vE풤幉xz뱕쫥Ug㦲aH} ᣟp:鬼YᰟH3镔ᴚ斦\\鏑r*2橱G⼔F/.j": true, + "RK좬뎂a홠f*f㱉ᮍ⦋潙㨋Gu곌SGI3I뿐\\F',)t`荁蘯囯ﮉ裲뇟쥼_ገ驪▵撏ᕤV": 1.52738225997956557E18, + "^k굲䪿꠹B逤%F㱢漥O披M㽯镞竇霒i꼂焅륓\u00059=皫之눃\u2047娤閍銤唫ၕb<\/w踲䔼u솆맚,䝒ᝳ'/it": "B餹饴is権ꖪ怯ꦂẉဎt\"!凢谵⧿0\\<=(uL䷍刨쑪>俆揓Cy襸Q힆䆭涷<\/ᐱ0ɧ䗾䚹\\ኜ?ꄢᇘ`䴢{囇}᠈䴥X4퓪檄]ꥷ/3謒ሴn+g騍X", + "GgG꽬[(嫓몍6\u0004궍宩㙻/>\u0011^辍dT腪hxǑ%ꊇk,8(W⧂結P鬜O": [{ + "M㴾c>\\ᓲ\u0019V{>ꤩ혙넪㭪躂TS-痴໸闓⍵/徯O.M㏥ʷD囎⧔쁳휤T??鉬뇙=#ꢫ숣BX䭼<\/d똬졬g榿)eꨋﯪ좇첻\u001a\u0011\";~쓆BH4坋攊7힪", + "iT:L闞椕윚*滛gI≀Wਟඊ'ꢆ縺뱹鮚Nꩁ᧬蕼21줧\\䋯``⍐\\㏱鳨": 1927052677739832894, + "쮁缦腃g]礿Y㬙 fヺSɪ꾾N㞈": [ + null, + null, + { + "!t,灝Y 1䗉罵?c饃호䉂Cᐭ쒘z(즽sZG㬣sഖE4뢜㓕䏞丮Qp簍6EZឪ겛fx'ꩱQ0罣i{k锩*㤴㯞r迎jTⲤ渔m炅肳": [ + -3.3325685522591933E18, + [{"㓁5]A䢕1룥BC?Ꙍ`r룔Ⳛ䙡u伲+\u0001്o": [ + null, + 4975309147809803991, + null, + null, + {"T팘8Dﯲ稟MM☻㧚䥧/8ﻥ⥯aXLaH\"顾S☟耲ît7fS෉놁뮔/ꕼ䓈쁺4\\霶䠴ᩢ<\/t4?죵>uD5➶༆쉌럮⢀秙䘥\u20972ETR3濡恆vB? ~鸆\u0005": { + "`閖m璝㥉b뜴?Wf;?DV콜\u2020퍉౓擝宏ZMj3mJ먡-傷뱙yח㸷꥿ ໘u=M읝!5吭L4v\\?ǎ7C홫": null, + "|": false, + "~Ztᛋ䚘\\擭㗝傪W陖+㗶qᵿ蘥ᙄp%䫎)}=⠔6ᮢS湟-螾-mXH?cp": 448751162044282216, + "\u209fad놹j檋䇌ᶾ梕㉝bוּ": {"?苴ꩠD䋓帘5騱qﱖPF?☸珗顒yU ᡫcb䫎 S@㥚gꮒ쎘泴멖\\:I鮱TZ듒ᶨQ3+f7캙\"?\f풾\\o杞紟﻽M.⏎靑OP": [ + -2.6990368911551596E18, + [{"䒖@<᰿<\/⽬tTr腞&G%᳊秩蜰擻f㎳?S㵧\r*k뎾-乢겹隷j軛겷0룁鮁": {")DO0腦:춍逿:1㥨่!蛍樋2": [{ + ",ꌣf侴笾m๫ꆽ?1?U?\u0011ꌈꂇ": { + "x捗甠nVq䅦w`CD⦂惺嘴0I#vỵ} \\귂S끴D얾?Ԓj溯\"v餄a": { + "@翙c⢃趚痋i\u0015OQ⍝lq돆Y0pࢥ3쉨䜩^<8g懥0w)]䊑n洺o5쭝QL댊랖L镈Qnt⪟㒅십q헎鳒⮤眉ᔹ梠@O縠u泌ㄘb榚癸XޔFtj;iC": false, + "I&뱋゘|蓔䔕측瓯%6ᗻHW\\N1貇#?僐ᗜgh᭪o'䗈꽹Rc욏/蔳迄༝!0邔䨷푪8疩)[쭶緄㇈୧ፐ": { + "B+:ꉰ`s쾭)빼C羍A䫊pMgjdx䐝Hf9᥸W0!C樃'蘿f䫤סи\u0017Jve? 
覝f둀⬣퓉Whk\"஼=չﳐ皆笁BIW虨쫓F廰饞": -642906201042308791, + "sb,XcZ<\/m㉹ ;䑷@c䵀s奤⬷7`ꘖ蕘戚?Feb#輜}p4nH⬮eKL트}": [ + "RK鳗z=袤Pf|[,u욺", + "Ẏᏻ罯뉋⺖锅젯㷻{H䰞쬙-쩓D]~\u0013O㳢gb@揶蔉|kᦂ❗!\u001ebM褐sca쨜襒y⺉룓", + null, + null, + true, + -1.650777344339075E-19, + false, + "☑lꄆs힨꤇]'uTന⌳농].1⋔괁沰\"IWഩ\u0019氜8쟇䔻;3衲恋,窌z펏喁횗?4?C넁问?ᥙ橭{稻Ⴗ_썔", + "n?]讇빽嗁}1孅9#ꭨ靶v\u0014喈)vw祔}룼쮿I", + -2.7033457331882025E18, + { + ";⚃^㱋x:饬ኡj'꧵T☽O㔬RO婎?향ᒭ搩$渣y4i;(Q>꿘e8q": "j~錘}0g;L萺*;ᕭꄮ0l潛烢5H▄쳂ꏒוֹꙶT犘≫x閦웧v", + "~揯\u2018c4職렁E~ᑅቚꈂ?nq뎤.:慹`F햘+%鉎O瀜쟏敛菮⍌浢<\/㮺紿P鳆ࠉ8I-o?#jﮨ7v3Dt赻J9": null, + "ࣝW䌈0ꍎqC逖,횅c၃swj;jJS櫍5槗OaB>D踾Y": {"㒰䵝F%?59.㍈cᕨ흕틎ḏ㋩B=9IېⓌ{:9.yw}呰ㆮ肒᎒tI㾴62\"ዃ抡C﹬B<\/촋jo朣", + [ + -7675533242647793366, + {"ᙧ呃:[㒺쳀쌡쏂H稈㢤\u001dᶗGG-{GHྻຊꡃ哸䵬;$?&d\\⥬こN圴됤挨-'ꕮ$PU%?冕눖i魁q騎Q": [ + false, + [[ + 7929823049157504248, + [[ + true, + "Z菙\u0017'eꕤ᱕l,0\\X\u001c[=雿8蠬L<\/낲긯W99g톉4ퟋb㝺\u0007劁'!麕Q궈oW:@X၎z蘻m絙璩귓죉+3柚怫tS捇蒣䝠-擶D[0=퉿8)q0ٟ", + "唉\nFA椭穒巯\\䥴䅺鿤S#b迅獘 ﶗ꬘\\?q1qN犠pX꜅^䤊⛤㢌[⬛휖岺q唻ⳡ틍\"㙙Eh@oA賑㗠y必Nꊑᗘ", + -2154220236962890773, + -3.2442003245397908E18, + "Wᄿ筠:瘫퀩?o貸q⊻(᎞KWf宛尨h^残3[U(='橄", + -7857990034281549164, + 1.44283696979059942E18, + null, + {"ꫯAw跭喀 ?_9\"Aty背F=9缉ྦྷ@;?^鞀w:uN㘢Rỏ": [ + 7.393662029337442E15, + 3564680942654233068, + [ + false, + -5253931502642112194, + "煉\\辎ೆ罍5⒭1䪁䃑s䎢:[e5}峳ﴱn騎3?腳Hyꏃ膼N潭錖,Yᝋ˜YAၓ㬠bG렣䰣:", + true, + null, + { + "⒛'P&%죮|:⫶춞": -3818336746965687085, + "钖m<\/0ݎMtF2Pk=瓰୮洽겎.": [[ + -8757574841556350607, + -3045234949333270161, + null, + { + "Ꮬr輳>⫇9hU##w@귪A\\C 鋺㘓ꖐ梒뒬묹㹻+郸嬏윤'+g<\/碴,}ꙫ>손;情d齆J䬁ຩ撛챝탹/R澡7剌tꤼ?ặ!`⏲睤\u00002똥଴⟏": null, + "\u20f2ܹe\\tAꥍư\\x当뿖렉禛;G檳ﯪS૰3~㘠#[J<}{奲 5箉⨔{놁<\/釿抋,嚠/曳m&WaOvT赋皺璑텁": [[ + false, + null, + true, + -5.7131445659795661E18, + "萭m䓪D5|3婁ఞ>蠇晼6nﴺPp禽羱DS<睓닫屚삏姿", + true, + [ + -8759747687917306831, + { + ">ⓛ\t,odKr{䘠?b퓸C嶈=DyEᙬ@ᴔ쨺芛髿UT퓻春<\/yꏸ>豚W釺N뜨^?꽴﨟5殺ᗃ翐%>퍂ဿ䄸沂Ea;A_\u0005閹殀W+窊?Ꭼd\u0013P汴G5썓揘": 4.342729067882445E-18, + "Q^즾眆@AN\u0011Kb榰냎Y#䝀ꀒᳺ'q暇睵s\"!3#I⊆畼寤@HxJ9": false, + "⿾D[)袨㇩i]웪䀤ᛰMvR<蟏㣨": {"v퇓L㪱ꖣ豛톤\\곱#kDTN": [{ + "(쾴䡣,寴ph(C\"㳶w\"憳2s馆E!n!&柄<\/0Pꈗſ?㿳Qd鵔": {"娇堰孹L錮h嵅⛤躏顒?CglN束+쨣ﺜ\\MrH": {"獞䎇둃ቲ弭팭^ꄞ踦涟XK錆쳞ឌ`;੶S炥騞ଋ褂B៎{ڒ䭷ᶼ靜pI荗虶K$": [{"◖S~躘蒉꫿輜譝Q㽙闐@ᢗ¥E榁iء5┄^B[絮跉ᰥ遙PWi3wㄾⵀDJ9!w㞣ᄎ{듒ꓓb6\\篴??c⼰鶹⟧\\鮇ꮇ": [[ + 654120831325413520, + -1.9562073916357608E-19, + { + "DC(昐衵ἡ긙갵姭|֛[t": 7.6979110359897907E18, + "J␅))嫼❳9Xfd飉j7猬ᩉ+⤻眗벎E鰉Zᄊ63zၝ69}ZᶐL崭ᦥ⡦靚⋛ꎨ~i㨃咊ꧭo䰠阀3C(": -3.5844809362512589E17, + "p꣑팱쒬ꎑ뛡Ꙩ挴恍胔&7ᔈ묒4Hd硶훐㎖zꢼ豍㿢aሃ=<\/湉鵲EӅ%$F!퍶棌孼{O駍਺geu+": ")\u001b잓kŀX쩫A밁®ڣ癦狢)扔弒p}k縕ꩋ,䃉tࣼi", + "ァF肿輸<솄G-䢹䛸ꊏl`Tqꕗ蒞a氷⸅ᴉ蠰]S/{J왲m5{9.uέ~㕚㣹u>x8U讁B덺襪盎QhVS맅킃i识{벂磄Iහ䙅xZy/抍૭Z鲁-霳V据挦ℒ": null, + "㯛|Nꐸb7ⵐb?拠O\u0014ކ?-(EꞨ4ꕷᄤYᯕOW瞺~螸\"욿ќe㺰\"'㌢ƐW\u0004瞕>0?V鷵엳": true, + "뤥G\\迋䠿[庩'꼡\u001aiᩮV쯁ᳪ䦪Ô;倱ନ뛁誈": null, + "쥹䄆䚟Q榁䎐᢭<\/2㕣p}HW蟔|䃏꿈ꚉ锳2Pb7㙑Tⅹᵅ": { + "Y?֭$>#cVBꩨ:>eL蒁務": { + "86柡0po 䏚&-捑Ћ祌<\/휃-G*㶢הּ쩍s㶟餇c걺yu꽎還5*턧簕Og婥SꝐ": null, + "a+葞h٥ࠆ裈嗫ﵢ5輙퀟ᛜ,QDﹼ⟶Y騠锪E_|x죗j侵;m蜫轘趥?븅w5+mi콛L": { + ";⯭ﱢ!买F⽍柤鶂n䵣V㫚墱2렾ELEl⣆": [ + true, + -3.6479311868339015E-18, + -7270785619461995400, + 3.334081886177621E18, + 2.581457786298155E18, + -6.605252412954115E-20, + -3.9232347037744167E-20, + { + "B6㊕.k1": null, + "ZAꄮJ鮷ᳱo갘硥鈠䠒츼": { + "ᕅ}럡}.@y陪鶁r業'援퀉x䉴ﵴl퍘):씭脴ᥞhiꃰblﲂ䡲엕8߇M㶭0燋標挝-?PCwe⾕J碻Ᾱ䬈䈥뷰憵賣뵓痬+": {"a췩v礗X⋈耓ፊf罅靮!㔽YYᣓw澍33⎔芲F|\"䜏T↮輦挑6ᓘL侘?ᅥ]덆1R௯✎餘6ꏽ<\/௨\\?q喷ꁫj~@ulq": {"嗫欆뾔Xꆹ4H㌋F嵧]ࠎ]㠖1ꞤT<$m뫏O i댳0䲝i": {"?෩?\u20cd슮|ꯆjs{?d7?eNs⢚嫥氂䡮쎱:鑵롟2hJꎒﯭ鱢3춲亄:뼣v䊭諱Yj択cVmR䩃㘬T\"N홝*ै%x^F\\_s9보zz4淗?q": [ + null, + "?", + 2941869570821073737, + "{5{殇0䝾g6밖퍋臩綹R$䖭j紋釰7sXI繳漪행y", + false, + "aH磂?뛡#惇d婅?Fe,쐘+늵䍘\"3r瘆唊勐j⳧࠴ꇓ<\/唕윈x⬌讣䋵%拗ᛆⰿ妴᝔M2㳗必꧂淲?ゥ젯檢<8끒MidX䏒3᳻Q▮佐UT|⤪봦靏⊏", + [[{ + "颉(&뜸귙{y^\"P퟉춝Ჟ䮭D顡9=?}Y誱<$b뱣RvO8cH煉@tk~4ǂ⤧⩝屋SS;J{vV#剤餓ᯅc?#a6D,s": [ + -7.8781018564821536E16, + true, + [ + 
-2.28770899315832371E18, + false, + -1.0863912140143876E-20, + -6282721572097446995, + 6767121921199223078, + -2545487755405567831, + false, + null, + -9065970397975641765, + [ + -5.928721243413937E-20, + {"6촊\u001a홯kB0w撨燠룉{绎6⳹!턍贑y▾鱧ժ[;7ᨷ∀*땒䪮1x霆Hᩭ☔\"r䝐7毟ᝰr惃3ꉭE+>僒澐": [ + "Ta쎩aƝt쵯ⰪVb", + [ + -5222472249213580702, + null, + -2851641861541559595, + null, + 4808804630502809099, + 5657671602244269874, + "5犲﨣4mᥣ?yf젫꾯|䋬잁$`Iⳉﴷ扳兝,'c", + false, + [ + null, + { + "DyUIN쎾M仼惀⮥裎岶泭lh扠\u001e礼.tEC癯튻@_Qd4c5S熯A<\/\6U윲蹴Q=%푫汹\\\u20614b[௒C⒥Xe⊇囙b,服3ss땊뢍i~逇PA쇸1": -2.63273619193485312E17, + "Mq꺋貘k휕=nK硍뫞輩>㾆~἞ࡹ긐榵l⋙Hw뮢帋M엳뢯v⅃^": 1877913476688465125, + "ᶴ뻗`~筗免⚽টW˃⽝b犳䓺Iz篤p;乨A\u20ef쩏?疊m㝀컩뫡b탔鄃ᾈV(遢珳=뎲ିeF仢䆡谨8t0醄7㭧瘵⻰컆r厡궥d)a阄፷Ed&c﯄伮1p": null, + "⯁w4曢\"(欷輡": "\"M᭫]䣒頳B\\燧ࠃN㡇j姈g⊸⺌忉ꡥF矉স%^", + "㣡Oᄦ昵⫮Y祎S쐐級㭻撥>{I$": -378474210562741663, + "䛒掷留Q%쓗1*1J*끓헩ᦢ﫫哉쩧EↅIcꅡ\\?ⴊl귛顮4": false, + "寔愆샠5]䗄IH贈=d﯊/偶?ॊn%晥D視N򗘈'᫂⚦|X쵩넽z질tskxDQ莮Aoﱻ뛓": true, + "钣xp?&\u001e侉/y䴼~?U篔蘚缣/I畚?Q绊": -3034854258736382234, + "꺲໣眀)⿷J暘pИfAV삕쳭Nꯗ4々'唄ⶑ伻㷯騑倭D*Ok꧁3b␽_<\/챣Xm톰ၕ䆄`*fl㭀暮滠毡?": [ + "D男p`V뙸擨忝븪9c麺`淂⢦Yw⡢+kzܖ\fY1䬡H歁)벾Z♤溊-혰셢?1<-\u0005;搢Tᐁle\\ᛵߓﭩ榩訝-xJ;巡8깊蠝ﻓU$K": { + "Vꕡ諅搓W=斸s︪vﲜ츧$)iꡟ싉e寳?ጭムVથ嵬i楝Fg<\/Z|៪ꩆ-5'@ꃱ80!燱R쇤t糳]罛逇dṌ֣XHiͦ{": true, + "Ya矲C멗Q9膲墅携휻c\\딶G甔<\/.齵휴": -1.1456247877031811E-19, + "z#.OO￝J": -8263224695871959017, + "崍_3夼ᮟ1F븍뽯ᦓ鴭V豈Ь": [{ + "N蒬74": null, + "yuB?厅vK笗!ᔸcXQ旦컶P-녫mᄉ麟_": "1R@ 톘xa_|﩯遘s槞d!d껀筤⬫薐焵먑D{\\6k共倌☀G~AS_D\"딟쬚뮥馲렓쓠攥WTMܭ8nX㩴䕅檹E\u0007ﭨN 2 ℆涐ꥏ꠵3▙玽|됨_\u2048", + "恐A C䧩G": {":M큣5e들\\ꍀ恼ᔄ靸|I﨏$)n": { + "|U䬫㟯SKV6ꛤ㗮\bn봻䲄fXT:㾯쳤'笓0b/ೢC쳖?2浓uO.䰴": "ཐ꼋e?``,ᚇ慐^8ꜙNM䂱\u0001IᖙꝧM'vKdꌊH牮r\\O@䊷ᓵ쀆(fy聻i툺\"?<\/峧ࣞ⓺ᤤ쵒߯ꎺ騬?)刦\u2072l慪y꺜ﲖTj+u", + "뽫hh䈵w>1ⲏ쐭V[ⅎ\\헑벑F_㖝⠗㫇h恽;῝汰ᱼ瀖J옆9RR셏vsZ柺鶶툤r뢱橾/ꉇ囦FGm\"謗ꉦ⨶쒿⥡%]鵩#ᖣ_蹎 u5|祥?O", + null, + 2.0150326776036215E-19, + null, + true, + false, + true, + {"\fa᭶P捤WWc᠟f뚉ᬏ퓗ⳀW睹5:HXH=q7x찙X$)모r뚥ᆟ!Jﳸf": [ + -2995806398034583407, + [ + 6441377066589744683, + "Mﶒ醹i)Gἦ廃s6몞 KJ౹礎VZ螺费힀\u0000冺업{谥'꡾뱻:.ꘘ굄奉攼Di᷑K鶲y繈욊阓v㻘}枭캗e矮1c?휐\"4\u0005厑莔뀾墓낝⽴洗ṹ䇃糞@b1\u0016즽Y轹", + { + "1⽕⌰鉟픏M㤭n⧴ỼD#%鐘⊯쿼稁븣몐紧ᅇ㓕ᛖcw嬀~ഌ㖓(0r⧦Q䑕髍ര铂㓻R儮\"@ꇱm❈௿᦯頌8}㿹犴?xn잆꥽R": 2.07321075750427366E18, + "˳b18㗈䃟柵Z曆VTAu7+㛂cb0﯑Wp執<\/臋뭡뚋刼틮荋벲TLP预庰܈G\\O@VD'鱃#乖끺*鑪ꬳ?Mޞdﭹ{␇圯쇜㼞顄︖Y홡g": [{ + "0a,FZ": true, + "2z̬蝣ꧦ驸\u0006L↛Ḣ4๚뿀'?lcwᄧ㐮!蓚䃦-|7.飑挴.樵*+1ﮊ\u0010ꛌ%貨啺/JdM:똍!FBe?鰴㨗0O财I藻ʔWA᫓G쳛u`<\/I": [{ + "$τ5V鴐a뾆両環iZp頻යn븃v": -4869131188151215571, + "*즢[⦃b礞R◚nΰꕢH=귰燙[yc誘g䆌?ଜ臛": { + "洤湌鲒)⟻\\䥳va}PeAMnN[": "㐳ɪ/(軆lZR,Cp殍ȮN啷\"3B婴?i=r$펽ᤐ쀸", + "阄R4㒿㯔ڀ69ZᲦ2癁핌噗P崜#\\-쭍袛&鐑/$4童V꩑_ZHA澢fZ3": {"x;P{긳:G閉:9?活H": [ + "繺漮6?z犞焃슳\">ỏ[Ⳛ䌜녏䂹>聵⼶煜Y桥[泥뚩MvK$4jtロ", + "E#갶霠좭㦻ୗ먵F+䪀o蝒ba쮎4X㣵 h", + -335836610224228782, + null, + null, + [ + "r1᫩0>danjY짿bs{", + [ + -9.594464059325631E-23, + 1.0456894622831624E-20, + null, + 5.803973284253454E-20, + -8141787905188892123, + true, + -4735305442504973382, + 9.513150514479281E-20, + "7넳$螔忷㶪}䪪l짴\u0007鹁P鰚HF銏ZJﳴ/⍎1ᷓ忉睇ᜋ쓈x뵠m䷐窥Ꮤ^\u0019ᶌ偭#ヂt☆၃pᎍ臶䟱5$䰵&๵分숝]䝈뉍♂坎\u0011<>", + "C蒑貑藁lﰰ}X喇몛;t밿O7/᯹f\u0015kI嘦<ዴ㟮ᗎZ`GWퟩ瑹࡮ᅴB꿊칈??R校s脚", + { + "9珵戬+AU^洘拻ቒy柭床'粙XG鞕᠜繀伪%]hC,$輙?Ut乖Qm떚W8઼}~q⠪rU䤶CQ痗ig@#≲t샌f㈥酧l;y闥ZH斦e⸬]j⸗?ঢ拻퀆滌": null, + "畯}㧢J罚帐VX㨑>1ꢶkT⿄蘥㝑o|<嗸層沈挄GEOM@-䞚䧰$만峬輏䠱V✩5宸-揂D'㗪yP掶7b⠟J㕻SfP?d}v㼂Ꮕ'猘": { + "陓y잀v>╪": null, + "鬿L+7:됑Y=焠U;킻䯌잫!韎ஔ\f": { + "駫WmGጶ": { + "\\~m6狩K": -2586304199791962143, + "ႜࠀ%͑l⿅D.瑢Dk%0紪dḨTI픸%뗜☓s榗኉\"?V籄7w髄♲쟗翛歂E䤓皹t ?)ᄟ鬲鐜6C": { + "_췤a圷1\u000eB-XOy缿請∎$`쳌eZ~杁튻/蜞`塣৙\"⪰\"沒l}蕌\\롃荫氌.望wZ|o!)Hn獝qg}": null, + "kOSܧ䖨钨:಼鉝ꭝO醧S`십`ꓭ쭁ﯢN&Et㺪馻㍢ⅳ㢺崡ຊ蜚锫\\%ahx켨|ż劻ꎄ㢄쐟A躊᰹p譞綨Ir쿯\u0016ﵚOd럂*僨郀N*b㕷63z": { + ":L5r+T㡲": [{ + "VK泓돲ᮙRy㓤➙Ⱗ38oi}LJቨ7Ó㹡৘*q)1豢⛃e᫛뙪壥镇枝7G藯g㨛oI䄽 孂L缊ꋕ'EN`": -2148138481412096818, + "`⛝ᘑ$(खꊲ⤖ᄁꤒ䦦3=)]Y㢌跨NĴ驳줟秠++d孳>8ᎊ떩EꡣSv룃 쯫أ?#E|᭙㎐?zv:5祉^⋑V": [ + 
-1.4691944435285607E-19, + 3.4128661569395795E17, + "㐃촗^G9佭龶n募8R厞eEw⺡_ㆱ%⼨D뉄퉠2ꩵᛅⳍ搿L팹Lවn=\"慉념ᛮy>!`g!풲晴[/;?[v겁軇}⤳⤁핏∌T㽲R홓遉㓥", + "愰_⮹T䓒妒閤둥?0aB@㈧g焻-#~跬x<\/舁P݄ꐡ=\\׳P\u0015jᳪᢁq;㯏l%᭗;砢觨▝,謁ꍰGy?躤O黩퍋Y㒝a擯\n7覌똟_䔡]fJ晋IAS", + 4367930106786121250, + -4.9421193149720582E17, + null, + { + ";ᄌ똾柉곟ⰺKpፇ䱻ฺ䖝{o~h!eꁿ઻욄ښ\u0002y?xUd\u207c悜ꌭ": [ + 1.6010824122815255E-19, + [ + "宨︩9앉檥pr쇷?WxLb", + "氇9】J玚\u000f옛呲~ 輠1D嬛,*mW3?n휂糊γ虻*ᴫ꾠?q凐趗Ko↦GT铮", + "㶢ថmO㍔k'诔栀Z蛟}GZ钹D", + false, + -6.366995517736813E-20, + -4894479530745302899, + null, + "V%᫡II璅䅛䓎풹ﱢ/pU9se되뛞x梔~C)䨧䩻蜺(g㘚R?/Ự[忓C뾠ࢤc왈邠买?嫥挤풜隊枕", + ",v碍喔㌲쟚蔚톬៓ꭶ", + 3.9625444752577524E-19, + null, + [ + "kO8란뿒䱕馔b臻⍟隨\"㜮鲣Yq5m퐔K#ꢘug㼈ᝦ=P^6탲@䧔%$CqSw铜랊0&m⟭<\/a逎ym\u0013vᯗ": true, + "洫`|XN뤮\u0018詞=紩鴘_sX)㯅鿻Ố싹": 7.168252736947373E-20, + "ꛊ饤ﴏ袁(逊+~⽫얢鈮艬O힉7D筗S곯w操I斞᠈븘蓷x": [[[[ + -7.3136069426336952E18, + -2.13572396712722688E18, + { + "硢3㇩R:o칢行E<=\u0018ၬYuH!\u00044U%卝炼2>\u001eSi$⓷ꒈ'렢gᙫ番ꯒ㛹럥嶀澈v;葷鄕x蓎\\惩+稘UEᖸﳊ㊈壋N嫿⏾挎,袯苷ኢ\\x|3c": 7540762493381776411, + "?!*^ᢏ窯?\u0001ڔꙃw虜돳FgJ?&⨫*uo籤:?}ꃹ=ٴ惨瓜Z媊@ત戹㔏똩Ԛ耦Wt轁\\枒^\\ꩵ}}}ꀣD\\]6M_⌫)H豣:36섘㑜": { + ";홗ᰰU஋㙛`D왔ཿЃS회爁\u001b-㢈`봆?盂㛣듿ᦾ蒽_AD~EEຆ㊋(eNwk=Rɠ峭q\"5Ἠ婾^>'ls\n8QAK)- Q䲌mo펹L_칍樖庫9꩝쪹ᘹ䑖瀍aK ?*趤f뭓廝p=磕", + "哑z懅ᤏ-ꍹux쀭", + [ + true, + 3998739591332339511, + "ጻ㙙?᳸aK<\/囩U`B3袗ﱱ?\"/k鏔䍧2l@쿎VZ쨎/6ꃭ脥|B?31+on颼-ꮧ,O嫚m ࡭`KH葦:粘i]aSU쓙$쐂f+詛頖b", + [{"^<9<箝&絡;%i﫡2攑紴\\켉h쓙-柂䚝ven\u20f7浯-Ꮏ\r^훁䓚헬\u000e?\\ㅡֺJ떷VOt": [{ + "-௄卶k㘆혐஽y⎱㢬sS઄+^瞥h;ᾷj;抭\u0003밫f<\/5Ⱗ裏_朻%*[-撵䷮彈-芈": { + "㩩p3篊G|宮hz䑊o곥j^Co0": [ + 653239109285256503, + {"궲?|\":N1ۿ氃NZ#깩:쇡o8킗ࡊ[\"됸Po핇1(6鰏$膓}⽐*)渽J'DN<썙긘毦끲Ys칖": { + "2Pr?Xjㆠ?搮/?㓦柖馃5뚣Nᦼ|铢r衴㩖\"甝湗ܝ憍": "\"뾯i띇筝牻$珲/4ka $匝휴译zbAᩁꇸ瑅&뵲衯ꎀᆿ7@ꈋ'ᶨH@ᠴl+", + "7뢽뚐v?4^ꊥ_⪛.>pởr渲<\/⢕疻c\"g䇘vU剺dஔ鮥꒚(dv祴X⼹\\a8y5坆": true, + "o뼄B욞羁hr﷔폘뒚⿛U5pꪴfg!6\\\"爑쏍䢱W<ﶕ\\텣珇oI/BK뺡'谑♟[Ut븷亮g(\"t⡎有?ꬊ躺翁艩nl F⤿蠜": 1695826030502619742, + "ۊ깖>ࡹ햹^ⵕ쌾BnN〳2C䌕tʬ]찠?ݾ2饺蹳ぶꌭ訍\"◹ᬁD鯎4e滨T輀ﵣ੃3\u20f3킙D瘮g\\擦+泙ၧ 鬹ﯨַ肋7놷郟lP冝{ߒhড়r5,꓋": null, + "ΉN$y{}2\\N﹯ⱙK'8ɜͣwt,.钟廣䎘ꆚk媄_": null, + "䎥eᾆᝦ읉,Jުn岪㥐s搖謽䚔5t㯏㰳㱊ZhD䃭f絕s鋡篟a`Q鬃┦鸳n_靂(E4迠_觅뷝_宪D(NL疶hL追V熑%]v肫=惂!㇫5⬒\u001f喺4랪옑": { + "2a輍85먙R㮧㚪Sm}E2yꆣꫨrRym㐱膶ᔨ\\t綾A☰.焄뙗9<쫷챻䒵셴᭛䮜.<\/慌꽒9叻Ok䰊Z㥪幸k": [ + null, + true, + {"쌞쐍": { + "▟GL K2i뛱iQ\"̠.옛1X$}涺]靎懠ڦ늷?tf灟ݞゟ{": 1.227740268699265E-19, + "꒶]퓚%ฬK❅": [{ + "(ෛ@Ǯっ䧼䵤[aテൖvEnAdU렖뗈@볓yꈪ,mԴ|꟢캁(而첸죕CX4Y믅": "2⯩㳿ꢚ훀~迯?᪑\\啚;4X\u20c2襏B箹)俣eỻw䇄", + "75༂f詳䅫ꐧ鏿 }3\u20b5'∓䝱虀f菼Iq鈆﨤g퍩)BFa왢d0뮪痮M鋡nw∵謊;ꝧf美箈ḋ*\u001c`퇚퐋䳫$!V#N㹲抗ⱉ珎(V嵟鬒_b㳅\u0019": null, + "e_m@(i㜀3ꦗ䕯䭰Oc+-련0뭦⢹苿蟰ꂏSV䰭勢덥.ྈ爑Vd,ᕥ=퀍)vz뱊ꈊB_6듯\"?{㒲&㵞뵫疝돡믈%Qw限,?\r枮\"? 
N~癃ruࡗdn&": null, + "㉹&'Pfs䑜공j<\/?|8oc᧨L7\\pXᭁ 9᪘": -2.423073789014103E18, + "䝄瑄䢸穊f盈᥸,B뾧푗횵B1쟢f\u001f凄": "魖⚝2儉j꼂긾껢嗎0ࢇ纬xI4](੓`蕞;픬\fC\"斒\")2櫷I﹥迧", + "ퟯ詔x悝령+T?Bg⥄섅kOeQ큼㻴*{E靼6氿L缋\u001c둌๶-㥂2==-츫I즃㠐Lg踞ꙂEG貨鞠\"\u0014d'.缗gI-lIb䋱ᎂDy缦?": null, + "紝M㦁犿w浴詟棓쵫G:䜁?V2ힽ7N*n&㖊Nd-'ຊ?-樹DIv⊜)g䑜9뉂ㄹ푍阉~ꅐ쵃#R^\u000bB䌎䦾]p.䀳": [{"ϒ爛\"ꄱ︗竒G䃓-ま帳あ.j)qgu扐徣ਁZ鼗A9A鸦甈!k蔁喙:3T%&㠘+,䷞|챽v䚞문H<\/醯r셓㶾\\a볜卺zE䝷_죤ဵ뿰᎟CB": [ + 6233512720017661219, + null, + -1638543730522713294, + false, + -8901187771615024724, + [ + 3891351109509829590, + true, + false, + -1.03836679125188032E18, + { + "j랎:g曞ѕᘼ}链N", + -1.1103819473845426E-19, + true, + [ + true, + null, + -7.9091791735309888E17, + true, + {"}蔰鋈+ꐨ啵0?g*사%`J?*": [{ + "\"2wG?yn,癷BK\\龞䑞x?蠢": -3.7220345009853505E-19, + ";饹়❀)皋`噿焒j(3⿏w>偍5X薙婏聿3aFÆÝ": "2,ꓴg?_섦_>Y쪥션钺;=趘F~?D㨫\bX?㹤+>/믟kᠪ멅쬂Uzỵ]$珧`m雁瑊ඖ鯬cꙉ梢f묛bB", + "♽n$YjKiXX*GO贩鏃豮祴遞K醞眡}ꗨv嵎꼷0୸+M菋eH徸J꣆:⼐悥B켽迚㯃b諂\u000bjꠜ碱逮m8": [ + "푷᣺ﻯd8ﱖ嬇ភH鹎⡱᱅0g:果6$GQ췎{vᷧYy-脕x偹砡館⮸C蓼ꏚ=軄H犠G谖ES詤Z蠂3l봟hᅭ7䦹1GPQG癸숟~[#駥8zQ뛣J소obg,", + null, + 1513751096373485652, + null, + -6.851466660824754E-19, + {"䩂-⴮2ٰK솖풄꾚ႻP앳1H鷛wmR䗂皎칄?醜<\/&ࠧ㬍X濬䵈K`vJ륒Q/IC묛!;$vϑ": { + "@-ꚗxྐྵ@m瘬\u0010U絨ﮌ驐\\켑寛넆T=tQ㭤L연@脸삯e-:⩼u㎳VQ㋱襗ຓ<Ⅶ䌸cML3+\u001e_C)r\\9+Jn\\Pﺔ8蠱檾萅Pq鐳话T䄐I": -1.80683891195530061E18, + "ᷭዻU~ཷsgSJ`᪅'%㖔n5픆桪砳峣3獮枾䌷⊰呀": { + "Ş੉䓰邟自~X耤pl7间懑徛s첦5ਕXexh⬖鎥᐀nNr(J컗|ૃF\"Q겮葲놔엞^겄+㈆话〾희紐G'E?飕1f❼텬悚泬먐U睬훶Qs": false, + "(\u20dag8큽튣>^Y{뤋.袊䂓;_g]S\u202a꽬L;^'#땏bႌ?C緡<䝲䲝断ꏏ6\u001asD7IK5Wxo8\u0006p弊⼂ꯍ扵\u0003`뵂픋%ꄰ⫙됶l囏尛+䗅E쟇\\": [ + true, + { + "\n鱿aK㝡␒㼙2촹f;`쾏qIࡔG}㝷䐍瓰w늮*粅9뒪ㄊCj倡翑閳R渚MiUO~仨䜶RꙀA僈㉋⦋n{㖥0딿벑逦⥻0h薓쯴Ꝼ": [ + 5188716534221998369, + 2579413015347802508, + 9.010794400256652E-21, + -6.5327297761238093E17, + 1.11635352494065523E18, + -6656281618760253655, + { + "": ")?", + "TWKLꑙ裑꺔UE俸塑炌Ũ᜕-o\"徚#": {"M/癟6!oI51ni퐚=댡>xꍨ\u0004 ?": { + "皭": {"⢫䋖>u%w잼<䕏꘍P䋵$魋拝U䮎緧皇Y훂&|羋ꋕ잿cJ䨈跓齳5\u001a삱籷I꿾뤔S8㌷繖_Yឯ䲱B턼O歵F\\l醴o_欬6籏=D": [ + false, + true, + {"Mt|ꏞD|F궣MQ뵕T,띺k+?㍵i": [ + 7828094884540988137, + false, + { + "!༦鯠,&aﳑ>[euJꏽ綷搐B.h": -7648546591767075632, + "-n켧嘰{7挐毄Y,>❏螵煫乌pv醑Q嶚!|⌝責0왾덢ꏅ蛨S\\)竰'舓Q}A釡5#v": 3344849660672723988, + "8閪麁V=鈢1녈幬6棉⪮둌\u207d᚛驉ꛃ'r䆉惏ै|bἧﺢᒙ<=穊强s혧eꮿ慩⌡ \\槳W븧J檀C,ᘉ의0俯퀉M;筷ࣴ瓿{늊埂鄧_4揸Nn阼Jੵ˥(社": true, + "o뼀vw)4A뢵(a䵢)p姃뛸\u000fK#KiQp\u0005ꅍ芅쏅": null, + "砥$ꥸ┇耽u斮Gc{z빔깎밇\\숰\u001e괷各㶇쵿_ᴄ+h穢p촀Ნ䃬z䝁酳ӂ31xꔄ1_砚W렘G#2葊P ": [ + -3709692921720865059, + null, + [ + 6669892810652602379, + -135535375466621127, + "뎴iO}Z? 
馢녱稹ᄾ䐩rSt帤넆&7i騏멗畖9誧鄜'w{Ͻ^2窭외b㑎粖i矪ꦨ탪跣)KEㆹ\u0015V8[W?⽉>'kc$䨘ᮛ뉻٬M5", + 1.10439588726055846E18, + false, + -4349729830749729097, + null, + [ + false, + "_蠢㠝^䟪/D녒㡋ỎC䒈판\u0006એq@O펢%;鹐쏌o戥~A[ꡉ濽ỳ&虃᩾荣唙藍茨Ig楡꒻M窓冉?", + true, + 2.17220752996421728E17, + -5079714907315156164, + -9.960375974658589E-20, + "ᾎ戞༒", + true, + false, + [[ + "ⶉᖌX⧕홇)g엃⹪x뚐癟\u0002", + -5185853871623955469, + { + "L㜤9ợㇶK鐰⋓V뽋˖!斫as|9"፬䆪?7胜&n薑~": -2.11545634977136992E17, + "O8뀩D}캖q萂6༣㏗䈓煮吽ਆᎼDᣘ폛;": false, + "YTᡅ^L㗎cbY$pᣞ縿#fh!ꘂb삵玊颟샞ဢ$䁗鼒몁~rkH^:닮먖츸륈⪺쒉砉?㙓扫㆕꣒`R䢱B酂?C뇞<5Iޚ讳騕S瞦z": null, + "\\RB?`mG댵鉡幐物䵎有5*e骄T㌓ᛪ琾駒Ku\u001a[柆jUq8⋈5鿋츿myﻗ?雍ux঴?": 5828963951918205428, + "n0晅:黯 xu씪^퓞cB㎊ᬍ⺘٤փ~B岚3㥕擄vᲂ~F?C䶖@$m~忔S왖㲚?챴⊟W#벌{'㰝I䝠縁s樘\\X뢻9핡I6菍ㄛ8쯶]wॽ0L\"q": null, + "x增줖j⦦t䏢᎙㛿Yf鼘~꫓恄4惊\u209c": "oOhbᤃ᛽z&Bi犑\\3B㩬劇䄑oŁ쨅孥멁ຖacA㖫借㞝vg싰샂㐜#譞⢤@k]鋰嘘䜾L熶塥_<\/⍾屈ﮊ_mY菹t뙺}Ox=w鮮4S1ꐩמּ'巑", + "㗓蟵ꂾe蠅匳(JP䗏෸\u0089耀왲": [{ + "ᤃ㵥韎뤽\r?挥O쯡⇔㞚3伖\u0005P⋪\"D궣QLn(⚘罩䩢Ŏv䤘尗뼤됛O淽鋋闚r崩a{4箙{煷m6〈": { + "l곺1L": { + "T'ਤ?砅|੬Km]䄩\"(࿶<\/6U爢䫈倔郴l2㴱^줣k'L浖L鰄Rp今鎗⒗C얨M훁㡧ΘX粜뫈N꤇輊㌻켑#㮮샶-䍗룲蠝癜㱐V>=\\I尬癤t=": 7648082845323511446, + "鋞EP:<\/_`ၧe混ㇹBd⯢㮂驋\\q碽饩跓྿ᴜ+j箿렏㗑yK毢宸p謹h䦹乕U媣\\炤": [[ + "3", + [ + true, + 3.4058271399411134E-20, + true, + "揀+憱f逮@먻BpW曉\u001a㣐⎊$n劈D枤㡞좾\u001aᛁ苔౩闝1B䷒Ṋ݋➐ꀞꐃ磍$t੤_:蘺⮼(#N", + 697483894874368636, + [ + "vᘯ锴)0訶}䳅⩚0O壱韈ߜ\u0018*U鍾䏖=䧉뽑单휻ID쿇嘗?ꌸῬ07", + -5.4858784319382006E18, + 7.5467775182251151E18, + -8911128589670029195, + -7531052386005780140, + null, + [ + null, + true, + [[{ + "1欯twG<\/Q:0怯押殃탷聫사<ỗꕧ蚨䡁nDꌕ\u001c녬~蓩鲃g儊>ꏡl㻿/⑷*챳6㻜W毤緛ﹺᨪ4\u0013뺚J髬e3쳸䘦伧?恪&{L掾p+꬜M䏊d娘6": { + "2p첼양棜h䜢﮶aQ*c扦v︥뮓kC寵횂S銩&ǝ{O*य़iH`U큅ࡓr䩕5ꄸ?`\\᧫?ᮼ?t〟崾훈k薐ì/iy꤃뵰z1<\/AQ#뿩8jJ1z@u䕥": 1.82135747285215155E18, + "ZdN &=d년ᅆ'쑏ⅉ:烋5&៏ᄂ汎来L㯄固{钧u\\㊏튚e摑&t嗄ꖄUb❌?m䴘熚9EW": [{ + "ଛ{i*a(": -8.0314147546006822E17, + "⫾ꃆY\u000e+W`௸ \"M뒶+\\뷐lKE}(NT킶Yj選篒쁶'jNQ硾(똡\\\"逌ⴍy? IRꜘ὞鄬﨧:M\\f⠋Cꚜ쫊ᚴNV^D䕗ㅖἔIao꿬C⍏8": [ + 287156137829026547, + { + "H丞N逕⯲": {"": { + "7-;枮阕梒9ᑄZ": [[[[ + null, + { + "": [[[[ + -7.365909561486078E-19, + 2948694324944243408, + null, + [ + true, + "荒\"并孷䂡쵼9o䀘F\u0002龬7⮹Wz%厖/*? 
a*R枈㌦됾g뒠䤈q딄㺿$쮸tᶎ릑弣^鏎<\/Y鷇驜L鿽<\/춋9Mᲆឨ^<\/庲3'l낢", + "c鮦\u001b두\\~?眾ಢu݆綑෪蘛轋◜gȃ<\/ⴃcpkDt誩܅\"Y", + [[ + null, + null, + [ + 3113744396744005402, + true, + "v(y", + { + "AQ幆h쾜O+꺷铀ꛉ練A蚗⼺螔j㌍3꽂楎䥯뎸먩?": null, + "蠗渗iz鱖w]擪E": 1.2927828494783804E-17, + "튷|䀭n*曎b✿~杤U]Gz鄭kW|㴚#㟗ഠ8u擨": [[ + true, + null, + null, + {"⾪壯톽g7?㥜ώQꑐ㦀恃㧽伓\\*᧰閖樧뢇赸N휶䎈pI氇镊maᬠ탷#X?A+kНM ༑᩟؝?5꧎鰜ṚY즫궔 =ঈ;ﳈ?*s|켦蜌wM笙莔": [ + null, + -3808207793125626469, + [ + -469910450345251234, + 7852761921290328872, + -2.7979740127017492E18, + 1.4458504352519893E-20, + true, + "㽙깹?먏䆢:䴎ۻg殠JBTU⇞}ꄹꗣi#I뵣鉍r혯~脀쏃#釯:场:䔁>䰮o'㼽HZ擓௧nd", + [ + 974441101787238751, + null, + -2.1647718292441327E-19, + 1.03602824249831488E18, + [ + null, + 1.0311977941822604E-17, + false, + true, + { + "": -3.7019778830816707E18, + "E峾恆茍6xLIm縂0n2视֯J-ᤜz+ᨣ跐mYD豍繹⹺䊓몓ﴀE(@詮(!Y膽#᎙2䟓섣A䈀㟎,囪QbK插wcG湎ꤧtG엝x⥏俎j'A一ᯥ뛙6ㅑ鬀": 8999803005418087004, + "よ殳\\zD⧅%Y泥簳Uꈩ*wRL{3#3FYHା[d岀䉯T稉駅䞘礄P:闈W怏ElB㤍喬赔bG䠼U଄Nw鰯闀楈ePsDꥷ꭬⊊": [ + 6.77723657904486E-20, + null, + [ + "ཚ_뷎꾑蹝q'㾱ꂓ钚蘞慵렜떆`ⴹ⎼櫯]J?[t9Ⓢ !컶躔I᮸uz>3a㠕i,錃L$氰텰@7녫W㸮?羧W뇧ꃞ,N鋮숪2ɼ콏┍䁲6", + "&y?뢶=킕올Za惻HZk>c\u20b58i?ꦶcfBv잉ET9j䡡", + "im珊Ճb칧校\\뼾쯀", + 9.555715121193197E-20, + true, + { + "<㫚v6腓㨭e1㕔&&V∌ᗈT奄5Lጥ>탤?튣瑦㳆ꉰ!(ᙪ㿬擇_n쌯IMΉ㕨␰櫈ᱷ5풔蟹&L.첽e鰷쯃劼﫭b#ﭶ퓀7뷄Wr㢈๧Tʴશ㶑澕鍍%": -1810142373373748101, + "fg晌o?߲ꗄ;>C>?=鑰監侯Kt굅": true, + "䫡蓺ꑷ]C蒹㦘\"1ః@呫\u0014NL䏾eg呮፳,r$裢k>/\\?ㄤᇰﻛ쉕1஥'Ċ\" \\_?쨔\"ʾr: 9S䘏禺ᪧꄂ㲄", + [[{ + "*硙^+E쌺I1䀖ju?:⦈Ꞓl๴竣迃xKC/饉:\fl\"XTFᄄ蟭,芢<\/骡軺띜hꏘ\u001f銿<棔햳▨(궆*=乥b8\\媦䷀뫝}닶ꇭ(Kej䤑M": [{ + "1Ꮼ?>옿I╅C<ގ?ꊌ冉SV5A㢊㶆z-๎玶绢2F뵨@㉌뀌o嶔f9-庒茪珓뷳4": null, + ";lᰳ": "CbB+肻a䄷苝*/볳+/4fq=㰁h6瘉샴4铢Y骐.⌖@哼猎㦞+'gꋸ㒕ߤ㞑(䶒跲ti⑴a硂#No볔", + "t?/jE幸YHT셵⩎K!Eq糦ꗣv刴w\"l$ο:=6:移": { + "z]鑪醊嫗J-Xm銌翁絨c里됏炙Ep㣋鏣똼嚌䀓GP﹖cmf4鹭T䅿꣭姧␸wy6ꦶ;S&(}ᎧKxᾂQ|t뻳k\"d6\"|Ml췆hwLt꼼4$&8Պ褵婶鯀9": {"嵃닢ᒯ'd᧫䳳#NXe3-붋鸿ଢ떓%dK\u0013䲎ꖍYV.裸R⍉rR3蟛\\:젯:南ĺLʆ넕>|텩鴷矔ꋅⒹ{t孶㓑4_": [ + true, + null, + [ + false, + "l怨콈lᏒ", + { + "0w䲏嬧-:`䉅쉇漧\\܂yㄨb%㽄j7ᦶ涶<": 3.7899452730383747E-19, + "ꯛTẀq纤q嶏V⿣?\"g}ი艹(쥯B T騠I=仵및X": {"KX6颠+&ᅃ^f畒y[": { + "H?뱜^?꤂-⦲1a㋞&ꍃ精Ii᤾챪咽쬘唂쫷<땡劈훫놡o㥂\\ KⴙD秼F氮[{'좴:례晰Iq+I쭥_T綺砸GO煝䟪ᚪ`↹l羉q쐼D꽁ᜅ훦: vUV": true, + "u^yﳍ0㱓#[y뜌앸ꊬL㷩?蕶蘾⻍KӼ": -7931695755102841701, + "䤬轉車>\u001c鴵惋\"$쯃྆⇻n뽀G氠S坪]ಲꨍ捇Qxኻ椕駔\\9ࣼ﫻읜磡煮뺪ᶚ볝l㕆t+sζ": [[[ + true, + false, + [ + null, + 3363739578828074923, + true, + { + "\"鸣詩 볰㑵gL㯦῅춝旫}ED辗ﮈI쀤-ꧤ|㠦Z\"娑ᕸ4爏騍㣐\"]쳝Af]茛⬻싦o蚁k䢯䩐菽3廇喑ޅ": 4.5017999150704666E17, + "TYႇ7ʠ值4챳唤~Zo&ݛ": false, + "`塄J袛㭆끺㳀N㺣`꽐嶥KﯝSVᶔ∲퀠獾N딂X\"ᤏhNﬨvI": {"\u20bb㭘I䖵䰼?sw䂷쇪](泒f\"~;꼪Fԝsᝦ": {"p,'ꉂ軿=A蚶?bƉ㏵䅰諬'LYKL6B깯⋩겦뎙(ᜭ\u0006噣d꾆㗼Z;䄝䚔cd<情@䞂3苼㸲U{)<6&ꩻ钛\u001au〷N숨囖愙j=BXW욕^x芜堏Ῑ爂뛷꒻t✘Q\b": [[ + "籛&ଃ䩹.ꃩ㦔\\C颫#暪&!勹ꇶ놽攺J堬镙~軌C'꾖䣹㮅岃ᙴ鵣", + 4.317829988264744E15, + 6.013585322002147E-20, + false, + true, + null, + null, + -3.084633632357326E-20, + false, + null, + { + "\"짫愔昻 X\"藣j\"\"먁ཅѻ㘤㬯0晲DU꟒㸃d벀윒l䦾c੻*3": null, + "谈Wm陧阦咟ฯ歖擓N喴㋐銭rCCnVࢥ^♼Ⅾ젲씗刊S༝+_t赔\\b䚍뉨ꬫ6펛cL䊘᜼<\/澤pF懽&H": [ + null, + { + "W\"HDUuΌ퀟M'P4࿰H똆ⰱﮯ<\/凐蘲\"C鴫ﭒж}ꭩ쥾t5yd诪ﮡ퍉ⴰ@?氐醳rj4I6Qt": 6.9090159359219891E17, + "絛ﳛ⺂": {"諰P㗮聦`ZQ?ꫦh*റcb⧱}埌茥h{棩렛툽o3钛5鮁l7Q榛6_g)ὄ\u0013kj뤬^爖eO4Ⱈ槞鉨ͺ订%qX0T썗嫷$?\\\"봅늆'%": [ + -2.348150870600346E-19, + [[ + true, + -6619392047819511778, + false, + [[ + -1.2929189982356161E-20, + 1.7417192219309838E-19, + {"?嵲2࿐2\u0001啑㷳c縯": [ + null, + [ + false, + true, + 2578060295690793218, + { + "?\"殃呎#㑑F": true, + "}F炊_殛oU헢兔Ꝉ,赭9703.B数gTz3⏬": { + "5&t3,햓Mݸᵣ㴵;꣫䩍↳#@뫷䠅+W-ࣇzᓃ鿕ಔ梭?T䮑ꥬ旴]u뫵막bB讍:왳둛lEh=숾鱠p咐$짏#?g⹷ᗊv㷵.斈u頻\u0018-G.": "뽙m-ouࣤ஫牷\"`Ksꕞ筼3HlȨvC堈\"I]㖡玎r먞#'W賜鴇k'c룼髋䆿飉㗆xg巤9;芔cጐ/ax䊨♢큓r吓㸫೼䢗da᩾\"]屣`", + ":M딪<䢥喠\u0013㖅x9蕐㑂XO]f*Q呰瞊吭VP@9,㨣 D\\穎vˤƩs㜂-曱唅L걬/롬j㈹EB8g<\/섩o渀\"u0y&룣": ">氍緩L/䕑돯Ꟙ蕞^aB뒣+0jK⪄瑨痜LXK^힦1qK{淚t츔X:Vm{2r獁B뾄H첚7氥?쉟䨗ꠂv팳圎踁齀\\", + "D彤5㢷Gꪻ[lㄆ@὜⓰絳[ଃ獽쮹☒[*0ꑚ㜳": 9022717159376231865, + 
"ҖaV銣tW+$魿\u20c3亜~뫡ᙰ禿쨽㏡fṼzE/h": "5臐㋇Ჯ쮺? 昨탰Wム밎#'\"崲钅U?幫뺀⍾@4kh>騧\\0ҾEV=爐͌U捀%ꉼ 㮋<{j]{R>:gԩL\u001c瀈锌ﯲﳡꚒ'⫿E4暍㌗뵉X\"H᝜", + "ᱚגּ;s醒}犍SἿ㦣&{T$jkB\\\tḮ앾䤹o<避(tW": "vb⯽䴪䮢@|)", + "⥒퐁껉%惀뗌+녣迺顀q條g⚯i⤭룐M琹j̈́⽜A": -8385214638503106917, + "逨ꊶZ<\/W⫟솪㎮ᘇb?ꠔi\"H㧺x෷韒Xꫨฟ|]窽\u001a熑}Agn?Mᶖa9韲4$3Ỵ^=쏍煤ፐ돷2䣃%鷠/eQ9頸쥎", + 2398360204813891033, + false, + 3.2658897259932633E-19, + null, + "?ꚃ8Nn㞷幵d䲳䱲뀙ꪛQ瑓鎴]䩋-鰾捡䳡??掊", + false, + -1309779089385483661, + "ᦲxu_/yecR.6芏.ᜇ過 ~", + -5658779764160586501, + "쒌:曠=l썜䢜wk#s蕚\"互㮉m䉤~0듐䋙#G;h숄옥顇෤勹(C7㢅雚㐯L⠅VV簅<", + null, + -4.664877097240962E18, + -4.1931322262828017E18, + { + ",": { + "v㮟麑䄠뤵g{M띮.\u001bzt뢜뵡0Ǥ龍떟Ᾰ怷ϓRT@Lꀌ樂U㏠⾕e扉|bJg(뵒㠶唺~ꂿ(땉x⻫싉쁊;%0鎻V(o\f,N鏊%nk郼螺": -1.73631993428376141E18, + "쟧摑繮Q@Rᕾ㭚㾣4隅待㓎3蒟": [ + 4971487283312058201, + 8973067552274458613, + { + "`a揙ᣗ\u0015iBo¸": 4.3236479112537999E18, + "HW&퉡ぁ圍Y?瑡Qy훍q!帰敏s舠㫸zꚗaS歲v`G株巷Jp6킼 (귶鍔⾏⡈>M汐㞍ቴ꙲dv@i㳓ᇆ?黍": [ + null, + 4997607199327183467, + "E㻎蠫ᐾ高䙟蘬洼旾﫠텛㇛?'M$㣒蔸=A_亀绉앭rN帮", + null, + [{ + "Eᑞ)8餧A5u&㗾q?": [ + -1.969987519306507E-19, + null, + [ + 3.42437673373841E-20, + true, + "e걷M墁\"割P␛퍧厀R䱜3ﻴO퓫r﹉⹊", + [ + -8164221302779285367, + [ + true, + null, + "爘y^-?蘞Ⲽꪓa␅ꍨ}I", + 1.4645984996724427E-19, + [{ + "tY좗⧑mrzﺝ㿥ⴖ᥷j諅\u0000q賋譁Ꞅ⮱S\nࡣB/큃굪3Zɑ复o<\/;롋": null, + "彟h浠_|V4䦭Dᙣ♞u쿻=삮㍦\u001e哀鬌": [{"6횣楠,qʎꗇ鎆빙]㱭R굋鈌%栲j分僅ペ䇰w폦p蛃N溈ꡐꏀ?@(GI뉬$ﮄ9誁ꓚ2e甸ڋ[䁺,\u0011\u001cࢃ=\\+衪䷨ᯕ鬸K": [[ + "ㅩ拏鈩勥\u000etgWVXs陂規p狵w퓼{뮵_i\u0002ퟑႢ⬐d6鋫F~챿搟\u0096䚼1ۼ칥0꣯儏=鋷牋ⅈꍞ龐", + -7283717290969427831, + true, + [ + 4911644391234541055, + { + "I鈒첽P릜朸W徨觘-Hᎄ퐟⓺>8kr1{겵䍃〛ᬡ̨O귑o䝕'쿡鉕p5": "fv粖RN瞖蛐a?q꤄\u001d⸥}'ꣴ犿ꦼ?뤋?鵆쥴덋䡫s矷̄?ඣ/;괱絢oWfV<\/\u202cC,㖦0䑾%n賹g&T;|lj_欂N4w", + "짨䠗;䌕u i+r๏0": [{"9䥁\\఩8\"馇z䇔<\/ႡY3e狚쐡\"ุ6ﰆZ遖c\"Ll:ꮾ疣<\/᭙O◌납୕湞9⡳Und㫜\u0018^4pj1;䧐儂䗷ୗ>@e톬": { + "a⑂F鋻Q螰'<퇽Q贝瀧{ᘪ,cP&~䮃Z?gI彃": [ + -1.69158726118025933E18, + [ + "궂z簽㔛㮨瘥⤜䛖Gℤ逆Y⪾j08Sn昞ꘔ캻禀鴚P謦b{ꓮmN靐Mᥙ5\"睏2냑I\u0011.L&=?6ᄠ뻷X鸌t刑\"#z)o꫚n쳟줋", + null, + 7517598198523963704, + "ኑQp襟`uᩄr方]*F48ꔵn俺ሙ9뇒", + null, + null, + 6645782462773449868, + 1219168146640438184, + null, + { + ")ယ넌竀Sd䰾zq⫣⏌ʥ\u0010ΐ' |磪&p牢蔑mV蘸૰짬꺵;K": [ + -7.539062290108008E-20, + [ + true, + false, + null, + true, + 6574577753576444630, + [[ + 1.2760162530699766E-19, + [ + null, + [ + "顊\\憎zXB,", + [{ + "㇆{CVC9-MN㜋ઘR눽#{h@ퟨ!鼚׼XOvXS\u0017ᝣ=cS+梽៲綆16s덽휐y屬?ᇳG2ᴭ\u00054쫖y룇nKcW̭炦s/鰘ᬽ?J|퓀髣n勌\u0010홠P>j": false, + "箴": [ + false, + "鍞j\"ꮾ*엇칬瘫xṬ⭽쩁䃳\"-⋵?ᦽ댎Ĝ": true, + "Pg帯佃籛n㔠⭹࠳뷏≻࿟3㞱!-쒾!}쭪䃕!籿n涻J5ਲ਼yvy;Rኂ%ᔡጀ裃;M⣼)쵂쑈": 1.80447711803435366E18, + "ꈑC⡂ᑆ㤉壂뎃Xub<\/쀆༈憓ق쨐ק\\": [ + 7706977185172797197, + {"": {"K╥踮砆NWࡆFy韣7ä밥{|紒︧䃀榫rᩛꦡTSy잺iH8}ퟴ,M?Ʂ勺ᴹ@T@~꾂=I㙕뾰_涀쑜嫴曣8IY?ҿo줫fऒ}\\S\"ᦨ뵼#nDX": { + "♘k6?଱癫d68?㽚乳䬳-V顷\u0005蝕?\u0018䞊V{邾zじl]雏k臤~ൖH뒐iꢥ]g?.G碄懺䔛pR$䅒X觨l봜A刊8R梒',}u邩퉕?;91Ea䈈믁G⊶芔h袪&廣㺄j;㡏綽\u001bN頸쳘橆": -2272208444812560733, + "拑Wﵚj鵼駳Oࣿ)#㾅顂N傓纝y僱栜'Bꐍ-!KF*ꭇK¦?䈴^:啤wG逭w᧯": "xᣱmYe1ۏ@霄F$ě꧘푫O䤕퀐Pq52憬ꀜ兴㑗ᡚ?L鷝ퟐ뭐zJꑙ}╆ᅨJB]\"袌㺲u8䯆f", + "꿽၅㔂긱Ǧ?SI": -1669030251960539193, + "쇝ɨ`!葎>瞺瘡驷錶❤ﻮ酜=": -6961311505642101651, + "?f7♄꫄Jᡔ훮e읇퍾፣䭴KhखT;Qty}O\\|뫁IῒNe(5惁ꥶㆷY9ﮡ\\ oy⭖-䆩婁m#x봉>Y鈕E疣s驇↙ᙰm<": {"퉻:dꂁ&efᅫ쫢[\"돈늖꺙|Ô剐1͖-K:ʚ᭕/;쏖㷛]I痐职4gZ4⍜kเꛘZ⥺\\Bʫᇩ鄨魢弞&幟ᓮ2̊盜", + -9006004849098116748, + -3118404930403695681, + { + "_彃Y艘-\"Xx㤩㳷瑃?%2䐡鵛o귵옔夘v*탋职&㳈챗|O钧": [ + false, + "daꧺdᗹ羞쯧H㍤鄳頳<型孒ン냆㹀f4㹰\u000f|C*ሟ鰠(O<ꨭ峹ipຠ*y೧4VQ蔔hV淬{?ᵌEfrI_", + "j;ꗣ밷邍副]ᗓ", + -4299029053086432759, + -5610837526958786727, + [ + null, + [ + -1.3958390678662759E-19, + { + "lh좈T_믝Y\"伨\u001cꔌG爔겕ꫳ晚踍⿻읐T䯎]~e#฽燇\"5hٔ嶰`泯r;ᗜ쮪Q):/t筑,榄&5懶뎫狝(": [{ + "2ፁⓛ]r3C攟וּ9賵s⛔6'ஂ|\"ⵈ鶆䐹禝3\"痰ࢤ霏䵩옆䌀?栕r7O簂Isd?K᫜`^讶}z8?z얰T:X倫⨎ꑹ": -6731128077618251511, + "|︦僰~m漿햭\\Y1'Vvخ굇ቍ챢c趖": [null] + }], + "虌魿閆5⛔煊뎰㞤ᗴꥰF䮥蘦䂪樳-K᝷-(^\u20dd_": 2.11318679791770592E17 + } + ] 
+ ] + ]}, + "묗E䀳㧯᳀逞GMc\b墹㓄끖Ơ&U??펌鑍 媋k))ᄊ": null, + "묥7콽벼諌J_DɯﮪM殴䣏,煚ྼ`Y:씧<\/⩫%yf䦀!1Ჶk춎Q米W∠WC跉鬽*ᛱi㴕L꘻ꀏ쓪\"_g鿄'#t⽙?,Wg㥖|D鑆e⥏쪸僬h鯔咼ඡ;4TK聎졠嫞" + } + ] + ] + } + ] + ] + ]}} + } + ]} + }, + "뿋뀾淣截䔲踀&XJ펖꙯^Xb訅ꫥgᬐ>棟S\"혧騾밫겁7-": "擹8C憎W\"쵮yR뢩浗絆䠣簿9䏈引Wcy䤶孖ꯥ;퐌]輩䍐3@{叝 뽸0ᡈ쵡Ⲇ\u001dL匁꧐2F~ݕ㪂@W^靽L襒ᦘ~沦zZ棸!꒲栬R" + } + ] + ], + "Z:덃൛5Iz찇䅄駠㭧蓡K1": "e8᧤좱U%?ⵇ䯿鿝\u0013縮R∱骒EO\u000fg?幤@֗퉙vU`", + "䐃쪈埽້=Ij,쭗쓇చ": false + }]}} + ] + } + ]} + } + ] + ] + ], + "咰긖VM]᝼6䓑쇎琺etDҌ?㞏ꩄ퇫밉gj8蠃\"⩐5䛹1ࣚ㵪": "ക蹊?⎲⧘⾚̀I#\"䈈⦞돷`wo窭戕෱휾䃼)앷嵃꾞稧,Ⴆ윧9S?೗EMk3Მ3+e{⹔Te驨7䵒?타Ulg悳o43" + } + ], + "zQᤚ纂땺6#ٽ﹧v￿#ࠫ휊冟蹧텈ꃊʆ?&a䥯De潝|쿓pt瓞㭻啹^盚2Ꝋf醪,얏T窧\\Di䕎谄nn父ꋊE": -2914269627845628872, + "䉩跐|㨻ᷢ㝉B{蓧瞸`I!℄욃힕#ೲᙾ竛ᔺCjk췒늕貭词\u0017署?W딚%(pꍁ⤼띳^=on뺲l䆼bzrﳨ[&j狸䠠=ᜑꦦ\u2061յnj=牲攑)M\\龏": false, + "뎕y絬᫡⥮Ϙᯑ㌔/NF*˓.,QEzvK!Iwz?|쥾\"ꩻL꼗Bꔧ賴緜s뉣隤茛>ロ?(?^`>冺飒=噸泥⺭Ᲊ婓鎔븜z^坷裮êⓅ໗jM7ﶕ找\\O": 1.376745434746303E-19 + }, + "䐛r滖w㏤,|Nዜ": false + } + ]], + "@꿙?薕尬 gd晆(띄5躕ﻫS蔺4)떒錸瓍?~": 1665108992286702624, + "w믍nᏠ=`঺ᅥC>'從됐槷䤝眷螄㎻揰扰XᅧC贽uჍ낟jKD03T!lDV쀉Ӊy뢖,袛!终캨G?鉮Q)⑗1쾅庅O4ꁉH7?d\u0010蠈줘월ސ粯Q!낇껉6텝|{": null, + "~˷jg쿤촖쉯y": -5.5527605669177098E18, + "펅Wᶺzꐆと푭e?4j仪열[D<鈑皶婆䵽ehS?袪;HꍨM뗎ば[(嗏M3q퍟g4y╸鰧茀[Bi盤~﫝唎鋆彺⦊q?B4쉓癚O洙킋툈䶯_?ퟲ": null + } + ] + ]] + ]], + "꟱Ԕ㍤7曁聯ಃ錐V䷰?v㪃૦~K\"$%请|ꇹn\"k䫛㏨鲨\u2023䄢\u0004[︊VJ?䶟ាꮈ䗱=깘U빩": -4863152493797013264 + } + ]}]} + ] + }}} + ], + "쏷쐲۹퉃~aE唙a챑,9㮹gLHd'䔏|킗㍞䎥&KZYT맵7䥺Nⱳ同莞鿧w\\༌疣n/+ꎥU\"封랾○ퟙAJᭌ?9䛝$?驔9讐짘魡T֯c藳`虉C읇쐦T" + } + ], + "谶개gTR￐>ၵ͚dt晑䉇陏滺}9㉸P漄": -3350307268584339381 + }] + ] + ] + ]] + ] + ], + "0y꟭馋X뱔瑇:䌚￐廿jg-懲鸭䷭垤㒬茭u賚찶ಽ+\\mT땱\u20821殑㐄J쩩䭛ꬿNS潔*d\\X,壠뒦e殟%LxG9:摸": 3737064585881894882, + "풵O^-⧧ⅶvѪ8廸鉵㈉ר↝Q㿴뺟EႳvNM:磇>w/៻唎뷭୥!냹D䯙i뵱貁C#⼉NH6`柴ʗ#\\!2䂗Ⱨf?諳.P덈-返I꘶6?8ꐘ": -8934657287877777844, + "溎-蘍寃i诖ര\"汵\"\ftl,?d⼡쾪⺋h匱[,෩I8MҧF{k瓿PA'橸ꩯ綷퉲翓": null + } + ] + ], + "ោ係؁<元": 1.7926963090826924E-18 + }}] + } + ] + ]]}] + }] + ] + ] + ] + ], + "ጩV<\"ڸsOᤘ": 2.0527167903723048E-19 + }] + ]} + ] + ]], + "∳㙰3젴p᧗䱙?`yZA8Ez0,^ᙛ4_0븢\u001ft:~䎼s.bb룦明yNP8弆C偯;⪾짍'蕴뮛": -6976654157771105701, + "큵ꦀ\\㇑:nv+뒤燻䀪ﴣ﷍9ᚈ኷K㚊誦撪䚛,ꮪxሲ쳊\u0005HSf?asg昱dqꬌVꙇ㼺'k*'㈈": -5.937042203633044E-20 + } + ] + }], + "?}\u20e0],s嶳菋@#2u쒴sQS䩗=ꥮ;烌,|ꘔ䘆": "ᅩ영N璠kZ먕眻?2ቲ芋眑D륟渂⸑ﴃIRE]啗`K'" + }}, + "쨀jmV賂ﰊ姐䂦玞㬙ᏪM᪟Վ씜~`uOn*ॠ8\u000ef6??\\@/?9見d筜ﳋB|S䝬葫㽁o": true + }, + "즛ꄤ酳艚␂㺘봿㎨iG৕ࡿ?1\"䘓您\u001fSኝ⺿溏zៀ뻤B\u0019?윐a䳵᭱䉺膷d:<\/": 3935553551038864272 + } + ] + ]} + ]] + ]] + ]} + } + ] + } + ]]}}, + "᥺3h↛!ꋰy\"攜(ெl䪕oUkc1A㘞ᡲ촾ᣫ<\/䒌E㛝潨i{v?W౾H\\RჅpz蝬R脾;v:碽✘↯삞鷱o㸧瑠jcmK7㶧뾥찲n": true, + "ⶸ?x䊺⬝-䰅≁!e쩆2ꎿ准G踌XXᩯ1߁}0?.헀Z馟;稄\baDꟹ{-寪⚈ꉷ鮸_L7ƽᾚ<\u001bጨA䧆송뇵⨔\\礍뗔d设룱㶉cq{HyぱR㥽吢ſtp": -7985372423148569301, + "緫#콮IB6<\/=5Eh礹\t8럭@饹韠r㰛斣$甝LV췐a갵'请o0g:^": "䔨(.", + "띳℡圤pン௄ĝ倧訜B쁟G䙔\"Sb⓮;$$▏S1J뢙SF|赡g*\"Vu䲌y": "䪈&틐),\\kT鬜1풥;뷴'Zေ䩹@J鞽NぼM?坥eWb6榀ƩZڮ淽⺞삳煳xჿ絯8eⶍ羷V}ჿ쎱䄫R뱃9Z>'\u20f1ⓕ䏜齮" + } + ] + ]]] + }} + } + ] + ]}, + "펮b.h粔폯2npX詫g錰鷇㇒<쐙S値bBi@?镬矉`剔}c2壧ଭfhY깨R()痩⺃a\\⍔?M&ﯟ<劜꺄멊ᄟA\"_=": null + }, + "~潹Rqn榢㆓aR鬨侅?䜑亡V_翅㭔(䓷w劸ၳDp䀅<\/ﰎ鶊m䵱팱긽ꆘ긓准D3掱;o:_ќ)껚콥8곤d矦8nP倥ꃸI": null, + "뾎/Q㣩㫸벯➡㠦◕挮a鶧⋓偼\u00001뱓fm覞n?㛅\"": 2.8515592202045408E17 + }], + ",": -5426918750465854828, + "2櫫@0柡g䢻/gꆑ6演&D稒肩Y?艘/놘p{f투`飷ᒉ챻돎<늛䘍ﴡ줰쫄": false, + "8(鸑嵀⵹ퟡ<9㣎Tߗ┘d슒ل蘯&㠦뮮eࠍk砝g 엻": false, + "d-\u208b?0ﳮ嵙'(J`蔿d^踅⤔榥\\J⵲v7": 6.8002426206715341E17, + "ཎ耰큓ꐕ㱷\u0013y=詽I\"盈xm{0쾽倻䉚ષso#鰑/8㸴짯%ꀄ떸b츟*\\鲷礬ZQ兩?np㋄椂榨kc᡹醅3": false, + "싊j20": false + }]] + ]], + "俛\u0017n緽Tu뫉蜍鼟烬.ꭠIⰓ\"Ἀ᜾uC쎆J@古%ꛍm뻨ᾀ画蛐휃T:錖㑸ዚ9죡$": true + } + ] + ], + "㍵⇘ꦖ辈s}㱮慀밒s`\"㞟j:`i픻Z섫^諎0Ok{켿歁෣胰a2﨤[탳뚬쎼嫭뉮m": 409440660915023105, + "w墄#*ᢄ峠밮jLa`ㆪ꺊漓Lで끎!Agk'ꁛ뢃㯐岬D#㒦": false, + "ଦPGI䕺L몥罭ꃑ궩﮶#⮈ᢓӢ䚬p7웼臧%~S菠␌힀6&t䳙y㪘냏\\*;鉏ᅧ鿵'嗕pa\"oL쇿꬈Cg": "㶽1灸D⟸䴅ᆤ뉎﷛渤csx 䝔цꬃ锚捬?ຽ+x~꘩uI࡞\u0007栲5呚ẓem?袝\")=㥴䨃pac!/揎Y", + 
"ᷱo\\||뎂몷r篙|#X䦜I#딌媸픕叞RD斳X4t⯩夬=[뭲r=绥jh뷱츝⪘%]⚋܈㖴スH텹m(WO曝劉0~K3c柢Ր㏉着逳~": false, + "煽_qb[첑\\륌wE❽ZtCNﭝ+餌ᕜOꛭ": "{ﳾ쉌&s惧ᭁⵆ3䢫;䨞팑꒪흘褀࢖Q䠿V5뭀䎂澻%받u5텸oA⮥U㎦;B䳌wz䕙$ឿ\\௅婺돵⪾퐆\\`Kyौꋟ._\u0006L챯l뇠Hi䧈偒5", + "艊佁ࣃ롇䱠爬!*;⨣捎慓q靓|儑ᨋL+迥=6㒺딉6弄3辅J-㕎뛄듘SG㆛(\noAzQꝱ䰩X*ぢO퀌%펠낌mo틮a^<\/F&_눊ᾉ㨦ы4\"8H": 2974648459619059400, + "鬙@뎣䫳ၮ끡?){y?5K;TA*k溱䫜J汃ꂯ싔썍\u001dA}룖(<\/^,": false, + "몏@QꋦFꊩᒐ뎶lXl垨4^郣|ꮇ;䝴ᝓ}쵲z珖": null + } + ]]]], + ":_=닧弗D䙋暨鏛. 㱻붘䂍J儒&ZK/녩䪜r囁⽯D喠죥7⹌䪥c\u001a\u2076￞妈朹oLk菮F౟覛쐧㮏7T;}蛙2{9\"崓bB<\/⡷룀;즮鿹)丒툃୤뷠5W⊢嶜(fb뭳갣": "E{响1WM" + }}, + "䘨tjJ驳豨?y輊M*᳑梵瞻઻ofQG瑮e": 2.222802939724948E-19, + "䮴=❑➶T෋w䞜\"垦ꃼUt\u001dx;B$뵣䙶E↌艣ᡥ!᧟;䱀[䔯k쬃`੍8饙른熏'2_'袻tGf蒭J땟as꯳╖&啒zWࡇᒫYSᏬ\u0014ℑ첥鈤|cG~Pᓮ\">\"": "ႆl\f7V儊㦬nHꄬꨧC{쐢~C⮃⛓嶦vꄎ1w鰠嘩뿠魄&\"_qMⵖ釔녮ꝇ 㝚{糍J哋 cv?-jkﻯྌ鹑L舟r", + "龧葆yB✱H盋夔ﶉ?n*0(": "ꧣኆ㢓氥qZZ酒ຜ)鮢樛)X䣆gTSґG텞k.J圬疝롫쯭z L:\\ྤ@w炋塜쿖ᾳy뢀䶃뱝N䥨㚔勇겁#p", + "도畎Q娡\"@S/뼋:䵏!P衅촚fVHQs✜ᐫi㻑殡B䜇%믚k*U#濨낄~": "ꍟዕ쳸ꍈ敋&l妏\u0005憡멗瘌uPgᅪm<\/To쯬锩h뒓k" + } + ] + }], + "墥홞r绚<\/⸹ⰃB}<躅\\Y;๑@䔸>韫䜲뱀X뗩鿥쩗SI%ﴞ㳕䛇?<\/\u00018x\\&侂9鋙a[LR㋭W胕)⡿8㞙0JF,}?허d1cDMᐃ␛鄝ⱕ%X)!XQ": "ⳍꗳ=橇a;3t⦾꼑仈ူaᚯ⯋ꕃAs鴷N⍕_䎃ꙎAz\u0016䯷\\<࿫>8q{}キ?ᣰ}'0ᴕ펓B┦lF#趤厃T?㕊#撹圂䆲" + }, + "܋닐龫論c웑": false, + "ㇿ/q\"6-co髨휝C큦#\u001b4~?3䐹E삇<<": 7.600917488140322E-20, + "䁝E6?㣖ꃁ间t祗*鑠{ḣV(浾h逇큞=W?ૉ?nꇽ8ꅉຉj으쮺@Ꚅ㰤u]Oyr": "v≁᫸_*όAඤԆl)ۓᦇQ}폠z༏q滚", + "ソ᥊/넺I": true + }]] + ] + ] + ] + ]] + }, + "䭑Ik攑\u0002QV烄:芩.麑㟴㘨≕": true, + "坄꿕C쇻풉~崍%碼\\8\"䬦꣙": null, + "欌L圬䅘Y8c(♺2?ON}o椳s宥2䉀eJ%闹r冁O^K諭%凞⺉⡻,掜?$ꥉ?略焕찳㯊艼誜4?\"﯎<゛XፈINT:詓 +": -1.0750456770694562E-19, + "獒àc뜭싼ﺳ뎤K`]p隨LtE": null, + "甙8䵊神EIꩤ鐯ᢀ,ﵮU䝑u疒ử驺䚿≚ഋ梶秓F`覤譐#짾蔀묊4<媍쬦靪_Yzgcࡶ4k紥`kc[Lﮗ簐*I瀑[⾰L殽鑥_mGȠ<\/|囹灠g桰iri": true, + "챓ꖙꟻ좝菇ou,嗠0\\jK핻뜠qwQ?ഩ㼕3Y彦b\u009bJ榶N棨f?됦鏖綃6鳵M[OE봨u햏.Ꮁ癜蟳뽲ꩌ뻾rM豈R嗀羫 uDꎚ%": null + }, + "V傜2<": 7175127699521359521 + }], + "铫aG切<\/\"ী⊆e<^g࢛)D顝nאַ饼\u008c猪繩嵿ﱚCꡬ㻊g엺A엦\u000f暿_f꿤볝㦕桦`蒦䎔j甬%岝rj 糏": "䚢偎눴Au<4箞7礦Iﱔ坠eȧ䪸u䵁p|逹$嗫쨘ꖾ﷐!胠z寓팢^㨔|u8Nሇe텔ꅦ抷]،鹎㳁#༔繁 ", + "낂乕ꃻ볨ϱ-ꇋ㖍fs⿫)zꜦ/K?솞♞ꑌ宭hJ᤭瑥Fu": false, + "쟰ぜ魛G\u0003u?`㾕ℾ㣭5螠烶這趩ꖢ:@咕ꐶx뒘느m䰨b痃렐0鳊喵熬딃$摉_~7*ⱦ녯1錾GKhJ惎秴6'H妈Tᧅ窹㺒疄矤铟wላ": null, + "쯆q4!3錕㲏ⵆ㇛꘷Z瑩뭆\\◪NH\u001d\\㽰U~㯶<\"쑣낞3ᵤ'峉eꢬ;鬹o꣒木X*長PXᘱu\"䠹n惞": null, + "ᅸ祊\"&ꥴCjࢼ﴿?䡉`U效5殼㮞V昽ꏪ#ﺸ\\&t6x꠹盥꣰a[\u001aꪍSpe鎿蠹": -1.1564713893659811E-19 + } + ]] + ] + ] + ], + "羵䥳H,6ⱎ겾|@t\"#햊1|稃 섭)띜=뻔ꡜ???櫎~*ῡ꫌/繣ﻠq": null + } + ]} + ]}, + "츤": false + }}, + "s": 3.7339341963399598E18 + } + ], + "N,I?1+㢓|ࣱ嶃쩥V2\u0012(4EE虪朶$|w颇v步": "~읢~_,Mzr㐫YB溓E淚\"ⅹ䈔ᏺ抙 b,nt5V㐒J檶ꏨ⻔?", + "Q껑ꡡ}$넎qH煔惍/ez^!ẳF댙䝌馻剁8": "梲;yt钰$i冄}AL%a j뜐奷걳뚾d꿽*ሬuDY3?뮟鼯뮟w㍪틱V", + "o{Q/K O胟㍏zUdꀐm&⨺J舕⾏魸訟㌥[T籨櫉唐킝 aṭ뱫촙莛>碶覆⧬짙쭰ׯdAiH໥벤퐥_恸[ 0e:죃TC弼荎뵁DA:w唵ꣁ": null, + "὏樎䵮軧|?౗aWH쩃1 ꅭsu": null + } + ] + }, + "勂\\&m鰈J釮=Ⲽ鳋+䂡郑": null, + "殣b綊倶5㥗惢⳷萢ᑀ䬄镧M^ﱴ3⣢翣n櫻1㨵}ኯ뗙顖Z.Q➷ꮨ뗇\u0004": "ꔙ䁼>n^[GीA䨟AM琢ᒊS쨲w?d㶣젊嘶纝麓+愣a%気ྞSc됓ᔘ:8bM7Xd8㶑臌]Ꙥ0ꐭ쒙䫣挵C薽Dfⵃ떼᷸", + "?紡.셪_෨j\u0013Ox┠$Xᶨ-ᅇo薹-}軫;y毝㪜K㣁?.EV쮱4둽⛻䤜'2盡\u001f60(|e쐰㼎ᦀ㒧-$l@ﻑ坳\u0003䭱响巗WFo5c㧆T턁Y맸♤(": -2.50917882560589088E17 + }} + ], + "侸\\릩.᳠뎠狣살cs项䭩畳H1s瀉븇19?.w骴崖㤊h痠볭㞳㞳䁮Ql怠㦵": "@䟴-=7f", + "鹟1x௢+d ;vi䭴FSDS\u0004hꎹ㚍?⒍⦏ў6u,扩@됷Su)Pag휛TᒗV痩!瞏釀ꖞ蘥&ೞ蘐ꭰꞇᝎ": "ah懱Ժ&\u20f7䵅♎඀䞧鿪굛ౕ湚粎蚵ᯋ幌YOE)५襦㊝Y*^\"R+ඈ咷蝶9ꥂ榨艦멎헦閝돶v좛咊E)K㓷ྭr", + "搆q쮦4綱켙셁.f4<\/g<籽늷?#蚴픘:fF\u00051㹉뀭.ᰖ풎f֦Hv蔎㧤.!䭽=鞽]음H:?\"-4": 8.740133984938656E-20 + }]} + } + ], + "tVKn딩꘥⊾蹓᤹{\u0003lR꼽ᄲQFᅏ傅ﱋ猢⤊ᔁ,E㓒秤nTතv`♛I\u0000]꫔ṞD\"麵c踝杰X&濿또꣹깳౥葂鿎\\aꡨ?": 3900062609292104525 + } + ], + "ਉ샒⊩Lu@S䧰^g": -1.1487677090371648E18, + "⎢k⑊꬗yᏫ7^err糎Dt\u000bJ礯확ㆍ沑サꋽe赔㝢^J\u0004笲㿋idra剰-᪉C錇/Ĝ䂾ညS지?~콮gR敉⬹'䧭": 1901472137232418266, + "灗k䶥:?촽贍쓉꓈㒸g獘[뵎\\胕?\u0014_榙p.j稶,$`糉妋0>Fᡰly㘽$?": "]ꙛO赎&#㠃돱剳\"<◆>0誉齐_|z|裵씪>ᐌ㼍\"Z[琕}O?G뚇諦cs⠜撺5cu痑U圲\u001c?鴴計l춥/╓哼䄗茏ꮅ뫈댽A돌롖뤫V窗讬sHd&\nOi;_u" + } + ], + "Uﺗ\\Y\\梷䄬~\u0002": null, + "k\"Y磓ᗔ휎@U冈<\/w컑)[": false, + "曏J蝷⌻덦\u001f㙳s꥓⍟邫P늮쥄c∬ྡྷ舆렮칤Z趣5콡넛A쳨\\뀙骫(棻.*&輛LiIfi{@EA婳KᬰTXT": 
-4.3088230431977587E17 + }]} + ] + ], + "곃㲧<\/dఓꂟs其ࡧ&N葶=?c㠤Ჴ'횠숄臼#\u001a~": false + } + ] + ]}] + }] + }} + ], + "2f`⽰E쵟>J笂裭!〛觬囀ۺ쟰#桊l鹛ⲋ|RA_Vx፭gE됓h﵀mfỐ|?juTU档[d⢼⺻p濚7E峿": 5613688852456817133 + }, + "濘끶g忮7㏵殬W팕Q曁 뫰)惃廊5%-蹚zYZ樭ﴷQ锘쯤崫gg": true, + "絥ᇑ⦏쒓븣爚H.㗊߄o蘵貆ꂚ(쎔O᥉ﮓ]姨Wꁓ!RMA|o퉢THx轮7M껁U즨'i뾘舯o": "跥f꜃?" + }} + ], + "鷰鹮K-9k;ﰰ?_ݦѷ-ꅣ䩨Zꥱ\"mꠟ屎/콑Y╘2&鸞脇㏢ꀇ࠺ⰼ拾喭틮L꽩bt俸墶 [l/웄\"꾦\u20d3iও-&+\u000fQ+໱뵞": -1.296494662286671E-19 + }, + "HX੹/⨇୕붷Uﮘ旧\\쾜͔3l鄈磣糂̖䟎Eᐳw橖b῀_딕hu葰窳闹вU颵|染H죶.fP䗮:j䫢\\b뎖i燕ꜚG⮠W-≚뉗l趕": "ଊ칭Oa᡺$IV㷧L\u0019脴셀붿餲햪$迳向쐯켂PqfT\" ?I屉鴼쿕@硙z^鏕㊵M}㚛T젣쓌-W⩐-g%⺵<뮱~빅╴瑿浂脬\u0005왦燲4Ⴭb|D堧 <\/oEQh", + "䘶#㥘੐캔f巋ἡAJ䢚쭈ࣨ뫒*mᇊK,ࣺAꑱ\u000bR<\/A\"1a6鵌㯀bh곿w(\"$ꘁ*rಐ趣.d࿩k/抶면䒎9W⊃9": "漩b挋Sw藎\u0000", + "畀e㨼mK꙼HglKb,\"'䤜": null + }]}] + ] + ] + }] + ]} + ] + ]} + ], + "歙>駿ꣂ숰Q`J΋方樛(d鱾뼣(뫖턭\u20f9lচ9歌8o]8윶l얶?镖G摄탗6폋폵+g:䱫홊<멀뀿/س|ꭺs걐跶稚W々c㫣⎖": "㣮蔊깚Cꓔ舊|XRf遻㆚︆'쾉췝\\&言", + "殭\"cށɨꝙ䞘:嬮e潽Y펪㳅/\"O@ࠗ겴]췖YǞ(t>R\"N?梳LD恭=n氯T豰2R諸#N}*灧4}㶊G䍣b얚": null, + "襞<\/啧 B|싞W瓇)6簭鼡艆lN쩝`|펭佡\\間邝[z릶&쭟愱ꅅ\\T᰽1鯯偐栈4̸s윜R7⒝/똽?치X": "⏊躖Cﱰ2Qẫ脐&இ?%냝悊", + ",鰧偵셣싹xᎹ힨᯳EṬH㹖9": -4604276727380542356 + } + } + ]]]], + "웺㚑xs}q䭵䪠馯8?LB犯zK'os䚛HZ\"L?셎s^㿧㴘Cv2": null + }] + ] + ] + ], + "Kd2Kv+|z": 7367845130646124107, + "ᦂⶨ?ᝢ 祂些ഷ牢㋇操\"腭䙾㖪\\(y4cE뽺ㆷ쫺ᔖ%zfۻ$ў1柦,㶢9r漢": -3.133230960444846E-20, + "琘M焀q%㢟f鸯O⣏蓑맕鯊$O噷|)z褫^㢦⠮ꚯ꫞`毕1qꢚ{ĭ䎀বώT\"뱘3G൴?^^of": null + } + ], + "a8V᯺?:ﺃ/8ꉿBq|9啓댚;*i2": null, + "cpT瀇H珰Ừpೃi鎪Rr␣숬-鹸ҩ䠚z脚цGoN8入y%趌I┽2ឪЀiJNcN)槣/▟6S숆牟\"箑X僛G殱娇葱T%杻:J諹昰qV쨰": 8331037591040855245 + }], + "G5ᩜ䄗巢껳": true + } + }, + "Ồ巢ゕ@_譙A`碫鄐㡥砄㠓(^K": "?܃B혢▦@犑ὺD~T⧁|醁;o=J牌9냚⢽㨘{4觍蚔9#$∺\u0016p囅\\3Xk阖⪚\"UzA穕롬✎➁㭒춺C㣌ဉ\"2瓑员ᅽꝶ뫍}꽚ꞇ鶂舟彺]ꍽJC蝧銉", + "␆Ě膝\"b-퉐ACR言J謈53~V튥x䜢?ꃽɄY뮩ꚜ": "K/↾e萃}]Bs⾿q룅鷦-膋?m+死^魊镲6", + "粡霦c枋AHퟁo礼Ke?qWcA趸㡔ꂏ?\u000e춂8iতᦜ婪\u0015㢼nﵿꍻ!ᐴ関\u001d5j㨻gfῩUK5Ju丝tかTI'?㓏t>⼟o a>i}ᰗ;뤕ܝ": false, + "ꄮ匴껢ꂰ涽+䜨B蛹H䛓-k蕞fu7kL谖,'涃V~챳逋穞cT\"vQ쓕ObaCRQ㓡Ⲯ?轭⫦輢墳?vA餽=h䮇킵n폲퉅喙?\"'1疬V嬗Qd灗'Lự": "6v!s믁㭟㣯獃!磸餠ቂh0C뿯봗F鷭gꖶ~コkK<ᦈTt\\跓w㭣횋钘ᆹ듡䑚W䟾X'ꅔ4FL勉Vܴ邨y)2'〚쭉⽵-鞣E,Q.?块", + "?(˧쩯@崟吋歄K": null + }, + "Gc럃녧>?2DYI鴿\\륨)澔0ᔬlx'觔7젘⤡縷螩%Sv׫묈/]↱&S h\u0006歋ᑛxi̘}ひY蔯_醨鯘煑橾8?䵎쨋z儬ꁏ*@츾:": null + } + } + } + ] + ] + ]} + }, + "HO츧G": 3.694949578823609E17, + "QC\u0012(翻曇Tf㷟bGBJ옉53\\嚇ᛎD/\u001b夾၉4\"핀@祎)쫆yD\"i먎Vn㿿V1W᨝䶀": -6150931500380982286, + "Z㓮P翸鍱鉼K䋞꘺튿⭁Y": -7704503411315138850, + "]모开ꬖP븣c霤<[3aΠ\"黁䖖䰑뮋ꤦ秽∼㑷冹T+YUt\"싳F↭䖏&鋌": -2.7231911483181824E18, + "tꎖ": -4.9517948741799555E-19, + "䋘즊.⬅IꬃۣQ챢ꄑ黐|f?C⾺|兕읯sC鬸섾整腨솷V": "旆柩l쪦sᖸMy㦅울썉瘗㎜檵9ꍂ駓ૉᚿ/u3씅徐拉[Z䞸ࡗ1ꆱ&Q풘?ǂ8\u0011BCDY2볨;鸏": null, + "幫 n煥s쁇펇 왊-$C\"衝:\u0014㣯舼.3뙗Yl⋇\"K迎멎[꽵s}9鉳UK8쐥\"掄㹖h㙈!얄સ?Ꜳ봺R伕UTD媚I䜘W鏨蔮": -4.150842714188901E-17, + "ﺯ^㄄\b죵@fྉkf颡팋Ꞧ{/Pm0V둳⻿/落韒ꊔᚬ@5螺G\\咸a谆⊪ቧ慷绖?财(鷇u錝F=r၍橢ឳn:^iᴵtD볠覅N赴": null + }] + }] + } + ] + ]} + ]}, + "謯?w厓奰T李헗聝ឍ貖o⪇弒L!캶$ᆅ": -4299324168507841322, + "뺊奉_垐浸延몏孄Z舰2i$q붿좾껇d▵餏\"v暜Ҭ섁m￴g>": -1.60911932510533427E18 + } + ] + } + ] + ]], + "퉝꺔㠦楶Pꅱ": 7517896876489142899, + "": false + } + ]}, + "是u&I狻餼|谖j\"7c됮sסּ-踳鉷`䣷쉄_A艣鳞凃*m⯾☦椿q㎭N溔铉tlㆈ^": 1.93547720203604352E18, + "kⲨ\\%vr#\u000bⒺY\\t<\/3﬌R訤='﹠8蝤Ꞵ렴曔r": false + } + ]}, + "阨{c?C\u001d~K?鎌Ԭ8烫#뙣P초遗t㭱E­돒䆺}甗[R*1!\\~h㕅᰺@<9JꏏષI䳖栭6綘걹ᅩM\"▯是∔v鬽顭⋊譬": "운ﶁK敂(欖C취پ℄爦賾" + } + }} + }], + "鷨赼鸙+\\䭣t圙ڹx᜾ČN<\/踘\"S_맶a鷺漇T彚⎲i㈥LT-xA캔$\u001cUH=a0츺l릦": "溣㣂0濕=鉵氬駘>Pꌢpb솇쬤h힊줎獪㪬CrQ矠a&脍꼬爼M茴/΅\u0017弝轼y#Ꞡc6둴=?R崏뷠麖w?" 
+ }, + "閕ᘜ]CT)䵞l9z'xZF{:ؐI/躅匽졁:䟇AGF૸\u001cퟗ9)駬慟ꡒꆒRS״툋A<>\u0010\"ꂔ炃7g덚E৏bꅰ輤]o㱏_뷕ܘ暂\"u": "芢+U^+㢩^鱆8*1鈶鮀\u0002뺰9⬳ꪮlL䃣괟,G8\u20a8DF㉪錖0ㄤ瓶8Nଷd?眡GLc陓\\_죌V쁰ल二?c띦捱 \u0019JC\u0011b⤉zẒT볕\"绣蘨뚋cꡉkI\u001e鳴", + "ꃣI'{6u^㡃#཰Kq4逹y൒䧠䵮!㱙/n??{L풓ZET㙠퍿X2᩟綳跠葿㚙w཮x캽扳B唕S|尾}촕%N?o䪨": null, + "ⰴFjෟ셈[\u0018辷px?椯\\1<ﲻ栘ᣁ봢憠뉴p": -5263694954586507640 + } + ] + ]] + ]} + ]}] + ] + ], + "?#癘82禩鋆ꊝty?&": -1.9419029518535086E-19 + } + ] + ] + ]} + ] + ] + ], + "훊榲.|῕戄&.㚏Zꛦ2\"䢥ሆ⤢fV_摕婔?≍Fji冀탆꜕i㏬_ẑKᅢ꫄蔻XWc|饡Siẘ^㲦?羡2ぴ1縁ᙅ?쐉Ou": false + }]] + ]}}}, + "慂뗄卓蓔ᐓ匐嚖/颹蘯/翻ㆼL?뇊,텵<\\獷ごCボ": null + }, + "p溉ᑟi짣z:䒤棇r^٫%G9缑r砌롧.물农g?0׼ሩ4ƸO㣥㯄쩞ጩ": null, + "껎繥YxK\"F젷쨹뤤1wq轫o?鱑뜀瘊?뎃h灑\\ꛣ}K峐^ኖ⤐林ꉓhy": null + } + ], + "᱀n肓ㄛ\"堻2>m殮'1橌%Ꞵ군=Ӳ鯨9耛<\/n據0u彘8㬇៩f᏿诙]嚊": "䋯쪦S럶匏ㅛ#)O`ሀX_鐪渲⛀㨻宅闩➈ꢙஶDR⪍" + }, + "tA썓龇 ⋥bj왎录r땽✒롰;羋^\\?툳*┎?썀ma䵳넅U䳆૘〹䆀LQ0\b疀U~u$M}(鵸g⳾i抦뛹?䤈땚검.鹆?ꩡtⶥGĒ;!ቹHS峻B츪켏f5≺": 2366175040075384032, + "전pJjleb]ួ": -7.5418493141528422E18, + "n.鎖ጲ\n?,$䪘": true + }, + "欈Ar㉣螵᪚茩?O)": null + }, + "쫸M#x}D秱欐K=侫们丐.KꕾxẠ\u001e㿯䣛F܍캗qq8꟞ṢFD훎⵳簕꭛^鳜\u205c٫~⑟~冫ऊ2쫰<\/戲윱o<\"": true + }, + "㷝聥/T뱂\u0010锕|内䞇x侁≦㭖:M?iM᣿IJe煜dG࣯尃⚩gPt*辂.{磼럾䝪@a\\袛?}ᓺB珼": true + } + } + ]]}]}}, + "tn\"6ꫤ샾䄄;銞^%VBPwu묪`Y僑N.↺Ws?3C⤻9唩S䠮ᐴm;sᇷ냞඘B/;툥B?lB∤)G+O9m裢0kC햪䪤": -4.5941249382502277E18, + "ᚔt'\\愫?鵀@\\びꂕP큠<<]煹G-b!S?\nꖽ鼫,ݛ&頺y踦?E揆릱H}햧캡b@手.p탻>췽㣬ꒅ`qe佭P>ᓂ&?u}毚ᜉ蟶頳졪ᎏzl2wO": -2.53561440423275936E17 + }]} + } + ] + ]], + "潈촒⿂叡": 5495738871964062986 + } + ]] + } + ] + ]} + ]] + ]] + ]} + ] + ]}, + "ႁq킍蓅R`謈蟐ᦏ儂槐僻ﹶ9婌櫞釈~\"%匹躾ɢ뤥>࢟瀴愅?殕节/냔O✬H鲽엢?ᮈੁ⋧d␽㫐zCe*": 2.15062231586689536E17, + "㶵Ui曚珰鋪ᾼ臧P{䍏䷪쨑̟A뼿T渠誈䏚D1!잶<\/㡍7?)2l≣穷᛾稝{:;㡹nemיּ訊`G": null, + "䀕\"飕辭p圁f#뫆䶷뛮;⛴ᩍ3灚덏ᰝ쎓⦷詵%᜖Մfs⇫(\u001e~P|ﭗCⲾផv湟W첋(텪બT<บSꏉ੗⋲X婵i ӵ⇮?L䬇|ꈏ?졸": 1.548341247351782E-19 + } + ] + }, + "t;:N\u0015q鐦Rt缆{ꮐC?஛㷱敪\\+鲊㉫㓪몗릙竏(氵kYS": "XᰂT?൮ô", + "碕飦幑|+ 㚦鏶`镥ꁩ B<\/加륙": -4314053432419755959, + "秌孳(p!G?V傫%8ሽ8w;5鲗㦙LI檸\u2098": "zG N볞䆭鎍흘\\ONK3횙<\/樚立圌Q튅k쩎Ff쁋aׂJK銆ઘ즐狩6༥✙䩜篥CzP(聻駇HHퟲ讃%,ά{렍p而刲vy䦅ክ^톺M楒鍢㹳]Mdg2>䤉洞", + "踛M젧>忔芿㌜Zk": 2215369545966507819, + "씐A`$槭頰퍻^U覒\bG毲aᣴU;8!팲f꜇E⸃_卵{嫏羃X쀳C7뗮m(嚼u N܁谟D劯9]#": true, + "ﻩ!뵸-筚P᭛}ἰ履lPh?౮ⶹꆛ穉뎃g萑㑓溢CX뾇G㖬A錟]RKaꄘ]Yo+@䘁's섎襠$^홰}F": null + }, + "粘ꪒ4HXᕘ蹵.$區\r\u001d묁77pPc^y笲Q<\/ꖶ 訍䃍ᨕG?*": 1.73773035935040224E17 + }, + "婅拳?bkU;#D矠❴vVN쩆t㜷A풃갮娪a%鮏絪3dAv룒#tm쑬⌛qYwc4|L8KZ;xU⓭㳔밆拓EZ7襨eD|隰ऌ䧼u9Ԣ+]贴P荿": 2.9628516456987075E18 + }]}}] + ]} + }} + ]}] + ], + "|g翉F*湹̶\u0005⏐1脉̀eI쩓ᖂ㫱0碞l䴨ꑅ㵽7AtἈ턧yq䳥塑:z:遀ᄐX눔擉)`N3昛oQ셖y-ڨ⾶恢ꈵq^<\/": null, + "菹\\랓G^璬x৴뭸ゆUS겧﮷Bꮤ ┉銜᯻0%N7}~f洋坄Xꔼ<\/4妟Vꄟ9:౟곡t킅冩䧉笭裟炂4봋ⱳ叺怊t+怯涗\"0㖈Hq": false, + "졬믟'ﺇফ圪쓬멤m邸QLব䗁愍4jvs翙 ྍ꧀艳H-|": null, + "컮襱⣱뗠 R毪/鹙꾀%헳8&": -5770986448525107020 + } + ], + "B䔚bꐻ뙏姓展槰T-똌鷺tc灿᫽^㓟䏀o3o$꘭趙萬I顩)뇭Ἑ䓝\f@{ᣨ`x3蔛": null + } + ] + ] + }], + "⦖扚vWꃱ꥙㾠壢輓{-⎳鹷贏璿䜑bG倛⋐磎c皇皩7a~ﳫU╣Q࠭ꎉS摅姽OW.홌ೞ.": null, + "蚪eVlH献r}ᮏ믠ﰩꔄ@瑄ⲱ": null, + "퀭$JWoꩢg역쁍䖔㑺h&ୢtXX愰㱇?㾫I_6 OaB瑈q裿": null, + "꽦ﲼLyr纛Zdu珍B絟쬴糔?㕂짹䏵e": "ḱ\u2009cX9멀i䶛簆㳀k" + } + ]]]], + "(_ꏮg່澮?ᩑyM<艷\u001aꪽ\\庼뙭Z맷㰩Vm\\lY筺]3㋲2㌩㄀Eਟ䝵⨄쐨ᔟgङHn鐖⤇놋瓇Q탚單oY\"♆臾jHᶈ征ቄ??uㇰA?#1侓": null + }, + "觓^~ሢ&iI띆g륎ḱ캀.ᓡꀮ胙鈉": 1.0664523593012836E-19, + "y詭Gbᔶऽs댁U:杜⤎ϲ쁗⮼D醄诿q뙰I#즧v蔎xHᵿt᡽[**?崮耖p缫쿃L菝,봬ꤦC쯵#=X1瞻@OZc鱗CQTx": null + } + ] + }}], + "剘紁\u0004\\Xn⊠6,တױ;嵣崇}讃iႽ)d1\\䔓": null + }, + "脨z\"{X,1u찜<'k&@?1}Yn$\u0015Rd輲ーa쮂굄+B$l": true, + "諳>*쭮괐䵟Ґ+<箁}빀䅱⡔檏臒hIH脟ꩪC핝ଗP좕\"0i<\/C褻D۞恗+^5?'ꂱ䚫^7}㡠cq6\\쨪ꔞꥢ?纖䫀氮蒫侲빦敶q{A煲G": -6880961710038544266 + }}] + }, + "5s⨲JvಽῶꭂᄢI.a৊": null, + "?1q꽏쿻ꛋDR%U娝>DgN乭G": -1.2105047302732358E-19 + } + ] + ]}, + "qZz`撋뙹둣j碇쁏\\ꆥ\u0018@藴疰Wz)O{F䶛l᷂绘訥$]뮍夻䢋䩇萿獰樧猵⣭j萶q)$꬚⵷0馢W:Ⱍ!Qoe": -1666634370862219540, + "t": "=wp|~碎Q鬳Ӎ\\l-<\/^ﳊhn퐖}䍔t碵ḛ혷?靻䊗", + "邙쇡㯇%#=,E4勃驆V繚q[Y댻XV㡸[逹ᰏ葢B@u=JS5?bLRn얮㍉⏅ﰳ?a6[&큟!藈": 1.2722786745736667E-19 + }, + "X블땨4{ph鵋ꉯ웸 
5p簂䦭s_E徔濧d稝~No穔噕뽲)뉈c5M윅>⚋[岦䲟懷恁?鎐꓆ฬ爋獠䜔s{\u001bm鐚儸煛%bﯿXT>ꗘ@8G": 1157841540507770724, + "媤娪Q杸\u0011SAyᡈ쿯": true, + "灚^ಸ%걁<\/蛯?\"祴坓\\\\'흍": -3.4614808555942579E18, + "釴U:O湛㴑䀣렑縓\ta)(j:숾却䗌gCiB뽬Oyuq輥厁/7)?今hY︺Q": null + } + ] + ]]]}] + ], + "I笔趠Ph!<ཛྷ㸞诘X$畉F\u0005笷菟.Esr릙!W☆䲖뗷莾뒭U\"䀸犜Uo3Gꯌx4r蔇᡹㧪쨢準<䂀%ࡡꟼ瑍8炝Xs0䀝销?fi쥱ꆝલBB": -8571484181158525797, + "L⦁o#J|\"⽩-㱢d㌛8d\\㶤傩儻E[Y熯)r噤὘勇 }": "e(濨쓌K䧚僒㘍蠤Vᛸ\"络QJL2,嬓왍伢㋒䴿考澰@(㏾`kX$끑эE斡,蜍&~y", + "vj.|统圪ᵮPL?2oŶ`밧\"勃+0ue%⿥绬췈체$6:qa렐Q;~晘3㙘鹑": true, + "ශؙ4獄⶿c︋i⚅:ん閝Ⳙ苆籦kw{䙞셕pC췃ꍬ␜꟯ꚓ酄b힝hwk꭭M鬋8B耳쑘WQ\\偙ac'唀x᪌\u2048*h짎#ፇ鮠뾏ឿ뀌": false, + "⎀jꄒ牺3Ⓝ컴~?親ꕽぼܓ喏瘘!@<튋㐌꿱⩦{a?Yv%⪧笯Uܱ栅E搚i뚬:ꄃx7䙳ꦋ&䓹vq☶I䁘ᾘ涜\\썉뺌Lr%Bc㍜3?ꝭ砿裞]": null, + "⭤뙓z(㡂%亳K䌽꫿AԾ岺㦦㼴輞낚Vꦴw냟鬓㹈뽈+o3譻K1잞": 2091209026076965894, + "ㇲ\t⋇轑ꠤ룫X긒\"zoY읇희wj梐쐑l侸`e%s": -9.9240075473576563E17, + "啸ꮑ㉰!ᚓ}銏": -4.0694813896301194E18, + ">]囋੽EK뇜>_ꀣ緳碖{쐐裔[<ನ\"䇅\"5L?#xTwv#罐\u0005래t应\\N?빗;": "v쮽瞭p뭃" + } + ]], + "斴槾?Z翁\"~慍弞ﻆ=꜡o5鐋dw\"?K蠡i샾ogDﲰ_C*⬟iㇷ4nય蟏[㟉U꽌娛苸 ঢ়操贻洞펻)쿗૊許X⨪VY츚Z䍾㶭~튃ᵦ<\/E臭tve猑x嚢": null, + "锡⛩<\/칥ꈙᬙ蝀&Ꚑ籬■865?_>L詏쿨䈌浿弥爫̫lj&zx<\/C쉾?覯n?": null, + "꾳鑤/꼩d=ᘈn挫ᑩ䰬ZC": "3錢爋6Ƹ䴗v⪿Wr益G韠[\u0010屗9쁡钁u?殢c䳀蓃樄욂NAq赟c튒瘁렶Aૡɚ捍" + } + ] + ] + ]} + ] + ] + }]]]}} + ]}], + "Ej䗳U<\/Q=灒샎䞦,堰頠@褙g_\u0003ꤾfⶽ?퇋!łB〙ד3CC䌴鈌U:뭔咎(Qો臃䡬荋BO7㢝䟸\"Yb": 2.36010731779814E-20, + "逸'0岔j\u000e눘먷翌C츊秦=ꭣ棭ှ;鳸=麱$XP⩉駚橄A\\좱⛌jqv䰞3Ь踌v㳆¹gT┌gvLB賖烡m?@E঳i": null + }, + "曺v찘ׁ?&绫O័": 9107241066550187880 + } + ] + ], + "(e屄\u0019昜훕琖b蓘ᬄ0/۲묇Z蘮ဏ⨏蛘胯뢃@㘉8ሪWᨮ⦬ᅳ䅴HI၇쨳z囕陻엣1赳o": true, + ",b刈Z,ၠ晐T솝ŕB⩆ou'퐼≃绗雗d譊": null, + "a唥KB\"ﳝ肕$u\n^⅄P䟼냉䞸⩪u윗瀱ꔨ#yşs꒬=1|ﲤ爢`t౐튼쳫_Az(Ṋ擬㦷좕耈6": 2099309172767331582, + "?㴸U<\/䢔ꯡ阽扆㐤q鐋?f㔫wM嬙-;UV죫嚔픞G&\"Cᗍ䪏풊Q": "VM7疹+陕枡툩窲}翡䖶8欞čsT뮐}璤:jﺋ鎴}HfA൝⧻Zd#Qu茅J髒皣Y-︴[?-~쉜v딏璮㹚䅊﩯<-#\u000e걀h\u0004u抱﵊㼃U<㱷⊱IC進" + }, + "숌dee節鏽邺p넱蹓+e罕U": true + } + ], + "b⧴룏??ᔠ3ぱ>%郿劃翐ꏬꠛW瞳᫏누躨狀ໄy੽\"ីuS=㨞馸k乆E": "トz݈^9R䬑<ﮛGRꨳ\u000fTT泠纷꽀MRᴱ纊:㠭볮?%N56%鈕1䗍䜁a䲗j陇=뿻偂衋࿘ᓸ?ᕵZ+<\/}H耢b䀁z^f$&㝒LkꢳI脚뙛u": 5.694374481577558E-20 + }] + } + ]], + "obj": {"key": "wrong value"}, + "퓲꽪m{㶩/뇿#⼢&᭙硞㪔E嚉c樱㬇1a綑᝖DḾ䝩": null + } +} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/data/webapp.json b/src/s3select/rapidjson/bin/data/webapp.json new file mode 100644 index 000000000..ee7b0f8ba --- /dev/null +++ b/src/s3select/rapidjson/bin/data/webapp.json @@ -0,0 +1,88 @@ +{"web-app": { + "servlet": [ + { + "servlet-name": "cofaxCDS", + "servlet-class": "org.cofax.cds.CDSServlet", + "init-param": { + "configGlossary:installationAt": "Philadelphia, PA", + "configGlossary:adminEmail": "ksm@pobox.com", + "configGlossary:poweredBy": "Cofax", + "configGlossary:poweredByIcon": "/images/cofax.gif", + "configGlossary:staticPath": "/content/static", + "templateProcessorClass": "org.cofax.WysiwygTemplate", + "templateLoaderClass": "org.cofax.FilesTemplateLoader", + "templatePath": "templates", + "templateOverridePath": "", + "defaultListTemplate": "listTemplate.htm", + "defaultFileTemplate": "articleTemplate.htm", + "useJSP": false, + "jspListTemplate": "listTemplate.jsp", + "jspFileTemplate": "articleTemplate.jsp", + "cachePackageTagsTrack": 200, + "cachePackageTagsStore": 200, + "cachePackageTagsRefresh": 60, + "cacheTemplatesTrack": 100, + "cacheTemplatesStore": 50, + "cacheTemplatesRefresh": 15, + "cachePagesTrack": 200, + "cachePagesStore": 100, + "cachePagesRefresh": 10, + "cachePagesDirtyRead": 10, + "searchEngineListTemplate": "forSearchEnginesList.htm", + "searchEngineFileTemplate": "forSearchEngines.htm", + "searchEngineRobotsDb": "WEB-INF/robots.db", + "useDataStore": true, + "dataStoreClass": "org.cofax.SqlDataStore", + "redirectionClass": "org.cofax.SqlRedirection", + "dataStoreName": "cofax", 
+ "dataStoreDriver": "com.microsoft.jdbc.sqlserver.SQLServerDriver", + "dataStoreUrl": "jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon", + "dataStoreUser": "sa", + "dataStorePassword": "dataStoreTestQuery", + "dataStoreTestQuery": "SET NOCOUNT ON;select test='test';", + "dataStoreLogFile": "/usr/local/tomcat/logs/datastore.log", + "dataStoreInitConns": 10, + "dataStoreMaxConns": 100, + "dataStoreConnUsageLimit": 100, + "dataStoreLogLevel": "debug", + "maxUrlLength": 500}}, + { + "servlet-name": "cofaxEmail", + "servlet-class": "org.cofax.cds.EmailServlet", + "init-param": { + "mailHost": "mail1", + "mailHostOverride": "mail2"}}, + { + "servlet-name": "cofaxAdmin", + "servlet-class": "org.cofax.cds.AdminServlet"}, + + { + "servlet-name": "fileServlet", + "servlet-class": "org.cofax.cds.FileServlet"}, + { + "servlet-name": "cofaxTools", + "servlet-class": "org.cofax.cms.CofaxToolsServlet", + "init-param": { + "templatePath": "toolstemplates/", + "log": 1, + "logLocation": "/usr/local/tomcat/logs/CofaxTools.log", + "logMaxSize": "", + "dataLog": 1, + "dataLogLocation": "/usr/local/tomcat/logs/dataLog.log", + "dataLogMaxSize": "", + "removePageCache": "/content/admin/remove?cache=pages&id=", + "removeTemplateCache": "/content/admin/remove?cache=templates&id=", + "fileTransferFolder": "/usr/local/tomcat/webapps/content/fileTransferFolder", + "lookInContext": 1, + "adminGroupID": 4, + "betaServer": true}}], + "servlet-mapping": { + "cofaxCDS": "/", + "cofaxEmail": "/cofaxutil/aemail/*", + "cofaxAdmin": "/admin/*", + "fileServlet": "/static/*", + "cofaxTools": "/tools/*"}, + + "taglib": { + "taglib-uri": "cofax.tld", + "taglib-location": "/WEB-INF/tlds/cofax.tld"}}} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/data/widget.json b/src/s3select/rapidjson/bin/data/widget.json new file mode 100644 index 000000000..32690e8b7 --- /dev/null +++ b/src/s3select/rapidjson/bin/data/widget.json @@ -0,0 +1,26 @@ +{"widget": { + "debug": "on", + "window": { + "title": "Sample Konfabulator Widget", + "name": "main_window", + "width": 500, + "height": 500 + }, + "image": { + "src": "Images/Sun.png", + "name": "sun1", + "hOffset": 250, + "vOffset": 250, + "alignment": "center" + }, + "text": { + "data": "Click Here", + "size": 36, + "style": "bold", + "name": "text1", + "hOffset": 250, + "vOffset": 100, + "alignment": "center", + "onMouseUp": "sun1.opacity = (sun1.opacity / 100) * 90;" + } +}} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/draft-04/schema b/src/s3select/rapidjson/bin/draft-04/schema new file mode 100644 index 000000000..85eb502a6 --- /dev/null +++ b/src/s3select/rapidjson/bin/draft-04/schema @@ -0,0 +1,150 @@ +{ + "id": "http://json-schema.org/draft-04/schema#", + "$schema": "http://json-schema.org/draft-04/schema#", + "description": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { "$ref": "#" } + }, + "positiveInteger": { + "type": "integer", + "minimum": 0 + }, + "positiveIntegerDefault0": { + "allOf": [ { "$ref": "#/definitions/positiveInteger" }, { "default": 0 } ] + }, + "simpleTypes": { + "enum": [ "array", "boolean", "integer", "null", "number", "object", "string" ] + }, + "stringArray": { + "type": "array", + "items": { "type": "string" }, + "minItems": 1, + "uniqueItems": true + } + }, + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uri" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "title": { + "type": "string" + }, 
+ "description": { + "type": "string" + }, + "default": {}, + "multipleOf": { + "type": "number", + "minimum": 0, + "exclusiveMinimum": true + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { "$ref": "#/definitions/positiveInteger" }, + "minLength": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "items": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/schemaArray" } + ], + "default": {} + }, + "maxItems": { "$ref": "#/definitions/positiveInteger" }, + "minItems": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { "$ref": "#/definitions/positiveInteger" }, + "minProperties": { "$ref": "#/definitions/positiveIntegerDefault0" }, + "required": { "$ref": "#/definitions/stringArray" }, + "additionalProperties": { + "anyOf": [ + { "type": "boolean" }, + { "$ref": "#" } + ], + "default": {} + }, + "definitions": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { "$ref": "#" }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { "$ref": "#" }, + { "$ref": "#/definitions/stringArray" } + ] + } + }, + "enum": { + "type": "array", + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { "$ref": "#/definitions/simpleTypes" }, + { + "type": "array", + "items": { "$ref": "#/definitions/simpleTypes" }, + "minItems": 1, + "uniqueItems": true + } + ] + }, + "allOf": { "$ref": "#/definitions/schemaArray" }, + "anyOf": { "$ref": "#/definitions/schemaArray" }, + "oneOf": { "$ref": "#/definitions/schemaArray" }, + "not": { "$ref": "#" } + }, + "dependencies": { + "exclusiveMaximum": [ "maximum" ], + "exclusiveMinimum": [ "minimum" ] + }, + "default": {} +} diff --git a/src/s3select/rapidjson/bin/encodings/utf16be.json b/src/s3select/rapidjson/bin/encodings/utf16be.json new file mode 100644 index 000000000..e46dbfb9d Binary files /dev/null and b/src/s3select/rapidjson/bin/encodings/utf16be.json differ diff --git a/src/s3select/rapidjson/bin/encodings/utf16bebom.json b/src/s3select/rapidjson/bin/encodings/utf16bebom.json new file mode 100644 index 000000000..0a23ae205 Binary files /dev/null and b/src/s3select/rapidjson/bin/encodings/utf16bebom.json differ diff --git a/src/s3select/rapidjson/bin/encodings/utf16le.json b/src/s3select/rapidjson/bin/encodings/utf16le.json new file mode 100644 index 000000000..92d504530 Binary files /dev/null and b/src/s3select/rapidjson/bin/encodings/utf16le.json differ diff --git a/src/s3select/rapidjson/bin/encodings/utf16lebom.json b/src/s3select/rapidjson/bin/encodings/utf16lebom.json new file mode 100644 index 000000000..eaba00132 Binary files /dev/null and b/src/s3select/rapidjson/bin/encodings/utf16lebom.json differ diff --git a/src/s3select/rapidjson/bin/encodings/utf32be.json b/src/s3select/rapidjson/bin/encodings/utf32be.json new file mode 100644 index 000000000..9cbb52227 Binary files /dev/null and 
b/src/s3select/rapidjson/bin/encodings/utf32be.json differ diff --git a/src/s3select/rapidjson/bin/encodings/utf32bebom.json b/src/s3select/rapidjson/bin/encodings/utf32bebom.json new file mode 100644 index 000000000..bde6a99ab Binary files /dev/null and b/src/s3select/rapidjson/bin/encodings/utf32bebom.json differ diff --git a/src/s3select/rapidjson/bin/encodings/utf32le.json b/src/s3select/rapidjson/bin/encodings/utf32le.json new file mode 100644 index 000000000..b00f290a6 Binary files /dev/null and b/src/s3select/rapidjson/bin/encodings/utf32le.json differ diff --git a/src/s3select/rapidjson/bin/encodings/utf32lebom.json b/src/s3select/rapidjson/bin/encodings/utf32lebom.json new file mode 100644 index 000000000..d3db39bf7 Binary files /dev/null and b/src/s3select/rapidjson/bin/encodings/utf32lebom.json differ diff --git a/src/s3select/rapidjson/bin/encodings/utf8.json b/src/s3select/rapidjson/bin/encodings/utf8.json new file mode 100644 index 000000000..c500c943f --- /dev/null +++ b/src/s3select/rapidjson/bin/encodings/utf8.json @@ -0,0 +1,7 @@ +{ + "en":"I can eat glass and it doesn't hurt me.", + "zh-Hant":"我能吞下玻璃而不傷身體。", + "zh-Hans":"我能吞下玻璃而不伤身体。", + "ja":"私はガラスを食べられます。それは私を傷つけません。", + "ko":"나는 유리를 먹을 수 있어요. 그래도 아프지 않아요" +} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/encodings/utf8bom.json b/src/s3select/rapidjson/bin/encodings/utf8bom.json new file mode 100644 index 000000000..b9839fe2f --- /dev/null +++ b/src/s3select/rapidjson/bin/encodings/utf8bom.json @@ -0,0 +1,7 @@ +{ + "en":"I can eat glass and it doesn't hurt me.", + "zh-Hant":"我能吞下玻璃而不傷身體。", + "zh-Hans":"我能吞下玻璃而不伤身体。", + "ja":"私はガラスを食べられます。それは私を傷つけません。", + "ko":"나는 유리를 먹을 수 있어요. 그래도 아프지 않아요" +} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail1.json b/src/s3select/rapidjson/bin/jsonchecker/fail1.json new file mode 100644 index 000000000..6216b865f --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail1.json @@ -0,0 +1 @@ +"A JSON payload should be an object or array, not a string." 
\ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail10.json b/src/s3select/rapidjson/bin/jsonchecker/fail10.json new file mode 100644 index 000000000..5d8c0047b --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail10.json @@ -0,0 +1 @@ +{"Extra value after close": true} "misplaced quoted value" \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail11.json b/src/s3select/rapidjson/bin/jsonchecker/fail11.json new file mode 100644 index 000000000..76eb95b45 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail11.json @@ -0,0 +1 @@ +{"Illegal expression": 1 + 2} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail12.json b/src/s3select/rapidjson/bin/jsonchecker/fail12.json new file mode 100644 index 000000000..77580a452 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail12.json @@ -0,0 +1 @@ +{"Illegal invocation": alert()} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail13.json b/src/s3select/rapidjson/bin/jsonchecker/fail13.json new file mode 100644 index 000000000..379406b59 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail13.json @@ -0,0 +1 @@ +{"Numbers cannot have leading zeroes": 013} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail14.json b/src/s3select/rapidjson/bin/jsonchecker/fail14.json new file mode 100644 index 000000000..0ed366b38 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail14.json @@ -0,0 +1 @@ +{"Numbers cannot be hex": 0x14} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail15.json b/src/s3select/rapidjson/bin/jsonchecker/fail15.json new file mode 100644 index 000000000..fc8376b60 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail15.json @@ -0,0 +1 @@ +["Illegal backslash escape: \x15"] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail16.json b/src/s3select/rapidjson/bin/jsonchecker/fail16.json new file mode 100644 index 000000000..3fe21d4b5 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail16.json @@ -0,0 +1 @@ +[\naked] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail17.json b/src/s3select/rapidjson/bin/jsonchecker/fail17.json new file mode 100644 index 000000000..62b9214ae --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail17.json @@ -0,0 +1 @@ +["Illegal backslash escape: \017"] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail18.json b/src/s3select/rapidjson/bin/jsonchecker/fail18.json new file mode 100644 index 000000000..edac92716 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail18.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail19.json b/src/s3select/rapidjson/bin/jsonchecker/fail19.json new file mode 100644 index 000000000..3b9c46fa9 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail19.json @@ -0,0 +1 @@ +{"Missing colon" null} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail2.json b/src/s3select/rapidjson/bin/jsonchecker/fail2.json new file mode 100644 index 000000000..6b7c11e5a --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail2.json @@ -0,0 +1 @@ +["Unclosed array" \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail20.json 
b/src/s3select/rapidjson/bin/jsonchecker/fail20.json new file mode 100644 index 000000000..27c1af3e7 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail20.json @@ -0,0 +1 @@ +{"Double colon":: null} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail21.json b/src/s3select/rapidjson/bin/jsonchecker/fail21.json new file mode 100644 index 000000000..62474573b --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail21.json @@ -0,0 +1 @@ +{"Comma instead of colon", null} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail22.json b/src/s3select/rapidjson/bin/jsonchecker/fail22.json new file mode 100644 index 000000000..a7752581b --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail22.json @@ -0,0 +1 @@ +["Colon instead of comma": false] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail23.json b/src/s3select/rapidjson/bin/jsonchecker/fail23.json new file mode 100644 index 000000000..494add1ca --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail23.json @@ -0,0 +1 @@ +["Bad value", truth] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail24.json b/src/s3select/rapidjson/bin/jsonchecker/fail24.json new file mode 100644 index 000000000..caff239bf --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail24.json @@ -0,0 +1 @@ +['single quote'] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail25.json b/src/s3select/rapidjson/bin/jsonchecker/fail25.json new file mode 100644 index 000000000..8b7ad23e0 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail25.json @@ -0,0 +1 @@ +[" tab character in string "] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail26.json b/src/s3select/rapidjson/bin/jsonchecker/fail26.json new file mode 100644 index 000000000..845d26a6a --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail26.json @@ -0,0 +1 @@ +["tab\ character\ in\ string\ "] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail27.json b/src/s3select/rapidjson/bin/jsonchecker/fail27.json new file mode 100644 index 000000000..6b01a2ca4 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail27.json @@ -0,0 +1,2 @@ +["line +break"] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail28.json b/src/s3select/rapidjson/bin/jsonchecker/fail28.json new file mode 100644 index 000000000..621a0101c --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail28.json @@ -0,0 +1,2 @@ +["line\ +break"] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail29.json b/src/s3select/rapidjson/bin/jsonchecker/fail29.json new file mode 100644 index 000000000..47ec421bb --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail29.json @@ -0,0 +1 @@ +[0e] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail3.json b/src/s3select/rapidjson/bin/jsonchecker/fail3.json new file mode 100644 index 000000000..168c81eb7 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail3.json @@ -0,0 +1 @@ +{unquoted_key: "keys must be quoted"} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail30.json b/src/s3select/rapidjson/bin/jsonchecker/fail30.json new file mode 100644 index 000000000..8ab0bc4b8 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail30.json @@ -0,0 +1 @@ +[0e+] \ No newline at end of 
file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail31.json b/src/s3select/rapidjson/bin/jsonchecker/fail31.json new file mode 100644 index 000000000..1cce602b5 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail31.json @@ -0,0 +1 @@ +[0e+-1] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail32.json b/src/s3select/rapidjson/bin/jsonchecker/fail32.json new file mode 100644 index 000000000..45cba7396 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail32.json @@ -0,0 +1 @@ +{"Comma instead if closing brace": true, \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail33.json b/src/s3select/rapidjson/bin/jsonchecker/fail33.json new file mode 100644 index 000000000..ca5eb19dc --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail33.json @@ -0,0 +1 @@ +["mismatch"} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail4.json b/src/s3select/rapidjson/bin/jsonchecker/fail4.json new file mode 100644 index 000000000..9de168bf3 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail4.json @@ -0,0 +1 @@ +["extra comma",] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail5.json b/src/s3select/rapidjson/bin/jsonchecker/fail5.json new file mode 100644 index 000000000..ddf3ce3d2 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail5.json @@ -0,0 +1 @@ +["double extra comma",,] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail6.json b/src/s3select/rapidjson/bin/jsonchecker/fail6.json new file mode 100644 index 000000000..ed91580e1 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail6.json @@ -0,0 +1 @@ +[ , "<-- missing value"] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail7.json b/src/s3select/rapidjson/bin/jsonchecker/fail7.json new file mode 100644 index 000000000..8a96af3e4 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail7.json @@ -0,0 +1 @@ +["Comma after the close"], \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail8.json b/src/s3select/rapidjson/bin/jsonchecker/fail8.json new file mode 100644 index 000000000..b28479c6e --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail8.json @@ -0,0 +1 @@ +["Extra close"]] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/fail9.json b/src/s3select/rapidjson/bin/jsonchecker/fail9.json new file mode 100644 index 000000000..5815574f3 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/fail9.json @@ -0,0 +1 @@ +{"Extra comma": true,} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/pass1.json b/src/s3select/rapidjson/bin/jsonchecker/pass1.json new file mode 100644 index 000000000..70e268543 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/pass1.json @@ -0,0 +1,58 @@ +[ + "JSON Test Pattern pass1", + {"object with 1 member":["array with 1 element"]}, + {}, + [], + -42, + true, + false, + null, + { + "integer": 1234567890, + "real": -9876.543210, + "e": 0.123456789e-12, + "E": 1.234567890E+34, + "": 23456789012E66, + "zero": 0, + "one": 1, + "space": " ", + "quote": "\"", + "backslash": "\\", + "controls": "\b\f\n\r\t", + "slash": "/ & \/", + "alpha": "abcdefghijklmnopqrstuvwyz", + "ALPHA": "ABCDEFGHIJKLMNOPQRSTUVWYZ", + "digit": "0123456789", + "0123456789": "digit", + "special": "`1~!@#$%^&*()_+-={':[,]}|;.</>?", + "hex": "\u0123\u4567\u89AB\uCDEF\uabcd\uef4A", + 
"true": true, + "false": false, + "null": null, + "array":[ ], + "object":{ }, + "address": "50 St. James Street", + "url": "http://www.JSON.org/", + "comment": "// /* */": " ", + " s p a c e d " :[1,2 , 3 + +, + +4 , 5 , 6 ,7 ],"compact":[1,2,3,4,5,6,7], + "jsontext": "{\"object with 1 member\":[\"array with 1 element\"]}", + "quotes": "" \u0022 %22 0x22 034 "", + "\/\\\"\uCAFE\uBABE\uAB98\uFCDE\ubcda\uef4A\b\f\n\r\t`1~!@#$%^&*()_+-=[]{}|;:',./<>?" +: "A key can be any string" + }, + 0.5 ,98.6 +, +99.44 +, + +1066, +1e1, +0.1e1, +1e-1, +1e00,2e+00,2e-00 +,"rosebud"] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/pass2.json b/src/s3select/rapidjson/bin/jsonchecker/pass2.json new file mode 100644 index 000000000..d3c63c7ad --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/pass2.json @@ -0,0 +1 @@ +[[[[[[[[[[[[[[[[[[["Not too deep"]]]]]]]]]]]]]]]]]]] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonchecker/pass3.json b/src/s3select/rapidjson/bin/jsonchecker/pass3.json new file mode 100644 index 000000000..4528d51f1 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/pass3.json @@ -0,0 +1,6 @@ +{ + "JSON Test Pattern pass3": { + "The outermost value": "must be an object or array.", + "In this test": "It is an object." + } +} diff --git a/src/s3select/rapidjson/bin/jsonchecker/readme.txt b/src/s3select/rapidjson/bin/jsonchecker/readme.txt new file mode 100644 index 000000000..321d89d99 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonchecker/readme.txt @@ -0,0 +1,3 @@ +Test suite from http://json.org/JSON_checker/. + +If the JSON_checker is working correctly, it must accept all of the pass*.json files and reject all of the fail*.json files. diff --git a/src/s3select/rapidjson/bin/jsonschema/.gitignore b/src/s3select/rapidjson/bin/jsonschema/.gitignore new file mode 100644 index 000000000..1333ed77b --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/.gitignore @@ -0,0 +1 @@ +TODO diff --git a/src/s3select/rapidjson/bin/jsonschema/.travis.yml b/src/s3select/rapidjson/bin/jsonschema/.travis.yml new file mode 100644 index 000000000..deecd6110 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/.travis.yml @@ -0,0 +1,4 @@ +language: python +python: "2.7" +install: pip install jsonschema +script: bin/jsonschema_suite check diff --git a/src/s3select/rapidjson/bin/jsonschema/LICENSE b/src/s3select/rapidjson/bin/jsonschema/LICENSE new file mode 100644 index 000000000..c28adbadd --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/LICENSE @@ -0,0 +1,19 @@ +Copyright (c) 2012 Julian Berman + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/src/s3select/rapidjson/bin/jsonschema/README.md b/src/s3select/rapidjson/bin/jsonschema/README.md new file mode 100644 index 000000000..6d9da9493 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/README.md @@ -0,0 +1,148 @@ +JSON Schema Test Suite [![Build Status](https://travis-ci.org/json-schema/JSON-Schema-Test-Suite.png?branch=develop)](https://travis-ci.org/json-schema/JSON-Schema-Test-Suite) +====================== + +This repository contains a set of JSON objects that implementors of JSON Schema +validation libraries can use to test their validators. + +It is meant to be language agnostic and should require only a JSON parser. + +The conversion of the JSON objects into tests within your test framework of +choice is still the job of the validator implementor. + +Structure of a Test +------------------- + +If you're going to use this suite, you need to know how tests are laid out. The +tests are contained in the `tests` directory at the root of this repository. + +Inside that directory is a subdirectory for each draft or version of the +schema. We'll use `draft3` as an example. + +If you look inside the draft directory, there are a number of `.json` files, +which logically group a set of test cases together. Often the grouping is by +property under test, but not always, especially within optional test files +(discussed below). + +Inside each `.json` file is a single array containing objects. It's easiest to +illustrate the structure of these with an example: + +```json + { + "description": "the description of the test case", + "schema": {"the schema that should" : "be validated against"}, + "tests": [ + { + "description": "a specific test of a valid instance", + "data": "the instance", + "valid": true + }, + { + "description": "another specific test this time, invalid", + "data": 15, + "valid": false + } + ] + } +``` + +So a description, a schema, and some tests, where tests is an array containing +one or more objects with descriptions, data, and a boolean indicating whether +they should be valid or invalid. + +Coverage +-------- + +Draft 3 and 4 should have full coverage. If you see anything missing or think +there is a useful test missing, please send a pull request or open an issue. 
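Given the test layout just described, a minimal consumer sketch (an illustration for this patch review, not part of the vendored README) might look like the following Python, assuming the `jsonschema` validator package is installed and using a hypothetical file path:

```python
import json

import jsonschema  # assumed dependency: pip install jsonschema

# Hypothetical path; every tests/<draft>/<name>.json file shares this shape.
with open("tests/draft3/minimum.json") as f:
    groups = json.load(f)  # each file is an array of test groups

for group in groups:  # group: {description, schema, tests}
    validator = jsonschema.Draft3Validator(group["schema"])
    for test in group["tests"]:  # test: {description, data, valid}
        try:
            validator.validate(test["data"])
            observed = True
        except jsonschema.ValidationError:
            observed = False
        status = "ok" if observed == test["valid"] else "FAIL"
        print(status, group["description"], "-", test["description"])
```

A real harness would register each case with its own test framework rather than printing; that mapping is the "conversion" step the suite leaves to implementors.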
+ +Who Uses the Test Suite +----------------------- + +This suite is being used by: + +### Coffeescript ### + +* [jsck](https://github.com/pandastrike/jsck) + +### Dart ### + +* [json_schema](https://github.com/patefacio/json_schema) + +### Erlang ### + +* [jesse](https://github.com/klarna/jesse) + +### Go ### + +* [gojsonschema](https://github.com/sigu-399/gojsonschema) +* [validate-json](https://github.com/cesanta/validate-json) + +### Haskell ### + +* [aeson-schema](https://github.com/timjb/aeson-schema) +* [hjsonschema](https://github.com/seagreen/hjsonschema) + +### Java ### + +* [json-schema-validator](https://github.com/fge/json-schema-validator) + +### JavaScript ### + +* [json-schema-benchmark](https://github.com/Muscula/json-schema-benchmark) +* [direct-schema](https://github.com/IreneKnapp/direct-schema) +* [is-my-json-valid](https://github.com/mafintosh/is-my-json-valid) +* [jassi](https://github.com/iclanzan/jassi) +* [JaySchema](https://github.com/natesilva/jayschema) +* [json-schema-valid](https://github.com/ericgj/json-schema-valid) +* [Jsonary](https://github.com/jsonary-js/jsonary) +* [jsonschema](https://github.com/tdegrunt/jsonschema) +* [request-validator](https://github.com/bugventure/request-validator) +* [skeemas](https://github.com/Prestaul/skeemas) +* [tv4](https://github.com/geraintluff/tv4) +* [z-schema](https://github.com/zaggino/z-schema) +* [jsen](https://github.com/bugventure/jsen) +* [ajv](https://github.com/epoberezkin/ajv) + +### Node.js ### + +The JSON Schema Test Suite is also available as an +[npm](https://www.npmjs.com/package/json-schema-test-suite) package. +Node-specific support is maintained on the [node branch](https://github.com/json-schema/JSON-Schema-Test-Suite/tree/node). +See [NODE-README.md](https://github.com/json-schema/JSON-Schema-Test-Suite/blob/node/NODE-README.md) +for more information. + +### .NET ### + +* [Newtonsoft.Json.Schema](https://github.com/JamesNK/Newtonsoft.Json.Schema) + +### PHP ### + +* [json-schema](https://github.com/justinrainbow/json-schema) + +### Python ### + +* [jsonschema](https://github.com/Julian/jsonschema) + +### Ruby ### + +* [json-schema](https://github.com/hoxworth/json-schema) + +### Rust ### + +* [valico](https://github.com/rustless/valico) + +### Swift ### + +* [JSONSchema](https://github.com/kylef/JSONSchema.swift) + +If you use it as well, please fork and send a pull request adding yourself to +the list :). + +Contributing +------------ + +If you see something missing or incorrect, a pull request is most welcome! + +There are some sanity checks in place for testing the test suite. You can run +them with `bin/jsonschema_suite check` or `tox`. They will be run automatically by +[Travis CI](https://travis-ci.org/) as well. diff --git a/src/s3select/rapidjson/bin/jsonschema/bin/jsonschema_suite b/src/s3select/rapidjson/bin/jsonschema/bin/jsonschema_suite new file mode 100755 index 000000000..96108c86b --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/bin/jsonschema_suite @@ -0,0 +1,283 @@ +#! /usr/bin/env python +from __future__ import print_function +import sys +import textwrap + +try: + import argparse +except ImportError: + print(textwrap.dedent(""" + The argparse library could not be imported. jsonschema_suite requires + either Python 2.7 or for you to install argparse. You can do so by + running `pip install argparse`, `easy_install argparse` or by + downloading argparse and running `python2.6 setup.py install`. + + See https://pypi.python.org/pypi/argparse for details. 
+ """.strip("\n"))) + sys.exit(1) + +import errno +import fnmatch +import json +import os +import random +import shutil +import unittest +import warnings + +if getattr(unittest, "skipIf", None) is None: + unittest.skipIf = lambda cond, msg : lambda fn : fn + +try: + import jsonschema +except ImportError: + jsonschema = None +else: + validators = getattr( + jsonschema.validators, "validators", jsonschema.validators + ) + + +ROOT_DIR = os.path.join( + os.path.dirname(__file__), os.pardir).rstrip("__pycache__") +SUITE_ROOT_DIR = os.path.join(ROOT_DIR, "tests") + +REMOTES = { + "integer.json": {"type": "integer"}, + "subSchemas.json": { + "integer": {"type": "integer"}, + "refToInteger": {"$ref": "#/integer"}, + }, + "folder/folderInteger.json": {"type": "integer"} +} +REMOTES_DIR = os.path.join(ROOT_DIR, "remotes") + +TESTSUITE_SCHEMA = { + "$schema": "http://json-schema.org/draft-03/schema#", + "type": "array", + "items": { + "type": "object", + "properties": { + "description": {"type": "string", "required": True}, + "schema": {"required": True}, + "tests": { + "type": "array", + "items": { + "type": "object", + "properties": { + "description": {"type": "string", "required": True}, + "data": {"required": True}, + "valid": {"type": "boolean", "required": True} + }, + "additionalProperties": False + }, + "minItems": 1 + } + }, + "additionalProperties": False, + "minItems": 1 + } +} + + +def files(paths): + for path in paths: + with open(path) as test_file: + yield json.load(test_file) + + +def groups(paths): + for test_file in files(paths): + for group in test_file: + yield group + + +def cases(paths): + for test_group in groups(paths): + for test in test_group["tests"]: + test["schema"] = test_group["schema"] + yield test + + +def collect(root_dir): + for root, dirs, files in os.walk(root_dir): + for filename in fnmatch.filter(files, "*.json"): + yield os.path.join(root, filename) + + +class SanityTests(unittest.TestCase): + @classmethod + def setUpClass(cls): + print("Looking for tests in %s" % SUITE_ROOT_DIR) + cls.test_files = list(collect(SUITE_ROOT_DIR)) + print("Found %s test files" % len(cls.test_files)) + assert cls.test_files, "Didn't find the test files!" + + def test_all_files_are_valid_json(self): + for path in self.test_files: + with open(path) as test_file: + try: + json.load(test_file) + except ValueError as error: + self.fail("%s contains invalid JSON (%s)" % (path, error)) + + def test_all_descriptions_have_reasonable_length(self): + for case in cases(self.test_files): + descript = case["description"] + self.assertLess( + len(descript), + 60, + "%r is too long! 
(keep it to less than 60 chars)" % (descript,) + ) + + def test_all_descriptions_are_unique(self): + for group in groups(self.test_files): + descriptions = set(test["description"] for test in group["tests"]) + self.assertEqual( + len(descriptions), + len(group["tests"]), + "%r contains a duplicate description" % (group,) + ) + + @unittest.skipIf(jsonschema is None, "Validation library not present!") + def test_all_schemas_are_valid(self): + for schema in os.listdir(SUITE_ROOT_DIR): + schema_validator = validators.get(schema) + if schema_validator is not None: + test_files = collect(os.path.join(SUITE_ROOT_DIR, schema)) + for case in cases(test_files): + try: + schema_validator.check_schema(case["schema"]) + except jsonschema.SchemaError as error: + self.fail("%s contains an invalid schema (%s)" % + (case, error)) + else: + warnings.warn("No schema validator for %s" % schema) + + @unittest.skipIf(jsonschema is None, "Validation library not present!") + def test_suites_are_valid(self): + validator = jsonschema.Draft3Validator(TESTSUITE_SCHEMA) + for tests in files(self.test_files): + try: + validator.validate(tests) + except jsonschema.ValidationError as error: + self.fail(str(error)) + + def test_remote_schemas_are_updated(self): + for url, schema in REMOTES.items(): + filepath = os.path.join(REMOTES_DIR, url) + with open(filepath) as schema_file: + self.assertEqual(json.load(schema_file), schema) + + +def main(arguments): + if arguments.command == "check": + suite = unittest.TestLoader().loadTestsFromTestCase(SanityTests) + result = unittest.TextTestRunner(verbosity=2).run(suite) + sys.exit(not result.wasSuccessful()) + elif arguments.command == "flatten": + selected_cases = [case for case in cases(collect(arguments.version))] + + if arguments.randomize: + random.shuffle(selected_cases) + + json.dump(selected_cases, sys.stdout, indent=4, sort_keys=True) + elif arguments.command == "remotes": + json.dump(REMOTES, sys.stdout, indent=4, sort_keys=True) + elif arguments.command == "dump_remotes": + if arguments.update: + shutil.rmtree(arguments.out_dir, ignore_errors=True) + + try: + os.makedirs(arguments.out_dir) + except OSError as e: + if e.errno == errno.EEXIST: + print("%s already exists. Aborting." % arguments.out_dir) + sys.exit(1) + raise + + for url, schema in REMOTES.items(): + filepath = os.path.join(arguments.out_dir, url) + + try: + os.makedirs(os.path.dirname(filepath)) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + with open(filepath, "wb") as out_file: + json.dump(schema, out_file, indent=4, sort_keys=True) + elif arguments.command == "serve": + try: + from flask import Flask, jsonify + except ImportError: + print(textwrap.dedent(""" + The Flask library is required to serve the remote schemas. + + You can install it by running `pip install Flask`. + + Alternatively, see the `jsonschema_suite remotes` or + `jsonschema_suite dump_remotes` commands to create static files + that can be served with your own web server. 
+ """.strip("\n"))) + sys.exit(1) + + app = Flask(__name__) + + @app.route("/") + def serve_path(path): + if path in REMOTES: + return jsonify(REMOTES[path]) + return "Document does not exist.", 404 + + app.run(port=1234) + + +parser = argparse.ArgumentParser( + description="JSON Schema Test Suite utilities", +) +subparsers = parser.add_subparsers(help="utility commands", dest="command") + +check = subparsers.add_parser("check", help="Sanity check the test suite.") + +flatten = subparsers.add_parser( + "flatten", + help="Output a flattened file containing a selected version's test cases." +) +flatten.add_argument( + "--randomize", + action="store_true", + help="Randomize the order of the outputted cases.", +) +flatten.add_argument( + "version", help="The directory containing the version to output", +) + +remotes = subparsers.add_parser( + "remotes", + help="Output the expected URLs and their associated schemas for remote " + "ref tests as a JSON object." +) + +dump_remotes = subparsers.add_parser( + "dump_remotes", help="Dump the remote ref schemas into a file tree", +) +dump_remotes.add_argument( + "--update", + action="store_true", + help="Update the remotes in an existing directory.", +) +dump_remotes.add_argument( + "--out-dir", + default=REMOTES_DIR, + type=os.path.abspath, + help="The output directory to create as the root of the file tree", +) + +serve = subparsers.add_parser( + "serve", + help="Start a webserver to serve schemas used by remote ref tests." +) + +if __name__ == "__main__": + main(parser.parse_args()) diff --git a/src/s3select/rapidjson/bin/jsonschema/remotes/folder/folderInteger.json b/src/s3select/rapidjson/bin/jsonschema/remotes/folder/folderInteger.json new file mode 100644 index 000000000..dbe5c758e --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/remotes/folder/folderInteger.json @@ -0,0 +1,3 @@ +{ + "type": "integer" +} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonschema/remotes/integer.json b/src/s3select/rapidjson/bin/jsonschema/remotes/integer.json new file mode 100644 index 000000000..dbe5c758e --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/remotes/integer.json @@ -0,0 +1,3 @@ +{ + "type": "integer" +} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonschema/remotes/subSchemas.json b/src/s3select/rapidjson/bin/jsonschema/remotes/subSchemas.json new file mode 100644 index 000000000..8b6d8f842 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/remotes/subSchemas.json @@ -0,0 +1,8 @@ +{ + "integer": { + "type": "integer" + }, + "refToInteger": { + "$ref": "#/integer" + } +} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/additionalItems.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/additionalItems.json new file mode 100644 index 000000000..6d4bff51c --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/additionalItems.json @@ -0,0 +1,82 @@ +[ + { + "description": "additionalItems as schema", + "schema": { + "items": [], + "additionalItems": {"type": "integer"} + }, + "tests": [ + { + "description": "additional items match schema", + "data": [ 1, 2, 3, 4 ], + "valid": true + }, + { + "description": "additional items do not match schema", + "data": [ 1, 2, 3, "foo" ], + "valid": false + } + ] + }, + { + "description": "items is schema, no additionalItems", + "schema": { + "items": {}, + "additionalItems": false + }, + "tests": [ + { + "description": "all items match schema", + "data": [ 1, 2, 3, 4, 5 ], + "valid": 
true + } + ] + }, + { + "description": "array of items with no additionalItems", + "schema": { + "items": [{}, {}, {}], + "additionalItems": false + }, + "tests": [ + { + "description": "no additional items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "additionalItems as false without items", + "schema": {"additionalItems": false}, + "tests": [ + { + "description": + "items defaults to empty schema so everything is valid", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "additionalItems are allowed by default", + "schema": {"items": []}, + "tests": [ + { + "description": "only the first items are validated", + "data": [1, "foo", false], + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/additionalProperties.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/additionalProperties.json new file mode 100644 index 000000000..40831f9e9 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/additionalProperties.json @@ -0,0 +1,88 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + "valid": true + } + ] + }, + { + "description": + "additionalProperties allows a schema which should validate", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": + "additionalProperties can exist by itself", + "schema": { + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": "additionalProperties are allowed by default", + "schema": {"properties": {"foo": {}, "bar": {}}}, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/default.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/default.json new file mode 100644 index 000000000..17629779f --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/default.json @@ -0,0 +1,49 @@ +[ + { + "description": "invalid type for default", + "schema": { + "properties": 
{ + "foo": { + "type": "integer", + "default": [] + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"foo": 13}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "invalid string value for default", + "schema": { + "properties": { + "bar": { + "type": "string", + "minLength": 4, + "default": "bad" + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"bar": "good"}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/dependencies.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/dependencies.json new file mode 100644 index 000000000..2f6ae489a --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/dependencies.json @@ -0,0 +1,108 @@ +[ + { + "description": "dependencies", + "schema": { + "dependencies": {"bar": "foo"} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "multiple dependencies", + "schema": { + "dependencies": {"quux": ["foo", "bar"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "multiple dependencies subschema", + "schema": { + "dependencies": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/disallow.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/disallow.json new file mode 100644 index 000000000..a5c9d90cc --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/disallow.json @@ -0,0 +1,80 @@ +[ + { + "description": "disallow", + "schema": { + "disallow": "integer" + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "multiple disallow", + "schema": { + "disallow": ["integer", "boolean"] + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": 
"mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "multiple disallow subschema", + "schema": { + "disallow": + ["string", + { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + }] + }, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": "foo", + "valid": false + }, + { + "description": "other mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/divisibleBy.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/divisibleBy.json new file mode 100644 index 000000000..ef7cc1489 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/divisibleBy.json @@ -0,0 +1,60 @@ +[ + { + "description": "by int", + "schema": {"divisibleBy": 2}, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": {"divisibleBy": 1.5}, + "tests": [ + { + "description": "zero is divisible by anything (except 0)", + "data": 0, + "valid": true + }, + { + "description": "4.5 is divisible by 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not divisible by 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", + "schema": {"divisibleBy": 0.0001}, + "tests": [ + { + "description": "0.0075 is divisible by 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not divisible by 0.0001", + "data": 0.00751, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/enum.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/enum.json new file mode 100644 index 000000000..0c83f0804 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/enum.json @@ -0,0 +1,71 @@ +[ + { + "description": "simple enum validation", + "schema": {"enum": [1, 2, 3]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum validation", + "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"], "required":true} + } + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + "valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + } +] diff --git 
a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/extends.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/extends.json new file mode 100644 index 000000000..909bce575 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/extends.json @@ -0,0 +1,94 @@ +[ + { + "description": "extends", + "schema": { + "properties": {"bar": {"type": "integer", "required": true}}, + "extends": { + "properties": { + "foo": {"type": "string", "required": true} + } + } + }, + "tests": [ + { + "description": "extends", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch extends", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch extended", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "multiple extends", + "schema": { + "properties": {"bar": {"type": "integer", "required": true}}, + "extends" : [ + { + "properties": { + "foo": {"type": "string", "required": true} + } + }, + { + "properties": { + "baz": {"type": "null", "required": true} + } + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch first extends", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second extends", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + "data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "extends simple types", + "schema": { + "minimum": 20, + "extends": {"maximum": 30} + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch extends", + "data": 35, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/items.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/items.json new file mode 100644 index 000000000..f5e18a138 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/items.json @@ -0,0 +1,46 @@ +[ + { + "description": "a schema given for items", + "schema": { + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "an array of schemas for items", + "schema": { + "items": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/maxItems.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/maxItems.json new file mode 100644 index 000000000..3b53a6b37 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/maxItems.json @@ -0,0 +1,28 @@ +[ + { + "description": "maxItems validation", + "schema": {"maxItems": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": true 
+ } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/maxLength.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/maxLength.json new file mode 100644 index 000000000..4de42bcab --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/maxLength.json @@ -0,0 +1,33 @@ +[ + { + "description": "maxLength validation", + "schema": {"maxLength": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + "data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 10, + "valid": true + }, + { + "description": "two supplementary Unicode code points is long enough", + "data": "\uD83D\uDCA9\uD83D\uDCA9", + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/maximum.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/maximum.json new file mode 100644 index 000000000..86c7b89c9 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/maximum.json @@ -0,0 +1,42 @@ +[ + { + "description": "maximum validation", + "schema": {"maximum": 3.0}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "exclusiveMaximum validation", + "schema": { + "maximum": 3.0, + "exclusiveMaximum": true + }, + "tests": [ + { + "description": "below the maximum is still valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/minItems.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/minItems.json new file mode 100644 index 000000000..ed5118815 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/minItems.json @@ -0,0 +1,28 @@ +[ + { + "description": "minItems validation", + "schema": {"minItems": 1}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/minLength.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/minLength.json new file mode 100644 index 000000000..3f09158de --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/minLength.json @@ -0,0 +1,33 @@ +[ + { + "description": "minLength validation", + "schema": {"minLength": 2}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + }, + { + "description": "one supplementary Unicode code point is not long enough", + "data": "\uD83D\uDCA9", + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/minimum.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/minimum.json new file 
mode 100644 index 000000000..d5bf000bc --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/minimum.json @@ -0,0 +1,42 @@ +[ + { + "description": "minimum validation", + "schema": {"minimum": 1.1}, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "exclusiveMinimum validation", + "schema": { + "minimum": 1.1, + "exclusiveMinimum": true + }, + "tests": [ + { + "description": "above the minimum is still valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/bignum.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/bignum.json new file mode 100644 index 000000000..ccc7c17fe --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/bignum.json @@ -0,0 +1,107 @@ +[ + { + "description": "integer", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "a bignum is an integer", + "data": 12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": {"type": "number"}, + "tests": [ + { + "description": "a bignum is a number", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "integer", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "a negative bignum is an integer", + "data": -12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": {"type": "number"}, + "tests": [ + { + "description": "a negative bignum is a number", + "data": -98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "string", + "schema": {"type": "string"}, + "tests": [ + { + "description": "a bignum is not a string", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": false + } + ] + }, + { + "description": "integer comparison", + "schema": {"maximum": 18446744073709551615}, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision", + "schema": { + "maximum": 972783798187987123879878123.18878137, + "exclusiveMaximum": true + }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 972783798187987123879878123.188781371, + "valid": false + } + ] + }, + { + "description": "integer comparison", + "schema": {"minimum": -18446744073709551615}, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision on negative numbers", + "schema": { + "minimum": -972783798187987123879878123.18878137, + "exclusiveMinimum": true + }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -972783798187987123879878123.188781371, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/format.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/format.json new file mode 100644 index 000000000..3ca7319dd 
--- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/format.json @@ -0,0 +1,222 @@ +[ + { + "description": "validation of regular expressions", + "schema": {"format": "regex"}, + "tests": [ + { + "description": "a valid regular expression", + "data": "([abc])+\\s+$", + "valid": true + }, + { + "description": "a regular expression with unclosed parens is invalid", + "data": "^(abc]", + "valid": false + } + ] + }, + { + "description": "validation of date-time strings", + "schema": {"format": "date-time"}, + "tests": [ + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "an invalid date-time string", + "data": "06/19/1963 08:30:06 PST", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350T01:01:01", + "valid": false + } + ] + }, + { + "description": "validation of date strings", + "schema": {"format": "date"}, + "tests": [ + { + "description": "a valid date string", + "data": "1963-06-19", + "valid": true + }, + { + "description": "an invalid date string", + "data": "06/19/1963", + "valid": false + } + ] + }, + { + "description": "validation of time strings", + "schema": {"format": "time"}, + "tests": [ + { + "description": "a valid time string", + "data": "08:30:06", + "valid": true + }, + { + "description": "an invalid time string", + "data": "8:30 AM", + "valid": false + } + ] + }, + { + "description": "validation of URIs", + "schema": {"format": "uri"}, + "tests": [ + { + "description": "a valid URI", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid protocol-relative URI", + "data": "//foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "an invalid URI", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "an invalid URI though valid URI reference", + "data": "abc", + "valid": false + } + ] + }, + { + "description": "validation of e-mail addresses", + "schema": {"format": "email"}, + "tests": [ + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + } + ] + }, + { + "description": "validation of IP addresses", + "schema": {"format": "ip-address"}, + "tests": [ + { + "description": "a valid IP address", + "data": "192.168.0.1", + "valid": true + }, + { + "description": "an IP address with too many components", + "data": "127.0.0.0.1", + "valid": false + }, + { + "description": "an IP address with out-of-range values", + "data": "256.256.256.256", + "valid": false + } + ] + }, + { + "description": "validation of IPv6 addresses", + "schema": {"format": "ipv6"}, + "tests": [ + { + "description": "a valid IPv6 address", + "data": "::1", + "valid": true + }, + { + "description": "an IPv6 address with out-of-range values", + "data": "12345::", + "valid": false + }, + { + "description": "an IPv6 address with too many components", + "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", + "valid": false + }, + { + "description": "an IPv6 address containing illegal characters", + "data": "::laptop", + "valid": false + } + ] + }, + { + "description": "validation of host names", + "schema": {"format": "host-name"}, + "tests": [ + { + "description": "a valid host name", + "data": "www.example.com", + "valid": true + }, + { + "description": "a host name starting with an illegal character", + "data": "-a-host-name-that-starts-with--", + 
"valid": false + }, + { + "description": "a host name containing illegal characters", + "data": "not_a_valid_host_name", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", + "valid": false + } + ] + }, + { + "description": "validation of CSS colors", + "schema": {"format": "color"}, + "tests": [ + { + "description": "a valid CSS color name", + "data": "fuchsia", + "valid": true + }, + { + "description": "a valid six-digit CSS color code", + "data": "#CC8899", + "valid": true + }, + { + "description": "a valid three-digit CSS color code", + "data": "#C89", + "valid": true + }, + { + "description": "an invalid CSS color code", + "data": "#00332520", + "valid": false + }, + { + "description": "an invalid CSS color name", + "data": "puce", + "valid": false + }, + { + "description": "a CSS color name containing invalid characters", + "data": "light_grayish_red-violet", + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/jsregex.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/jsregex.json new file mode 100644 index 000000000..03fe97724 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/jsregex.json @@ -0,0 +1,18 @@ +[ + { + "description": "ECMA 262 regex dialect recognition", + "schema": { "format": "regex" }, + "tests": [ + { + "description": "[^] is a valid regex", + "data": "[^]", + "valid": true + }, + { + "description": "ECMA 262 has no support for lookbehind", + "data": "(?<=foo)bar", + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/zeroTerminatedFloats.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/zeroTerminatedFloats.json new file mode 100644 index 000000000..9b50ea277 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/optional/zeroTerminatedFloats.json @@ -0,0 +1,15 @@ +[ + { + "description": "some languages do not distinguish between different types of numeric value", + "schema": { + "type": "integer" + }, + "tests": [ + { + "description": "a float is not an integer even without fractional part", + "data": 1.0, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/pattern.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/pattern.json new file mode 100644 index 000000000..25e729973 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/pattern.json @@ -0,0 +1,34 @@ +[ + { + "description": "pattern validation", + "schema": {"pattern": "^a*$"}, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + "description": "ignores non-strings", + "data": true, + "valid": true + } + ] + }, + { + "description": "pattern is not anchored", + "schema": {"pattern": "a+"}, + "tests": [ + { + "description": "matches a substring", + "data": "xxaayy", + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/patternProperties.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/patternProperties.json new file mode 100644 index 000000000..18586e5da --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/patternProperties.json @@ -0,0 +1,110 @@ +[ + { + "description": + "patternProperties validates properties 
matching a regex", + "schema": { + "patternProperties": { + "f.*o": {"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": 12, + "valid": true + } + ] + }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": "bar"}, + "valid": false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "patternProperties": { + "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + "data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/properties.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/properties.json new file mode 100644 index 000000000..cd1644dcd --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/properties.json @@ -0,0 +1,92 @@ +[ + { + "description": "object properties validation", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + "description": "ignores non-objects", + "data": [], + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { 
+ "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": []}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + "data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + "description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/ref.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/ref.json new file mode 100644 index 000000000..903ecb6bc --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/ref.json @@ -0,0 +1,159 @@ +[ + { + "description": "root pointer ref", + "schema": { + "properties": { + "foo": {"$ref": "#"} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "match", + "data": {"foo": false}, + "valid": true + }, + { + "description": "recursive match", + "data": {"foo": {"foo": false}}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": false}, + "valid": false + }, + { + "description": "recursive mismatch", + "data": {"foo": {"bar": false}}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to object", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"$ref": "#/properties/foo"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to array", + "schema": { + "items": [ + {"type": "integer"}, + {"$ref": "#/items/0"} + ] + }, + "tests": [ + { + "description": "match array", + "data": [1, 2], + "valid": true + }, + { + "description": "mismatch array", + "data": [1, "foo"], + "valid": false + } + ] + }, + { + "description": "escaped pointer ref", + "schema": { + "tilda~field": {"type": "integer"}, + "slash/field": {"type": "integer"}, + "percent%field": {"type": "integer"}, + "properties": { + "tilda": {"$ref": "#/tilda~0field"}, + "slash": {"$ref": "#/slash~1field"}, + "percent": {"$ref": "#/percent%25field"} + } + }, + "tests": [ + { + "description": "slash invalid", + "data": {"slash": "aoeu"}, + "valid": false + }, + { + "description": "tilda invalid", + "data": {"tilda": "aoeu"}, + "valid": false + }, + { + "description": "percent invalid", + "data": {"percent": "aoeu"}, + "valid": false + }, + { + "description": "slash valid", + "data": {"slash": 123}, + "valid": true + }, + { + "description": "tilda valid", + "data": {"tilda": 123}, + "valid": true + }, + { + "description": "percent valid", + "data": {"percent": 123}, + "valid": true + } + ] + }, + { + "description": "nested refs", + "schema": { + "definitions": { + "a": {"type": "integer"}, + "b": {"$ref": "#/definitions/a"}, + "c": {"$ref": "#/definitions/b"} + }, + "$ref": "#/definitions/c" + }, + "tests": [ + { + "description": "nested ref valid", + "data": 5, + "valid": true + }, + { + "description": "nested ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "remote ref, containing refs itself", + "schema": {"$ref": 
"http://json-schema.org/draft-03/schema#"}, + "tests": [ + { + "description": "remote ref valid", + "data": {"items": {"type": "integer"}}, + "valid": true + }, + { + "description": "remote ref invalid", + "data": {"items": {"type": 1}}, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/refRemote.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/refRemote.json new file mode 100644 index 000000000..4ca804732 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/refRemote.json @@ -0,0 +1,74 @@ +[ + { + "description": "remote ref", + "schema": {"$ref": "http://localhost:1234/integer.json"}, + "tests": [ + { + "description": "remote ref valid", + "data": 1, + "valid": true + }, + { + "description": "remote ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "fragment within remote ref", + "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"}, + "tests": [ + { + "description": "remote fragment valid", + "data": 1, + "valid": true + }, + { + "description": "remote fragment invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref within remote ref", + "schema": { + "$ref": "http://localhost:1234/subSchemas.json#/refToInteger" + }, + "tests": [ + { + "description": "ref within ref valid", + "data": 1, + "valid": true + }, + { + "description": "ref within ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "change resolution scope", + "schema": { + "id": "http://localhost:1234/", + "items": { + "id": "folder/", + "items": {"$ref": "folderInteger.json"} + } + }, + "tests": [ + { + "description": "changed scope ref valid", + "data": [[1]], + "valid": true + }, + { + "description": "changed scope ref invalid", + "data": [["a"]], + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/required.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/required.json new file mode 100644 index 000000000..aaaf02427 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/required.json @@ -0,0 +1,53 @@ +[ + { + "description": "required validation", + "schema": { + "properties": { + "foo": {"required" : true}, + "bar": {} + } + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + } + ] + }, + { + "description": "required default validation", + "schema": { + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + }, + { + "description": "required explicitly false validation", + "schema": { + "properties": { + "foo": {"required": false} + } + }, + "tests": [ + { + "description": "not required if required is false", + "data": {}, + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/type.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/type.json new file mode 100644 index 000000000..337da1206 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/type.json @@ -0,0 +1,474 @@ +[ + { + "description": "integer type matches integers", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an 
integer", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": {"type": "number"}, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float is a number", + "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": {"type": "string"}, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": {"type": "object"}, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": "a float is not an object", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + "data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": {"type": "array"}, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": {"type": "boolean"}, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": 
false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "a boolean is a boolean", + "data": true, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": {"type": "null"}, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "a boolean is not null", + "data": true, + "valid": false + }, + { + "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "any type matches any type", + "schema": {"type": "any"}, + "tests": [ + { + "description": "any type includes integers", + "data": 1, + "valid": true + }, + { + "description": "any type includes float", + "data": 1.1, + "valid": true + }, + { + "description": "any type includes string", + "data": "foo", + "valid": true + }, + { + "description": "any type includes object", + "data": {}, + "valid": true + }, + { + "description": "any type includes array", + "data": [], + "valid": true + }, + { + "description": "any type includes boolean", + "data": true, + "valid": true + }, + { + "description": "any type includes null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be specified in an array", + "schema": {"type": ["integer", "string"]}, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": "types can include schemas", + "schema": { + "type": [ + "array", + {"type": "object"} + ] + }, + "tests": [ + { + "description": "an integer is invalid", + "data": 1, + "valid": false + }, + { + "description": "a string is invalid", + "data": "foo", + "valid": false + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is valid", + "data": {}, + "valid": true + }, + { + "description": "an array is valid", + "data": [], + "valid": true + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + }, + { + "description": + "when types includes a schema it should fully validate the schema", + "schema": { + "type": [ + "integer", + { + "properties": { + "foo": {"type": "null"} + } + } + ] + }, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "an object is valid only if it is fully valid", + "data": {"foo": null}, + "valid": true + }, + { + "description": "an 
object is invalid otherwise", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "types from separate schemas are merged", + "schema": { + "type": [ + {"type": ["string"]}, + {"type": ["array", "null"]} + ] + }, + "tests": [ + { + "description": "an integer is invalid", + "data": 1, + "valid": false + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "an array is valid", + "data": [1, 2, 3], + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft3/uniqueItems.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/uniqueItems.json new file mode 100644 index 000000000..c1f4ab99c --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft3/uniqueItems.json @@ -0,0 +1,79 @@ +[ + { + "description": "uniqueItems validation", + "schema": {"uniqueItems": true}, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/additionalItems.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/additionalItems.json new file mode 100644 index 000000000..521745c8d --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/additionalItems.json @@ -0,0 +1,82 @@ +[ + { + "description": "additionalItems as schema", + "schema": { + "items": [{}], + "additionalItems": {"type": "integer"} + }, + "tests": [ + { + "description": "additional items match schema", + "data": [ null, 2, 3, 4 ], + "valid": true + }, + { + "description": "additional items do not match schema", + "data": [ null, 2, 3, "foo" ], + "valid": false + } + ] + }, + { + "description": "items is schema, no additionalItems", + "schema": { + "items": {}, + "additionalItems": false + }, + "tests": [ + { + "description": "all items match schema", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + } + ] + }, + { + "description": "array of items with no additionalItems", + "schema": { + "items": [{}, {}, {}], + "additionalItems": false + }, + 
"tests": [ + { + "description": "no additional items present", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "additional items are not permitted", + "data": [ 1, 2, 3, 4 ], + "valid": false + } + ] + }, + { + "description": "additionalItems as false without items", + "schema": {"additionalItems": false}, + "tests": [ + { + "description": + "items defaults to empty schema so everything is valid", + "data": [ 1, 2, 3, 4, 5 ], + "valid": true + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "additionalItems are allowed by default", + "schema": {"items": [{"type": "integer"}]}, + "tests": [ + { + "description": "only the first item is validated", + "data": [1, "foo", false], + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/additionalProperties.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/additionalProperties.json new file mode 100644 index 000000000..40831f9e9 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/additionalProperties.json @@ -0,0 +1,88 @@ +[ + { + "description": + "additionalProperties being false does not allow other properties", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "patternProperties": { "^v": {} }, + "additionalProperties": false + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : "boom"}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": [1, 2, 3], + "valid": true + }, + { + "description": "patternProperties are not additional properties", + "data": {"foo":1, "vroom": 2}, + "valid": true + } + ] + }, + { + "description": + "additionalProperties allows a schema which should validate", + "schema": { + "properties": {"foo": {}, "bar": {}}, + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "no additional properties is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "an additional valid property is valid", + "data": {"foo" : 1, "bar" : 2, "quux" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1, "bar" : 2, "quux" : 12}, + "valid": false + } + ] + }, + { + "description": + "additionalProperties can exist by itself", + "schema": { + "additionalProperties": {"type": "boolean"} + }, + "tests": [ + { + "description": "an additional valid property is valid", + "data": {"foo" : true}, + "valid": true + }, + { + "description": "an additional invalid property is invalid", + "data": {"foo" : 1}, + "valid": false + } + ] + }, + { + "description": "additionalProperties are allowed by default", + "schema": {"properties": {"foo": {}, "bar": {}}}, + "tests": [ + { + "description": "additional properties are allowed", + "data": {"foo": 1, "bar": 2, "quux": true}, + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/allOf.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/allOf.json new file mode 100644 index 000000000..bbb5f89e4 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/allOf.json @@ -0,0 +1,112 @@ +[ + { + "description": "allOf", + "schema": { + "allOf": [ + { + "properties": { + "bar": {"type": "integer"} + }, + "required": ["bar"] + }, + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + } 
+ ] + }, + "tests": [ + { + "description": "allOf", + "data": {"foo": "baz", "bar": 2}, + "valid": true + }, + { + "description": "mismatch second", + "data": {"foo": "baz"}, + "valid": false + }, + { + "description": "mismatch first", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "wrong type", + "data": {"foo": "baz", "bar": "quux"}, + "valid": false + } + ] + }, + { + "description": "allOf with base schema", + "schema": { + "properties": {"bar": {"type": "integer"}}, + "required": ["bar"], + "allOf" : [ + { + "properties": { + "foo": {"type": "string"} + }, + "required": ["foo"] + }, + { + "properties": { + "baz": {"type": "null"} + }, + "required": ["baz"] + } + ] + }, + "tests": [ + { + "description": "valid", + "data": {"foo": "quux", "bar": 2, "baz": null}, + "valid": true + }, + { + "description": "mismatch base schema", + "data": {"foo": "quux", "baz": null}, + "valid": false + }, + { + "description": "mismatch first allOf", + "data": {"bar": 2, "baz": null}, + "valid": false + }, + { + "description": "mismatch second allOf", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "mismatch both", + "data": {"bar": 2}, + "valid": false + } + ] + }, + { + "description": "allOf simple types", + "schema": { + "allOf": [ + {"maximum": 30}, + {"minimum": 20} + ] + }, + "tests": [ + { + "description": "valid", + "data": 25, + "valid": true + }, + { + "description": "mismatch one", + "data": 35, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/anyOf.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/anyOf.json new file mode 100644 index 000000000..a58714afd --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/anyOf.json @@ -0,0 +1,68 @@ +[ + { + "description": "anyOf", + "schema": { + "anyOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first anyOf valid", + "data": 1, + "valid": true + }, + { + "description": "second anyOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both anyOf valid", + "data": 3, + "valid": true + }, + { + "description": "neither anyOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "anyOf with base schema", + "schema": { + "type": "string", + "anyOf" : [ + { + "maxLength": 2 + }, + { + "minLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one anyOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both anyOf invalid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/default.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/default.json new file mode 100644 index 000000000..17629779f --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/default.json @@ -0,0 +1,49 @@ +[ + { + "description": "invalid type for default", + "schema": { + "properties": { + "foo": { + "type": "integer", + "default": [] + } + } + }, + "tests": [ + { + "description": "valid when property is specified", + "data": {"foo": 13}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + }, + { + "description": "invalid string value for default", + "schema": { + "properties": { + "bar": { + "type": "string", + "minLength": 4, + "default": "bad" + } + } + }, + "tests": [ + { + "description": "valid when property is 
specified", + "data": {"bar": "good"}, + "valid": true + }, + { + "description": "still valid when the invalid default is used", + "data": {}, + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/definitions.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/definitions.json new file mode 100644 index 000000000..cf935a321 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/definitions.json @@ -0,0 +1,32 @@ +[ + { + "description": "valid definition", + "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, + "tests": [ + { + "description": "valid definition schema", + "data": { + "definitions": { + "foo": {"type": "integer"} + } + }, + "valid": true + } + ] + }, + { + "description": "invalid definition", + "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, + "tests": [ + { + "description": "invalid definition schema", + "data": { + "definitions": { + "foo": {"type": 1} + } + }, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/dependencies.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/dependencies.json new file mode 100644 index 000000000..7b9b16a7e --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/dependencies.json @@ -0,0 +1,113 @@ +[ + { + "description": "dependencies", + "schema": { + "dependencies": {"bar": ["foo"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependant", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "with dependency", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"bar": 2}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "multiple dependencies", + "schema": { + "dependencies": {"quux": ["foo", "bar"]} + }, + "tests": [ + { + "description": "neither", + "data": {}, + "valid": true + }, + { + "description": "nondependants", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "with dependencies", + "data": {"foo": 1, "bar": 2, "quux": 3}, + "valid": true + }, + { + "description": "missing dependency", + "data": {"foo": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing other dependency", + "data": {"bar": 1, "quux": 2}, + "valid": false + }, + { + "description": "missing both dependencies", + "data": {"quux": 1}, + "valid": false + } + ] + }, + { + "description": "multiple dependencies subschema", + "schema": { + "dependencies": { + "bar": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "integer"} + } + } + } + }, + "tests": [ + { + "description": "valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "no dependency", + "data": {"foo": "quux"}, + "valid": true + }, + { + "description": "wrong type", + "data": {"foo": "quux", "bar": 2}, + "valid": false + }, + { + "description": "wrong type other", + "data": {"foo": 2, "bar": "quux"}, + "valid": false + }, + { + "description": "wrong type both", + "data": {"foo": "quux", "bar": "quux"}, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/enum.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/enum.json new file mode 100644 index 000000000..f124436a7 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/enum.json @@ -0,0 +1,72 @@ +[ + { + "description": "simple enum validation", + 
"schema": {"enum": [1, 2, 3]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": 1, + "valid": true + }, + { + "description": "something else is invalid", + "data": 4, + "valid": false + } + ] + }, + { + "description": "heterogeneous enum validation", + "schema": {"enum": [6, "foo", [], true, {"foo": 12}]}, + "tests": [ + { + "description": "one of the enum is valid", + "data": [], + "valid": true + }, + { + "description": "something else is invalid", + "data": null, + "valid": false + }, + { + "description": "objects are deep compared", + "data": {"foo": false}, + "valid": false + } + ] + }, + { + "description": "enums in properties", + "schema": { + "type":"object", + "properties": { + "foo": {"enum":["foo"]}, + "bar": {"enum":["bar"]} + }, + "required": ["bar"] + }, + "tests": [ + { + "description": "both properties are valid", + "data": {"foo":"foo", "bar":"bar"}, + "valid": true + }, + { + "description": "missing optional property is valid", + "data": {"bar":"bar"}, + "valid": true + }, + { + "description": "missing required property is invalid", + "data": {"foo":"foo"}, + "valid": false + }, + { + "description": "missing all properties is invalid", + "data": {}, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/items.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/items.json new file mode 100644 index 000000000..f5e18a138 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/items.json @@ -0,0 +1,46 @@ +[ + { + "description": "a schema given for items", + "schema": { + "items": {"type": "integer"} + }, + "tests": [ + { + "description": "valid items", + "data": [ 1, 2, 3 ], + "valid": true + }, + { + "description": "wrong type of items", + "data": [1, "x"], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": {"foo" : "bar"}, + "valid": true + } + ] + }, + { + "description": "an array of schemas for items", + "schema": { + "items": [ + {"type": "integer"}, + {"type": "string"} + ] + }, + "tests": [ + { + "description": "correct types", + "data": [ 1, "foo" ], + "valid": true + }, + { + "description": "wrong types", + "data": [ "foo", 1 ], + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxItems.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxItems.json new file mode 100644 index 000000000..3b53a6b37 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxItems.json @@ -0,0 +1,28 @@ +[ + { + "description": "maxItems validation", + "schema": {"maxItems": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": [1], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "too long is invalid", + "data": [1, 2, 3], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "foobar", + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxLength.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxLength.json new file mode 100644 index 000000000..811d35b25 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxLength.json @@ -0,0 +1,33 @@ +[ + { + "description": "maxLength validation", + "schema": {"maxLength": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": "f", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too long is invalid", + 
"data": "foo", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 100, + "valid": true + }, + { + "description": "two supplementary Unicode code points is long enough", + "data": "\uD83D\uDCA9\uD83D\uDCA9", + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxProperties.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxProperties.json new file mode 100644 index 000000000..d282446ad --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maxProperties.json @@ -0,0 +1,28 @@ +[ + { + "description": "maxProperties validation", + "schema": {"maxProperties": 2}, + "tests": [ + { + "description": "shorter is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "too long is invalid", + "data": {"foo": 1, "bar": 2, "baz": 3}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": "foobar", + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maximum.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maximum.json new file mode 100644 index 000000000..86c7b89c9 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/maximum.json @@ -0,0 +1,42 @@ +[ + { + "description": "maximum validation", + "schema": {"maximum": 3.0}, + "tests": [ + { + "description": "below the maximum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "above the maximum is invalid", + "data": 3.5, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "exclusiveMaximum validation", + "schema": { + "maximum": 3.0, + "exclusiveMaximum": true + }, + "tests": [ + { + "description": "below the maximum is still valid", + "data": 2.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 3.0, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minItems.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minItems.json new file mode 100644 index 000000000..ed5118815 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minItems.json @@ -0,0 +1,28 @@ +[ + { + "description": "minItems validation", + "schema": {"minItems": 1}, + "tests": [ + { + "description": "longer is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "exact length is valid", + "data": [1], + "valid": true + }, + { + "description": "too short is invalid", + "data": [], + "valid": false + }, + { + "description": "ignores non-arrays", + "data": "", + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minLength.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minLength.json new file mode 100644 index 000000000..3f09158de --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minLength.json @@ -0,0 +1,33 @@ +[ + { + "description": "minLength validation", + "schema": {"minLength": 2}, + "tests": [ + { + "description": "longer is valid", + "data": "foo", + "valid": true + }, + { + "description": "exact length is valid", + "data": "fo", + "valid": true + }, + { + "description": "too short is invalid", + "data": "f", + "valid": false + }, + { + "description": "ignores non-strings", + "data": 1, + "valid": true + }, + { + "description": "one supplementary Unicode code point is not long enough", + "data": "\uD83D\uDCA9", + 
"valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minProperties.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minProperties.json new file mode 100644 index 000000000..a72c7d293 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minProperties.json @@ -0,0 +1,28 @@ +[ + { + "description": "minProperties validation", + "schema": {"minProperties": 1}, + "tests": [ + { + "description": "longer is valid", + "data": {"foo": 1, "bar": 2}, + "valid": true + }, + { + "description": "exact length is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "too short is invalid", + "data": {}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": "", + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minimum.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minimum.json new file mode 100644 index 000000000..d5bf000bc --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/minimum.json @@ -0,0 +1,42 @@ +[ + { + "description": "minimum validation", + "schema": {"minimum": 1.1}, + "tests": [ + { + "description": "above the minimum is valid", + "data": 2.6, + "valid": true + }, + { + "description": "below the minimum is invalid", + "data": 0.6, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "x", + "valid": true + } + ] + }, + { + "description": "exclusiveMinimum validation", + "schema": { + "minimum": 1.1, + "exclusiveMinimum": true + }, + "tests": [ + { + "description": "above the minimum is still valid", + "data": 1.2, + "valid": true + }, + { + "description": "boundary point is invalid", + "data": 1.1, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/multipleOf.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/multipleOf.json new file mode 100644 index 000000000..ca3b76180 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/multipleOf.json @@ -0,0 +1,60 @@ +[ + { + "description": "by int", + "schema": {"multipleOf": 2}, + "tests": [ + { + "description": "int by int", + "data": 10, + "valid": true + }, + { + "description": "int by int fail", + "data": 7, + "valid": false + }, + { + "description": "ignores non-numbers", + "data": "foo", + "valid": true + } + ] + }, + { + "description": "by number", + "schema": {"multipleOf": 1.5}, + "tests": [ + { + "description": "zero is multiple of anything", + "data": 0, + "valid": true + }, + { + "description": "4.5 is multiple of 1.5", + "data": 4.5, + "valid": true + }, + { + "description": "35 is not multiple of 1.5", + "data": 35, + "valid": false + } + ] + }, + { + "description": "by small number", + "schema": {"multipleOf": 0.0001}, + "tests": [ + { + "description": "0.0075 is multiple of 0.0001", + "data": 0.0075, + "valid": true + }, + { + "description": "0.00751 is not multiple of 0.0001", + "data": 0.00751, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/not.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/not.json new file mode 100644 index 000000000..cbb7f46bf --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/not.json @@ -0,0 +1,96 @@ +[ + { + "description": "not", + "schema": { + "not": {"type": "integer"} + }, + "tests": [ + { + "description": "allowed", + "data": "foo", + "valid": true + }, + { + "description": "disallowed", + "data": 1, + "valid": false + } + ] + }, + { + "description": "not 
multiple types", + "schema": { + "not": {"type": ["integer", "boolean"]} + }, + "tests": [ + { + "description": "valid", + "data": "foo", + "valid": true + }, + { + "description": "mismatch", + "data": 1, + "valid": false + }, + { + "description": "other mismatch", + "data": true, + "valid": false + } + ] + }, + { + "description": "not more complex schema", + "schema": { + "not": { + "type": "object", + "properties": { + "foo": { + "type": "string" + } + } + } + }, + "tests": [ + { + "description": "match", + "data": 1, + "valid": true + }, + { + "description": "other match", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "mismatch", + "data": {"foo": "bar"}, + "valid": false + } + ] + }, + { + "description": "forbidden property", + "schema": { + "properties": { + "foo": { + "not": {} + } + } + }, + "tests": [ + { + "description": "property present", + "data": {"foo": 1, "bar": 2}, + "valid": false + }, + { + "description": "property absent", + "data": {"bar": 1, "baz": 2}, + "valid": true + } + ] + } + +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/oneOf.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/oneOf.json new file mode 100644 index 000000000..1eaa4e479 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/oneOf.json @@ -0,0 +1,68 @@ +[ + { + "description": "oneOf", + "schema": { + "oneOf": [ + { + "type": "integer" + }, + { + "minimum": 2 + } + ] + }, + "tests": [ + { + "description": "first oneOf valid", + "data": 1, + "valid": true + }, + { + "description": "second oneOf valid", + "data": 2.5, + "valid": true + }, + { + "description": "both oneOf valid", + "data": 3, + "valid": false + }, + { + "description": "neither oneOf valid", + "data": 1.5, + "valid": false + } + ] + }, + { + "description": "oneOf with base schema", + "schema": { + "type": "string", + "oneOf" : [ + { + "minLength": 2 + }, + { + "maxLength": 4 + } + ] + }, + "tests": [ + { + "description": "mismatch base schema", + "data": 3, + "valid": false + }, + { + "description": "one oneOf valid", + "data": "foobar", + "valid": true + }, + { + "description": "both oneOf valid", + "data": "foo", + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/bignum.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/bignum.json new file mode 100644 index 000000000..ccc7c17fe --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/bignum.json @@ -0,0 +1,107 @@ +[ + { + "description": "integer", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "a bignum is an integer", + "data": 12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": {"type": "number"}, + "tests": [ + { + "description": "a bignum is a number", + "data": 98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "integer", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "a negative bignum is an integer", + "data": -12345678910111213141516171819202122232425262728293031, + "valid": true + } + ] + }, + { + "description": "number", + "schema": {"type": "number"}, + "tests": [ + { + "description": "a negative bignum is a number", + "data": -98249283749234923498293171823948729348710298301928331, + "valid": true + } + ] + }, + { + "description": "string", + "schema": {"type": "string"}, + "tests": [ + { + "description": "a bignum is not a string", + "data": 
98249283749234923498293171823948729348710298301928331, + "valid": false + } + ] + }, + { + "description": "integer comparison", + "schema": {"maximum": 18446744073709551615}, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision", + "schema": { + "maximum": 972783798187987123879878123.18878137, + "exclusiveMaximum": true + }, + "tests": [ + { + "description": "comparison works for high numbers", + "data": 972783798187987123879878123.188781371, + "valid": false + } + ] + }, + { + "description": "integer comparison", + "schema": {"minimum": -18446744073709551615}, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -18446744073709551600, + "valid": true + } + ] + }, + { + "description": "float comparison with high precision on negative numbers", + "schema": { + "minimum": -972783798187987123879878123.18878137, + "exclusiveMinimum": true + }, + "tests": [ + { + "description": "comparison works for very negative numbers", + "data": -972783798187987123879878123.188781371, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/format.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/format.json new file mode 100644 index 000000000..aacfd1198 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/format.json @@ -0,0 +1,148 @@ +[ + { + "description": "validation of date-time strings", + "schema": {"format": "date-time"}, + "tests": [ + { + "description": "a valid date-time string", + "data": "1963-06-19T08:30:06.283185Z", + "valid": true + }, + { + "description": "an invalid date-time string", + "data": "06/19/1963 08:30:06 PST", + "valid": false + }, + { + "description": "only RFC3339 not all of ISO 8601 are valid", + "data": "2013-350T01:01:01", + "valid": false + } + ] + }, + { + "description": "validation of URIs", + "schema": {"format": "uri"}, + "tests": [ + { + "description": "a valid URI", + "data": "http://foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "a valid protocol-relative URI", + "data": "//foo.bar/?baz=qux#quux", + "valid": true + }, + { + "description": "an invalid URI", + "data": "\\\\WINDOWS\\fileshare", + "valid": false + }, + { + "description": "an invalid URI though valid URI reference", + "data": "abc", + "valid": false + } + ] + }, + { + "description": "validation of e-mail addresses", + "schema": {"format": "email"}, + "tests": [ + { + "description": "a valid e-mail address", + "data": "joe.bloggs@example.com", + "valid": true + }, + { + "description": "an invalid e-mail address", + "data": "2962", + "valid": false + } + ] + }, + { + "description": "validation of IP addresses", + "schema": {"format": "ipv4"}, + "tests": [ + { + "description": "a valid IP address", + "data": "192.168.0.1", + "valid": true + }, + { + "description": "an IP address with too many components", + "data": "127.0.0.0.1", + "valid": false + }, + { + "description": "an IP address with out-of-range values", + "data": "256.256.256.256", + "valid": false + }, + { + "description": "an IP address without 4 components", + "data": "127.0", + "valid": false + }, + { + "description": "an IP address as an integer", + "data": "0x7f000001", + "valid": false + } + ] + }, + { + "description": "validation of IPv6 addresses", + "schema": {"format": "ipv6"}, + "tests": [ + { + "description": "a valid IPv6 address", + "data": "::1", + 
"valid": true + }, + { + "description": "an IPv6 address with out-of-range values", + "data": "12345::", + "valid": false + }, + { + "description": "an IPv6 address with too many components", + "data": "1:1:1:1:1:1:1:1:1:1:1:1:1:1:1:1", + "valid": false + }, + { + "description": "an IPv6 address containing illegal characters", + "data": "::laptop", + "valid": false + } + ] + }, + { + "description": "validation of host names", + "schema": {"format": "hostname"}, + "tests": [ + { + "description": "a valid host name", + "data": "www.example.com", + "valid": true + }, + { + "description": "a host name starting with an illegal character", + "data": "-a-host-name-that-starts-with--", + "valid": false + }, + { + "description": "a host name containing illegal characters", + "data": "not_a_valid_host_name", + "valid": false + }, + { + "description": "a host name with a component too long", + "data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component", + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/zeroTerminatedFloats.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/zeroTerminatedFloats.json new file mode 100644 index 000000000..9b50ea277 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/optional/zeroTerminatedFloats.json @@ -0,0 +1,15 @@ +[ + { + "description": "some languages do not distinguish between different types of numeric value", + "schema": { + "type": "integer" + }, + "tests": [ + { + "description": "a float is not an integer even without fractional part", + "data": 1.0, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/pattern.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/pattern.json new file mode 100644 index 000000000..25e729973 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/pattern.json @@ -0,0 +1,34 @@ +[ + { + "description": "pattern validation", + "schema": {"pattern": "^a*$"}, + "tests": [ + { + "description": "a matching pattern is valid", + "data": "aaa", + "valid": true + }, + { + "description": "a non-matching pattern is invalid", + "data": "abc", + "valid": false + }, + { + "description": "ignores non-strings", + "data": true, + "valid": true + } + ] + }, + { + "description": "pattern is not anchored", + "schema": {"pattern": "a+"}, + "tests": [ + { + "description": "matches a substring", + "data": "xxaayy", + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/patternProperties.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/patternProperties.json new file mode 100644 index 000000000..18586e5da --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/patternProperties.json @@ -0,0 +1,110 @@ +[ + { + "description": + "patternProperties validates properties matching a regex", + "schema": { + "patternProperties": { + "f.*o": {"type": "integer"} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "multiple valid matches is valid", + "data": {"foo": 1, "foooooo" : 2}, + "valid": true + }, + { + "description": "a single invalid match is invalid", + "data": {"foo": "bar", "fooooo": 2}, + "valid": false + }, + { + "description": "multiple invalid matches is invalid", + "data": {"foo": "bar", "foooooo" : "baz"}, + "valid": false + }, + { + "description": "ignores non-objects", + "data": 12, + "valid": true + } + ] 
+ }, + { + "description": "multiple simultaneous patternProperties are validated", + "schema": { + "patternProperties": { + "a*": {"type": "integer"}, + "aaa*": {"maximum": 20} + } + }, + "tests": [ + { + "description": "a single valid match is valid", + "data": {"a": 21}, + "valid": true + }, + { + "description": "a simultaneous match is valid", + "data": {"aaaa": 18}, + "valid": true + }, + { + "description": "multiple matches is valid", + "data": {"a": 21, "aaaa": 18}, + "valid": true + }, + { + "description": "an invalid due to one is invalid", + "data": {"a": "bar"}, + "valid": false + }, + { + "description": "an invalid due to the other is invalid", + "data": {"aaaa": 31}, + "valid": false + }, + { + "description": "an invalid due to both is invalid", + "data": {"aaa": "foo", "aaaa": 31}, + "valid": false + } + ] + }, + { + "description": "regexes are not anchored by default and are case sensitive", + "schema": { + "patternProperties": { + "[0-9]{2,}": { "type": "boolean" }, + "X_": { "type": "string" } + } + }, + "tests": [ + { + "description": "non recognized members are ignored", + "data": { "answer 1": "42" }, + "valid": true + }, + { + "description": "recognized members are accounted for", + "data": { "a31b": null }, + "valid": false + }, + { + "description": "regexes are case sensitive", + "data": { "a_x_3": 3 }, + "valid": true + }, + { + "description": "regexes are case sensitive, 2", + "data": { "a_X_3": 3 }, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/properties.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/properties.json new file mode 100644 index 000000000..cd1644dcd --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/properties.json @@ -0,0 +1,92 @@ +[ + { + "description": "object properties validation", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"type": "string"} + } + }, + "tests": [ + { + "description": "both properties present and valid is valid", + "data": {"foo": 1, "bar": "baz"}, + "valid": true + }, + { + "description": "one property invalid is invalid", + "data": {"foo": 1, "bar": {}}, + "valid": false + }, + { + "description": "both properties invalid is invalid", + "data": {"foo": [], "bar": {}}, + "valid": false + }, + { + "description": "doesn't invalidate other properties", + "data": {"quux": []}, + "valid": true + }, + { + "description": "ignores non-objects", + "data": [], + "valid": true + } + ] + }, + { + "description": + "properties, patternProperties, additionalProperties interaction", + "schema": { + "properties": { + "foo": {"type": "array", "maxItems": 3}, + "bar": {"type": "array"} + }, + "patternProperties": {"f.o": {"minItems": 2}}, + "additionalProperties": {"type": "integer"} + }, + "tests": [ + { + "description": "property validates property", + "data": {"foo": [1, 2]}, + "valid": true + }, + { + "description": "property invalidates property", + "data": {"foo": [1, 2, 3, 4]}, + "valid": false + }, + { + "description": "patternProperty invalidates property", + "data": {"foo": []}, + "valid": false + }, + { + "description": "patternProperty validates nonproperty", + "data": {"fxo": [1, 2]}, + "valid": true + }, + { + "description": "patternProperty invalidates nonproperty", + "data": {"fxo": []}, + "valid": false + }, + { + "description": "additionalProperty ignores property", + "data": {"bar": []}, + "valid": true + }, + { + "description": "additionalProperty validates others", + "data": {"quux": 3}, + "valid": true + }, + { + 
"description": "additionalProperty invalidates others", + "data": {"quux": "foo"}, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/ref.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/ref.json new file mode 100644 index 000000000..7e8055224 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/ref.json @@ -0,0 +1,159 @@ +[ + { + "description": "root pointer ref", + "schema": { + "properties": { + "foo": {"$ref": "#"} + }, + "additionalProperties": false + }, + "tests": [ + { + "description": "match", + "data": {"foo": false}, + "valid": true + }, + { + "description": "recursive match", + "data": {"foo": {"foo": false}}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": false}, + "valid": false + }, + { + "description": "recursive mismatch", + "data": {"foo": {"bar": false}}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to object", + "schema": { + "properties": { + "foo": {"type": "integer"}, + "bar": {"$ref": "#/properties/foo"} + } + }, + "tests": [ + { + "description": "match", + "data": {"bar": 3}, + "valid": true + }, + { + "description": "mismatch", + "data": {"bar": true}, + "valid": false + } + ] + }, + { + "description": "relative pointer ref to array", + "schema": { + "items": [ + {"type": "integer"}, + {"$ref": "#/items/0"} + ] + }, + "tests": [ + { + "description": "match array", + "data": [1, 2], + "valid": true + }, + { + "description": "mismatch array", + "data": [1, "foo"], + "valid": false + } + ] + }, + { + "description": "escaped pointer ref", + "schema": { + "tilda~field": {"type": "integer"}, + "slash/field": {"type": "integer"}, + "percent%field": {"type": "integer"}, + "properties": { + "tilda": {"$ref": "#/tilda~0field"}, + "slash": {"$ref": "#/slash~1field"}, + "percent": {"$ref": "#/percent%25field"} + } + }, + "tests": [ + { + "description": "slash invalid", + "data": {"slash": "aoeu"}, + "valid": false + }, + { + "description": "tilda invalid", + "data": {"tilda": "aoeu"}, + "valid": false + }, + { + "description": "percent invalid", + "data": {"percent": "aoeu"}, + "valid": false + }, + { + "description": "slash valid", + "data": {"slash": 123}, + "valid": true + }, + { + "description": "tilda valid", + "data": {"tilda": 123}, + "valid": true + }, + { + "description": "percent valid", + "data": {"percent": 123}, + "valid": true + } + ] + }, + { + "description": "nested refs", + "schema": { + "definitions": { + "a": {"type": "integer"}, + "b": {"$ref": "#/definitions/a"}, + "c": {"$ref": "#/definitions/b"} + }, + "$ref": "#/definitions/c" + }, + "tests": [ + { + "description": "nested ref valid", + "data": 5, + "valid": true + }, + { + "description": "nested ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "remote ref, containing refs itself", + "schema": {"$ref": "http://json-schema.org/draft-04/schema#"}, + "tests": [ + { + "description": "remote ref valid", + "data": {"minLength": 1}, + "valid": true + }, + { + "description": "remote ref invalid", + "data": {"minLength": -1}, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/refRemote.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/refRemote.json new file mode 100644 index 000000000..4ca804732 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/refRemote.json @@ -0,0 +1,74 @@ +[ + { + "description": "remote ref", + "schema": {"$ref": "http://localhost:1234/integer.json"}, + "tests": [ 
+ { + "description": "remote ref valid", + "data": 1, + "valid": true + }, + { + "description": "remote ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "fragment within remote ref", + "schema": {"$ref": "http://localhost:1234/subSchemas.json#/integer"}, + "tests": [ + { + "description": "remote fragment valid", + "data": 1, + "valid": true + }, + { + "description": "remote fragment invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "ref within remote ref", + "schema": { + "$ref": "http://localhost:1234/subSchemas.json#/refToInteger" + }, + "tests": [ + { + "description": "ref within ref valid", + "data": 1, + "valid": true + }, + { + "description": "ref within ref invalid", + "data": "a", + "valid": false + } + ] + }, + { + "description": "change resolution scope", + "schema": { + "id": "http://localhost:1234/", + "items": { + "id": "folder/", + "items": {"$ref": "folderInteger.json"} + } + }, + "tests": [ + { + "description": "changed scope ref valid", + "data": [[1]], + "valid": true + }, + { + "description": "changed scope ref invalid", + "data": [["a"]], + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/required.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/required.json new file mode 100644 index 000000000..612f73f34 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/required.json @@ -0,0 +1,39 @@ +[ + { + "description": "required validation", + "schema": { + "properties": { + "foo": {}, + "bar": {} + }, + "required": ["foo"] + }, + "tests": [ + { + "description": "present required property is valid", + "data": {"foo": 1}, + "valid": true + }, + { + "description": "non-present required property is invalid", + "data": {"bar": 1}, + "valid": false + } + ] + }, + { + "description": "required default validation", + "schema": { + "properties": { + "foo": {} + } + }, + "tests": [ + { + "description": "not required by default", + "data": {}, + "valid": true + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/type.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/type.json new file mode 100644 index 000000000..db42a44d3 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/type.json @@ -0,0 +1,330 @@ +[ + { + "description": "integer type matches integers", + "schema": {"type": "integer"}, + "tests": [ + { + "description": "an integer is an integer", + "data": 1, + "valid": true + }, + { + "description": "a float is not an integer", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an integer", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an integer", + "data": {}, + "valid": false + }, + { + "description": "an array is not an integer", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an integer", + "data": true, + "valid": false + }, + { + "description": "null is not an integer", + "data": null, + "valid": false + } + ] + }, + { + "description": "number type matches numbers", + "schema": {"type": "number"}, + "tests": [ + { + "description": "an integer is a number", + "data": 1, + "valid": true + }, + { + "description": "a float is a number", + "data": 1.1, + "valid": true + }, + { + "description": "a string is not a number", + "data": "foo", + "valid": false + }, + { + "description": "an object is not a number", + "data": {}, + "valid": false + }, + { + "description": "an array is not a number", + "data": [], + "valid": false 
+ }, + { + "description": "a boolean is not a number", + "data": true, + "valid": false + }, + { + "description": "null is not a number", + "data": null, + "valid": false + } + ] + }, + { + "description": "string type matches strings", + "schema": {"type": "string"}, + "tests": [ + { + "description": "1 is not a string", + "data": 1, + "valid": false + }, + { + "description": "a float is not a string", + "data": 1.1, + "valid": false + }, + { + "description": "a string is a string", + "data": "foo", + "valid": true + }, + { + "description": "an object is not a string", + "data": {}, + "valid": false + }, + { + "description": "an array is not a string", + "data": [], + "valid": false + }, + { + "description": "a boolean is not a string", + "data": true, + "valid": false + }, + { + "description": "null is not a string", + "data": null, + "valid": false + } + ] + }, + { + "description": "object type matches objects", + "schema": {"type": "object"}, + "tests": [ + { + "description": "an integer is not an object", + "data": 1, + "valid": false + }, + { + "description": "a float is not an object", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an object", + "data": "foo", + "valid": false + }, + { + "description": "an object is an object", + "data": {}, + "valid": true + }, + { + "description": "an array is not an object", + "data": [], + "valid": false + }, + { + "description": "a boolean is not an object", + "data": true, + "valid": false + }, + { + "description": "null is not an object", + "data": null, + "valid": false + } + ] + }, + { + "description": "array type matches arrays", + "schema": {"type": "array"}, + "tests": [ + { + "description": "an integer is not an array", + "data": 1, + "valid": false + }, + { + "description": "a float is not an array", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not an array", + "data": "foo", + "valid": false + }, + { + "description": "an object is not an array", + "data": {}, + "valid": false + }, + { + "description": "an array is an array", + "data": [], + "valid": true + }, + { + "description": "a boolean is not an array", + "data": true, + "valid": false + }, + { + "description": "null is not an array", + "data": null, + "valid": false + } + ] + }, + { + "description": "boolean type matches booleans", + "schema": {"type": "boolean"}, + "tests": [ + { + "description": "an integer is not a boolean", + "data": 1, + "valid": false + }, + { + "description": "a float is not a boolean", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not a boolean", + "data": "foo", + "valid": false + }, + { + "description": "an object is not a boolean", + "data": {}, + "valid": false + }, + { + "description": "an array is not a boolean", + "data": [], + "valid": false + }, + { + "description": "a boolean is a boolean", + "data": true, + "valid": true + }, + { + "description": "null is not a boolean", + "data": null, + "valid": false + } + ] + }, + { + "description": "null type matches only the null object", + "schema": {"type": "null"}, + "tests": [ + { + "description": "an integer is not null", + "data": 1, + "valid": false + }, + { + "description": "a float is not null", + "data": 1.1, + "valid": false + }, + { + "description": "a string is not null", + "data": "foo", + "valid": false + }, + { + "description": "an object is not null", + "data": {}, + "valid": false + }, + { + "description": "an array is not null", + "data": [], + "valid": false + }, + { + "description": "a boolean is not null", 
+ "data": true, + "valid": false + }, + { + "description": "null is null", + "data": null, + "valid": true + } + ] + }, + { + "description": "multiple types can be specified in an array", + "schema": {"type": ["integer", "string"]}, + "tests": [ + { + "description": "an integer is valid", + "data": 1, + "valid": true + }, + { + "description": "a string is valid", + "data": "foo", + "valid": true + }, + { + "description": "a float is invalid", + "data": 1.1, + "valid": false + }, + { + "description": "an object is invalid", + "data": {}, + "valid": false + }, + { + "description": "an array is invalid", + "data": [], + "valid": false + }, + { + "description": "a boolean is invalid", + "data": true, + "valid": false + }, + { + "description": "null is invalid", + "data": null, + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tests/draft4/uniqueItems.json b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/uniqueItems.json new file mode 100644 index 000000000..c1f4ab99c --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tests/draft4/uniqueItems.json @@ -0,0 +1,79 @@ +[ + { + "description": "uniqueItems validation", + "schema": {"uniqueItems": true}, + "tests": [ + { + "description": "unique array of integers is valid", + "data": [1, 2], + "valid": true + }, + { + "description": "non-unique array of integers is invalid", + "data": [1, 1], + "valid": false + }, + { + "description": "numbers are unique if mathematically unequal", + "data": [1.0, 1.00, 1], + "valid": false + }, + { + "description": "unique array of objects is valid", + "data": [{"foo": "bar"}, {"foo": "baz"}], + "valid": true + }, + { + "description": "non-unique array of objects is invalid", + "data": [{"foo": "bar"}, {"foo": "bar"}], + "valid": false + }, + { + "description": "unique array of nested objects is valid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : false}}} + ], + "valid": true + }, + { + "description": "non-unique array of nested objects is invalid", + "data": [ + {"foo": {"bar" : {"baz" : true}}}, + {"foo": {"bar" : {"baz" : true}}} + ], + "valid": false + }, + { + "description": "unique array of arrays is valid", + "data": [["foo"], ["bar"]], + "valid": true + }, + { + "description": "non-unique array of arrays is invalid", + "data": [["foo"], ["foo"]], + "valid": false + }, + { + "description": "1 and true are unique", + "data": [1, true], + "valid": true + }, + { + "description": "0 and false are unique", + "data": [0, false], + "valid": true + }, + { + "description": "unique heterogeneous types are valid", + "data": [{}, [1], true, null, 1], + "valid": true + }, + { + "description": "non-unique heterogeneous types are invalid", + "data": [{}, [1], true, null, {}, 1], + "valid": false + } + ] + } +] diff --git a/src/s3select/rapidjson/bin/jsonschema/tox.ini b/src/s3select/rapidjson/bin/jsonschema/tox.ini new file mode 100644 index 000000000..5301222a8 --- /dev/null +++ b/src/s3select/rapidjson/bin/jsonschema/tox.ini @@ -0,0 +1,8 @@ +[tox] +minversion = 1.6 +envlist = py27 +skipsdist = True + +[testenv] +deps = jsonschema +commands = {envpython} bin/jsonschema_suite check diff --git a/src/s3select/rapidjson/bin/types/alotofkeys.json b/src/s3select/rapidjson/bin/types/alotofkeys.json new file mode 100644 index 000000000..3fc052e34 --- /dev/null +++ b/src/s3select/rapidjson/bin/types/alotofkeys.json @@ -0,0 +1,502 @@ +{ + "4BABQZ5SZJSO3KFKBOG36EIXXTOF34HVFCELHA2DWOMIL44K": null, + "RSZNOTRIJFCHRKG4IKNOW4ZEBMVXPDBYBXBGDGNWTSVLMJ2U": null, + 
"AOGQPY32FQ7T7WZWQPON3X6GU74GOYI6HHVNPATDTBXRUQ4G": null, + "3PMTZEGLZNHSOWWJ23BE6PWOXD2VZRDN7MMLUMQ4EIRERVCG": null, + "PD2FMQGI5HTGK6MT76OYS2ER2LXFBON44WOMELDY5MRKQI6I": null, + "6L6QMMVSE4UQLB4OGX3LVDRNGAL6MOJ6S3RBBUSQ3F5PPHYR": null, + "LYVVXT7U7WN7PGGUHCLFXVOBJBSSR6ES2P7AY7XGBXEBLTDD": null, + "G5RWOLHDDZOXYEFGGSVWG3C2UHYDW6UOFVBQQLQJVZNCF4TB": null, + "3QPIK2M3ZPICZQFQTX22A7VDCAFIGAX2PXIXKDOZX7XUM32R": null, + "JR75L2BXOA5LVLNKT4EEZO2P45OHWRPMMWMFENTFFIY7A2V3": null, + "TESL546MN7IR7AT3C5HRSESIFHZ5NW6TNRWZXZ43OSRYOZKP": null, + "62EJKIAFWGFGPUS6YP2X6E26AV2TZCTCAJMZNWBBNFRPCCRN": null, + "5ZDD3KPTPGE2CAWR3MTFUSBMGQAS4ZP5WZKXJTXUNFSYABD6": null, + "XQ7TMN5YMQLAND54B4VIVWJAHU3TNZKT2S4SVRW6WKHNJBX2": null, + "O456GV3HBAWFDQRCECX6HY3PBTP6WNQIDSKVP2MZIPV3TCBL": null, + "WXCN25EBQH5WWN2JBHWNFNDUTYSFDLIAOWO5AE6D5HDO7VNE": null, + "THO3I3KDRIMTD6DKNIETIOWDY7FXQ5GJ3P2KUQJWYAEL3LXV": null, + "7OMI7VIOKFRZATMNLGIWH7ZBDARZ6ARXIAH5S3GPG5KV52IC": null, + "ESUPY3ELKCEGFRSYFGPGFBJOAUGXMYZ6XCWXDFOKHBJHNGVR": null, + "TNXSJIEFJLUFUDTR2S5LV73PD6ACFYNHOCRZPUSDPXDD3B7M": null, + "T6TISG6P4W66F37634QU4BNJY4RZ77QXXNPGTYH5LCCRDSX6": null, + "QTVAA56JKNDYTMV7DXAIL4QVLZPW3UHGLTKDI2BED6S3MGDQ": null, + "DTJREAQBCS6I2AJ6MOGTPIXK3ADB4BPNDIHI2YSQP6Y2BMH7": null, + "XDGH2OYCTAJ75IEMZ32O644YLT73PRYDORFKBLYEMCHOQ7Q6": null, + "4KDDQZRBLNS33DRHZHDMENCWGMNFEJGBZJXSGIQW7VBWOTHT": null, + "5KSH3GKWFNXV55KI2FPUDSD57O25N52UTZAAYDFVMFUSEE6O": null, + "7AGEUBM5FQZ2JCMUSKTEI6COA3Q5CE5WYB7TP2F5OX3ETMTK": null, + "HFHZ5ZE5TC45W4WIF6H7ONTHXKAVWRY2LXN2GN2TXZPIP6PQ": null, + "S3U2JJBPKZHZNOM3SWVFQ7OMS7F5M2KDJHHHZKXHZXQRNUSE": null, + "YHJBGJ6T6A7PMK5AYXDITDV37BJJIM4TP7I2XHSVYN76724O": null, + "TH42A7M3645OUKC54JQMDB5BTGS3URFUCYJ2VOEM6IAGZ5QQ": null, + "OYBKULFLWL2MER745QTDL2X2HJNR77QGH2M2P6TSCHVGUJLV": null, + "JDU37GHQUOCYA5I5LFS3WAEKKR6C55XJCCLJCCCQJEGUJEP6": null, + "CB5HEJZNJ2SWZM426TWIWLHTWAPWPT2AVVHBILEVGFD6NPHI": null, + "D4A5SJA2VRB4JGJFC7PHT35S7XAMHPZZ2PZC5YYVV7RLKSUQ": null, + "BBVT6NRRU55KNRK5G745FNMZVIFHVZJICEMODF4ZBJFQ3EGL": null, + "XBV57OEMT4GYPTTH56A6XKF2ZPMXSMHY7L3LUIS5ZZWRP2OB": null, + "GTFJ3NP4VJR6HG2DRNTDKVIWTMIALYUQIQTBJMKVM2I3QKGE": null, + "77BMBFMRGASXE5XXT6BRH2DRBNJMIAUDDMEXIJT3RMHTUPI4": null, + "FWZZMG7I2JWAHX4LBYAK2G4L4TZYLHXMJWIDGT6JC5TEBCPJ": null, + "J3324OXU2BG2NGFMSOYMVKXE6OEJNGENI7EESHDSEWSUVDVV": null, + "C636AVNC5C5EKILXT2AQPXYLSNH7LCAJKVDHP67MVHULFSQN": null, + "OXTDOQG2VIEVYFC23SKOBSSPGE55AVZZADK6PIHP3APVPHND": null, + "JLQVKV4Q2BQHK355NQXZVX4WJBPFOWSD7WIJX2P433KYO4SO": null, + "E4XHPOPWH3PRCV5MGQHR3BCTKZMOK46OH4Q6STZDPF2FG6SD": null, + "J5IP4A3DV3BHGGU3J72JVVTWNIQOLNC6GQES22MVATK5H7BZ": null, + "HHCCDMLNGOU2ABP57ION5TC33UF3NUHL2L3WUYCGZDWLDNTL": null, + "54Q67RURG4THOANPT3RAVF72TGKJE425GC5GD3EOKPY6MKVW": null, + "TG3BH3HBKFEXAUM5O67VVDTXZA6MHWSVNNLXLXIL2SE2ZEDO": null, + "Q5KJ25G2A4CWNGPPYXBZM6QVYA466MQX3HCUMUO5Z24D5JX3": null, + "QQZET7VFHGJROUQZSWSRDI5ADXVH26KEPDVL6PZ36ISHOKMQ": null, + "KWNJME4V365ZIC7SA7BYCVQUAG6URC2T6MHSO3OVDBJAUPFB": null, + "XHQYKGYVLE2PFNXQPD4OUWBASZO5OFVZISCVEFTU6K6KDKHS": null, + "Z4SPXMJIAMYD2H4U4F23FALBDZP6NRLUBDFU4PRGZ4SXGTI2": null, + "HSCK3PEXH3I3XMMEMIHTM7SDJD3GOKXUMCGOL6UXJXLCSCGN": null, + "BIUYMIDY4EVGRLZ6OLJK2CE2FS5TTELIWSEA6UHKUDUYX5LM": null, + "IJJDLN5ANXTMX54P6GW2J2EJGGWG257YEAOZMXUSWK7D76LH": null, + "CLMTO3VSAOBAOBSA5IGOO4W7KEMLOFUOIR564IBKMJA7RWEY": null, + "JU5DNSHLUW34DT3IQ36JBV6M7EPILLMBALURDAB2KJXF6HQB": null, + "VXZXWLNQZFJPNQVPTOFWUPLORZD2XRAFXRVRNLUYTPBO22U5": null, + "HNACM55ZSGJ2FGRMOTXLVVLA32BC333QGC4HFCDSZ335YA4N": null, + 
"6J5GIOVKU4PKHHU35DWL3D7GTJFF75FF4FKSP2HPGJ7SQ2DP": null, + "O3NJM537IQSKKWM3K7EOQSNQDTR6XKUA7ZR7CWYHYYLHKH63": null, + "B4QMXK2EAR5E7KGHLODCP56DX5HW7IQVXWHFFCZ4SPSSNGJK": null, + "A5AUZBXKF67OXO34ZSEGVXI5PAIG6W2VG3U5C2Q72SNWYNEI": null, + "ZGDQ2AA2IFBSU3APKAFL5AO4C4DXY2YBEHXE5UPPVCTFZ36K": null, + "N3XZ5FYZEO3ZX37OMUJX3PIWEEV7TVEXX64KKEZEJICKXMTB": null, + "3EVOEEWTD7OABLQJIJYLOSVHBS4SB6QGX7GPDFGWI2DGAWKR": null, + "HNAEL3D7E2S7IYLKLIK4CGI56DRGAXA5S6KG3RX75PMJ6BVI": null, + "VGVW32CIRX3M45J2CPCUPPHNRGNG55MKAU7YF3CDNMGONW2T": null, + "QV5MW2W6WQSHNC6OYMWJAWEQM7LHXRMGWCJ7RI5WQ3JGHARW": null, + "IND2PUTLFWXTEUY4MMEXCFJA7JN7DODE5HVWC5CL5ED5IEUB": null, + "W2IA75XHJRBRKXLHGB7LXD7ECYEZI4V5N5I37UFXJMFWQMYR": null, + "AWTZO6OG6TCOUVYYJCWVP2JYEXRXZ7S7F7QKUKZS7JLPKN3H": null, + "TCARJATK42Y66SPMGOZ2LHLT2ZPZW7MHGXL5IVTS272FJV4U": null, + "XVHBOY5WQDOTWXVFZYQKZ6GNRWMITJDDLXSJ2T3UWF6PFOHL": null, + "CY5FGDYLB4UFR4AJRGLGPQT3W3OERGCXC4JHYKJ4HKSFTGK6": null, + "B3SJGD67GKIEAOZISX7HWENPDBYJHNJ47JCREGXQ6G2RXPUZ": null, + "LWVJYH7M5KXMLPFAHTMF6FKT3PSIW2GRC37AHF65PQY7OUE4": null, + "UUFKWC2DOV4ZQHPDPQPRCBEYNAX6OFZ7ZVJNYGW5YZCMSQIS": null, + "K5EC26CUN365DZ3LE2NHOINGZHXQ752A3VTPN5IMSRYSTOMT": null, + "22RV6RSSZIAFXOZIRAWJAIMMVHYWGL2TY42U3TG2SPFN3I6P": null, + "Q7VEOUC52OLXL53CR4XQSGGR5QZ2QXZTRCBACHQFP2HKN4SZ": null, + "OZ2ZBCTBC32VOHHBDABY2U462OHUEUS724RUS7Z6TW5K5ZFQ": null, + "EYXYWTX2UYI6MUK5L65WSTX2FDOJASIMG6ER22NLABNGAEMI": null, + "U4FJU7RQMXXDMHG7B5WFLXCZBNE5PMV43CE5X4RJSJUABT3U": null, + "K3T56AL7IXTAGTVIWZHYRKVPHLLD7UVHV4UNU76F764VGY75": null, + "U2BRKWY2RBYV5S3XVZTUZXT55CXMB45KDMNFMVY6LENW2DH5": null, + "YKLPZ7SDAG2O6NSJFLVGFVCYMY4WZKXQGHH7OO2BKGGVLQNP": null, + "WSC2BHA7H6Q62HJIIGQFX2OU64QX4AEU2RZQVIC7LSIO2JSJ": null, + "QIFNFKPJJCYPITMAYDAOEXBVEDAKBBR3DV5ZB7DAVBIAWI5K": null, + "NMBGIDIK3BMS5ZPZS6ATTID5BOAXZAH2VUED52JSC5XGI42P": null, + "LTSG7BGZVBLLXM5U2QDW5LNNPM3B5EQZPHES7JXU2EAQG266": null, + "5MZMVLLM7YHR4PTQCGDGWFQQLNN532WMTFGX5CFTDURBYEOH": null, + "UOMT2ERDBVXC3LRYKCVVUNROBWPGFHFWKFCW65HAPXN2H4FD": null, + "RFYZPAIVYHTITTR5AKOBAMYKOA3VSKRTK4P4ZOS7JFSVEY53": null, + "QQJGQV6BSW6PL4DZGQDWWVTF7U5MEVPQABOA4IRP7NOD4V4V": null, + "EFOSJBHVPSGTB3O374JFJW6MVW47ODOZQNKYSWHR5W6UZECP": null, + "YTL42MLIGIUD6Q3AMVMJ6ZMWNSXSUWCKV4ZUQWSGTEOATQC4": null, + "F5IL5OV3Y6E4QEE7JMQTKV6ULJ5AQQKQPZ23VXK72AV2P7XG": null, + "AZEV37T65EWVWQJSISCHTYHLWRXWCR6XD4LJ4KFLJ6RAOPF5": null, + "T5TAAFPNZLVDYHSNNHIJW4KBZWNFT5CMIPIWW3EFKPU4REYG": null, + "W326OLSKXRLU6MEIVUTKFFHFGXEH3VM43F353L3NHQP6HE2Q": null, + "MIIUZQ4KGTLA66VIE7WPN4T43SR6Q42YUKWEP6467AYWKU62": null, + "AXSJHLTL4FXCMLLJTQS4HIBRGUY6ATR3GZPV4MGXLWNFHDYU": null, + "MC2CMWSKD2HMTVIWCMSPZWHEGW73RWEZKU3IFZJM33IW3VI6": null, + "ZGOZHC22WZN6LSY3KK4HK6RF6F73VWSB7U47KZSLTYOQZAVH": null, + "HU26VJYM5YNEXCOCWCVEQNNZ2WAPFEVYK67JZOHMSZIOUWJN": null, + "6ZA46O27SWCAX5MQUHZYFV5UNQUDLZG4LDA6VILK6YRQMMZ5": null, + "LMGGW3CAN4T6DSVJZB46LOBI6KTZN7CKHNZ6BMWRBL5PVYXX": null, + "RZKIP3F5SY2P4NWANAQZJHPH34CU3GMQ4VCN4QXMP7ZBSQ43": null, + "CMUAX53FME5Y62VC7T7ZOUZMOPHBDFVLMVVMHYDDBZEHMOOA": null, + "ORTA47K5MLIHEUXQTFFTQNW2RMYQSTVDJXUNIF334SAJJYMC": null, + "XEGLAWIOOPE25FDXHN65P4FYJVB46M4NGGXFAWZ5VDWBBMU4": null, + "WZGXOCCN6GENKYYAYCH6UQD45BIPSYMQOZYAYRU3S2JNJLU3": null, + "MXDDSZA6VTTYU56ONDE4QZMB3L2D7A5SCRCIVBYYVIKFDFLU": null, + "JJMW475CTRXRR4R3GEZ77STHMKHTQZYFZHFUFXEB77SV2W3H": null, + "J3TNJVNF7QSTIJDEEZMLTQECNOES4PXQALSR5ZPYDTIVVKUB": null, + "Q5EHPI6GHPPZBATKHYYEXNDSYMW6QVAVKKHC2XO7RU7XXKQ3": null, + "B6WGKJEZH7XBZ4VFFRCBFYKC2Z2ZQXMY2HJQUH4LVI3EDMMU": null, + 
"NZ737IT3LUIMH56R66WFETEHFDOZSNVPTHMQTW3JHVTN562C": null, + "B52PWLRNPFN73AA63O6JFLEYSPFQEIHQ6AI6YC7KWOYFE5OW": null, + "7UTTRFE2I5WB2XZA37L6F7RWCII6S6HLXZRTLSJYFOENAYPI": null, + "TJJDGG7R4RNVAOXWRZRZB5K7W2Y6XB7LUYBDOY6H5IDRM3ML": null, + "TOG35JU7ULNRY3DE2XYDZ25WZETRSO5WSFFYSZT5IIALO3ZP": null, + "2QZKK4CMZNIKUWZZB22ASDR2BYNRAMTNS7MVLBA7Z7RDKZDV": null, + "US4C6FXHKR4GCRU6IJQHSAJXLNQGUDCDEPEQDU5C5D76I6XX": null, + "QOPUXM3ZKXTPVGMVVDMUZZ75KH2S7DKYXSFCQ3R5RYO5WP2J": null, + "GZ2T37SKRE3ZX7FARFWWF3WG443LVP5X6ENDLDHO7GBWYHHM": null, + "VSOOUSBMGIPEVAPYAGWZOLDUW5HSTRMTBRTUYLQNHKVUBLJ7": null, + "45HJFJQ3YKDBFDZPNDO46YT7DLG754XZWMGJQ7YPJXQ4G4N4": null, + "4KY77KV2OWWFEVIBSUZRGZF2V47BEFFHIHNMAQVK65E34ZF3": null, + "NB334WI2DNPLWHGXBNHSU4436ZYDQ4D2S3JMLDOM35QINZTR": null, + "7K23M4FJGIQFWUMPRDZIK32MF7HZULYYSS5Z7N7QTEJGET3D": null, + "ZBMNFKSEG2PXKJZIXIK2MHJQ2ONRJUJVCDBOCHNERPGMN2NQ": null, + "YMCOX2NMBDL4J6Z7JBEWHFSCWON4ZSBSBU2WONEYYOYRA75K": null, + "GDOVKPAWZFHLAPQ5YHCFWL4NAMC5G2DDXFWUTR27XQ7LEOOQ": null, + "CYBYK7ESXTUUHYQVPMDI7VWAZO5TVGLIB3GB7NYRYVDLMYKG": null, + "4IYLX3IDNUJ2DWT4RM3QJ3IMVE22X67EW5KWSMZHIU4W2W5B": null, + "EBWXJZ3PX7LE4JNB2XWJJNXL5QBVSJQSXAUJMJ34YJKR3JJU": null, + "LEKOXMXHU57JTRZUKMCW4WDCAKEOXPHJ34ULXN5P6DIEOYLL": null, + "BESPMR4LBE3G4MTWR22CVBYH6NW43HO4ILTSV3P543JZUBD7": null, + "5SYIBXIHGJGE4WHL2HYUNK3X4JUGOJOUMKVJMMXSQDKJZHFJ": null, + "XN42HP3QOV34GMJA5VINVW3O7KWW2GV7VDKAZDFBCC6SSHNQ": null, + "326BDEDWGYW3IMEHP63I6LVGSRMS6DUUNMPY3YVWXCH3YA67": null, + "FYNTVFBPC37FYGOXFIXJP57FNX5MYDGUWIMUYMFOJSOXRRDS": null, + "7DRCBIQP4EXAVNEMWOZHAEZ2W2EIMKD77PH6JJWP2BDN6NFN": null, + "7Z7LWVFB2Z26EVYZPLQAOQ7LXLADTHUA7QGKDRFLXRQ3ZJUX": null, + "EOZ2S4T75U62LD4QUZNTOHP7SNVJUNNSE7WWGHCMC75O4XPW": null, + "TVG4ZY3YVNQV7WPZ2CEW26QTGWUBVJV7FTRF4TE54446J5SI": null, + "MQ62OHPXMGGASRXKOH5MEVGLYHKNWBT3DC7XSXPXFHFXFO5C": null, + "MBRTEJLOZ6U43EOO2IS3AHNDCT7WUEK4XN5ZRMTPBKUFXUWU": null, + "24WJGDPNT4E7SQT2IBSTHJGYBMEBKS7VPGJYBRRAT5YXNBC4": null, + "3KD7I6FOTRB4U2JBT7CIJOPD5XHFWHESYJJQTQVUQ3IGIPVZ": null, + "25XHQ7A3DWKVDBX2ZFNIHKOGJCXY73N4Q6PUBEWGH2I55XVP": null, + "GE3YTUBPOT5CFJU2LQVMZVC67NFNLXVWNTV4ERN6BHVGCGYL": null, + "VXE2WHW6UWRE4WYTAVFAQ75IBYUPNZVMHJC44DGDPIAEOVVE": null, + "5JRWFOAEX5TNCAMYGF44C72EWF5NTXIRSVST5J3N6N5SLGFF": null, + "TYNIMWTDY2D565BJUNMFXTJHBUMWOTD4YSAFILKXPKX6FKRO": null, + "RDUDIY6N4RRUA6YEBBBFPBNYFZQUWRVURNYGJPEU6EHJA64H": null, + "MMRLX63PFJLWBJUTXCSLALIGK5YOHTLAY64WFQIYQJCX4QID": null, + "P4T7UPQNUAFMAJ3G4KBRHOQP5GCJP46XXYKPTTENUI36YQEP": null, + "VNAKVK3A4TN7WEZAJBJVMUVIKIUWCNH7B373DP7WAM7ZXYDD": null, + "VAPNA5BJL7OF7VRVSUEFAG6RZWENO5VOGMFVN6AB4A7H4VU3": null, + "TLVHDKN7326OHNXMBBJIVQW5FFFGPXSUR2IVTMPLOLPPJQW2": null, + "LD4OK3CY7MQGHUMQOMPAJY2NZUASJLSLWVSIIKIYYYAFYHIK": null, + "DXHC3XJCJJG2SMU4O2HDPMJHO4PNNYGIMLB5KSCQPNLBAJER": null, + "SANGKO55HOXMBC627JYHVBE3FH6KJL74ITOVF5GYODRRMEMP": null, + "TOQW7HYYWSFH3NKL7SITPX5H4HLAH7BKL35ECCAILLJ5B4TA": null, + "WUKAWAQHSBKAUAYEQ4UA5PKFB4676VQNQFLXUIX6UCDFZ472": null, + "BDU5VYNLNHR6HOLMZI4XSDERPTMVJ4LBUX5XP6W2BQWH3NLR": null, + "R6BT3RGKODHZN2AEX26XHNSLCHGPGMQ7IS2ONRTZEPJECW7A": null, + "E7Z4FLW3UW2ALRLPSMHQWJWBK7VWS63H3AUZZL6LHCIG3Q5B": null, + "FUZAITDO5EH4BU3ZAN55R2RQZ75LRAYI4X3MEJKJD44VHOT7": null, + "7SZ7VZ5O2OFPJL3K5JJKH3C2ZYAJCWW5GYXSLVFHRRATZDFA": null, + "6H7VKVPSP4MHB6P7H5KLQQN3Q6ZSS65OMK6GJ3JIUMHQINMC": null, + "QNCN75MNVAVH2OQR3JE53SGCKLXPSB2XBTZ55J3AX37AV5HT": null, + "JCSYRKMHDGVUVZO65VQVAV5SGQS5IRS4UGFNFKMYP6CXMHXN": null, + "JXX5VCQU2Q73TK5ICSFX3QIGA6E4IFRTGKPZZY32UTB2RY2Z": null, + 
"BMUAPYFGRJO7ZQAMMSSEADU2RC3LPAAXTORXLSIUCXCSSC2P": null, + "3SPFCAR2V2PQA3RWOY5ZZXI2V6UEUCZWL6SNCGEAGNR2JQZV": null, + "KUW7Z4ZHRUX6DI6Y3ME7A33SXUAQPXFAHRG4IEU32ETMGTLC": null, + "64F67UZGQHZUXLN6HCATAAX2FUQNK2WVOEJGBQ27H5DVZFC3": null, + "GHMJSW2TE6E3JLFDD7T6FI67HBDHNDVLGEKATAO5G33TID57": null, + "6BZEOJR372ZLNXUMEQQUKHHDCAOE5W4YDT3VWGI3YYPYDC5R": null, + "62JOKD5O25I7DBDFMM2BRQP4HI2VJTUHMEF3G3C7JFJF2VNL": null, + "NEF5ANHSBNEXLOP7FFH7ZVHPDOCHQQ6EYOG64JDZNIHBT44L": null, + "ZKLJACJIQT6M7KUY3VWTMQ4WD7RETAWN7LDUB7UQA3NZHZLC": null, + "VBZVHDFHE464JTYWCLYNAA65RDMVURJHVZHWRL3IKTNT6AH6": null, + "FHBYZO5SUBQ56J72DWYOUZSDKXE3SKDRWBEGLQPHWUGVSW5B": null, + "HHWRIAY52UXIOIKQOL3PBERZFDCQXAAUIDT4RTZF2VETEY3Q": null, + "JALKMRCQEIXX3JPLOACUZ2DKA5I2RWSSSIYDVSURW475XHR7": null, + "IMQUFG6JBGWA7R3D3NRMJNOF5MKE2NU4H2LAI6UPIHUEY2ZD": null, + "GWSUUFLKG23Z4BXTLB2HJHYVRWAWHKV5MA5RVOEE77Z65ILK": null, + "ILKEEWZSHVZSQ5M5VAZH6MJPBVQVV63SCQSX73YGTOQZBFKB": null, + "TBU3SS7AG7QISWIK2KKNE77ISJUEVH3ZV7QZJAEHLMAOUCEZ": null, + "EPN2PRVPXZGZ6WRX5ZMG6UPIM2V2NEA4BBC7ZDAIVCEKMHR5": null, + "Z7GJAUSWDAH2JUMVX6IZB2PRSIUHYUKXGKJDM7FXVFDJNDUU": null, + "APOEQP3DLJGKFU7424CJJBFDTWODGF45H7HSXT3GO2UC3VCI": null, + "LCYBWI4HYCSVGBSWWDJYDCWQZGJP2KVSXUUJBO3XFUWOS4SA": null, + "KOTWM653HSOQ2JHNAZGZZA5FGBBJCCBYPDVDE7WDXXIHTULF": null, + "TIPJO4GHBUJQKWKVHK5RF2NI5Z6FAIEBGFPR5L4SSCLS6IE6": null, + "QJZMGE4B6UPJ35KTTNIAHWTFV7DFQZ2QMF6DLHB2AHZQ45CD": null, + "SDV2RDMAXCYWHJJRPTEIZVE6SJST7KQJB57AXCWFVO54E2GX": null, + "4NW6WJDWXCXG2TS24H4I7WF2IGROPO4UBN2HJ64M3CWBU2M7": null, + "GJPMFWNBHZ63VB5XWIM52UO22ANEXDYLHTF24LGBC7XXI4SH": null, + "I3TPFLVZ47TOOMM2G35JFQAYHLAIU3OXV4SXZEP67DNGYXN3": null, + "QMPKHHYWWSV32R2LHCWFKBFDXUDUXTZ7FRZK3TCF25CSFNWD": null, + "J3C6XZSMIXH2SQMBUEVWEI6UZVX2GJZCAYSPD74BBUUK45RS": null, + "WHK5HZ42VH6IJ4U4EUKKVLRAID2FH2YISR7IV6FMNHQPPSH7": null, + "C62NUQB2FUJTY5VRNI6ND26FXCUVACSUTTR6NZMNQPYK6357": null, + "2JXXJE7WN7QWO6X3ESQCSDDBTZXOY43L5AODQUIR5P6Y4PZB": null, + "FQVJ4Y4GES3RGRABQCJIDIYEUNTIGETQ5EOXW25SZSYNJENT": null, + "IH2YJNGRU5Y3ODZHMWNV3TIU2MGCNLIPV75QL3JL4I7PH5ZI": null, + "RYUBMVYE4PL6JJOSBM57NE2RFKCY2EB3EQR3QU45MJHEX5IN": null, + "KRVCQJ6VSHM52MLDNS65PKDYHNBJAHCONQCNLXBD76LDCOFY": null, + "W7MB2FYKYWXDXNOKVWJW7TSUANZIGE25NABNIAK7VLPLKQQH": null, + "QUJNQSAHDXMNHKHHIRBEFONX6NRV4NA7NKFRDI72ZKVZXR32": null, + "SRQAJHTEQYVNHCJDTYHA72VSYS4FPTHXGPFYP6CQRTEUIWVN": null, + "GYQMORZKT4JGWGOD4KBEMUB3XZNUM7H4G5IRA6SYDZOGAPVF": null, + "73P37OKSAJ5SWM5NJ2QWCTKPTFNLORRRGBNJWR7BTCRLKNCO": null, + "D7YVGR63MRZ5YS3UTCUZ7REPWGB6EMGNI3LXZUDAYBSZVGHZ": null, + "JNUZON5EE4CF5UIPXIAU5HKQSBN6O2C3OXJ5IT6HPZMUBXRK": null, + "3UMKRHCWRV2WNUWPF4WKESLI6EOHPFC6FOXF2MGP6E7GPKF7": null, + "ZPWCDBHEOCZRBAVIQNGRQ4WNKSE4XCXWH3PQSBJWVTMLP6AD": null, + "RCOP6UXD6CG5XYUXFXT7HDAWQA2LRA52R2NVABFBB574N62D": null, + "6EHQW3VGWSY5MQKBQ4PWU2YD4KKXPBUFJCBEEY6GKOGGGT4P": null, + "V72EUDLMYSS47DCO7XIEYQO4S6KK7ME4C6VN6IWLZALPDIR2": null, + "EX6JHHNN4R7BQVBTVXYRD54J6BLOJTRHM64QBK3DHUWW37KF": null, + "6GGFMOEZN5PBE67AJM6XJKDL7V6X26X3TH2WVOO4X2MEQJKO": null, + "LYM2NDKVFTJ2IJV2G5HTDDXFDAAHVHVMVTNGBAOABW4JLB7V": null, + "5GPR3EHGCAFLKH4CTOZK3JBHCJSEEFD2Y5GS5Y3B5FPXAK6H": null, + "3TVZ6BP47YPHI4HKRIK43AJPRVM5UO736FF7WEXI6FJMTKY7": null, + "XDL7LZWG4CIO574WINYHCXMGRRZV5BMZQH6GPTTVPBWGV4MD": null, + "OHDX42IVKOGGQVXFE7Q3DKX2HNXGIZRAZ5TVVKQ34BO7UKPB": null, + "QHSOVA5SCRL5AK65IALQZWLHPSMLOOHHS6JN3LHDCN7DEHJ7": null, + "EI2J32TUZKPKSWOYH7EPPKHISCJ5SPTUFJXENKZJFAPEFQJL": null, + "3FFPGZBSH57RTBR326VUSL4G3DELAIPWCCHB77LFG5CBS2YU": null, + 
"VOLJPHGJOQKGKQ3PQGRLYJBZCIF3T355GXCQQKV34USNOXNX": null, + "ZASYP4G3K3DX6MMU2CK2P6GJ2PKRQFQGFVEZGTMIRAZBDMOC": null, + "MSLOXTCY6MPU6XRGIJ7ZGFBB5J4RTGTEC2UW2LO5MIKPXFJO": null, + "DM3ZAC4JV4IDN7ZZ2OLHAUCUEWYTMLZSQQEARJF4JVBUTE2D": null, + "KWIXQOXXGHTUPBDCHXV6ET4YZXIDCYZEQTFIRHD7DTMSGZ3X": null, + "SOF4BRIWEU5XLSXVFE6IHVVDYG73RK5HJACKPUNFRNEDHRWS": null, + "4FECMJE2AGQGN54LFXTIQFZC6ZVJN3LY62YCS4E65PMW2K5J": null, + "VDCWL2B5OEDDB2YGM7Y23WLJPJBFESITFU5AWDPUKDUMFPBO": null, + "W6VC6MV4GBWJ7IDAX4DQHWJSBUJHJN7ADFJ53NBVND3TXUCZ": null, + "Z3TTBMVW3FCTJPLHXITOVK4LPLUFJJY3CIYKJ4QY2DANJ53R": null, + "O6N3PZGXI5B6NBTOFPB5WWIRJ66O4FYLSGHDIJLVPT25YPWK": null, + "RXCNDGG7CDEMAOGCALTPXWLUL7A67D3JSKOZSZEQBLDW2F3S": null, + "XVKBWW7HRXDBW3YXSBMO7WVEUPVQ7LRZ44RVFI27PYZO3NTS": null, + "XUSRB4YQDOYJALL7CK2OYFPL7GKI6XOFYHP7HTW5H3PF333V": null, + "PCCHIGPV6SWW2O4YRPMFMNF5YVW6QY5IF7JPYAULF5WPTYKB": null, + "MHRU7JFEPHHUAULYL34RAEAGBU2ARZG63TGIIS7MHEQUKWPY": null, + "Y6EDYRAB7V6NAP57DRIKQ3SB57XBPN7MAWD7F3DM4DKWIAMA": null, + "JQXEFOTP5HPBTKL4VAXYCMJFZVGSAM3JVLFJPQ6KHVLCRXFI": null, + "U53PDNGH4IKMP4PW6AJV6K5Q43PYB6VUZ3IJVEKZK32IR5WJ": null, + "52CB2E7VQJ3JJ2SXPHQZZMER64TM2JQBSW3JMX7XITCNSWDT": null, + "3JLBHZFBPZQTO3MLCW6S5N3RIR42N6RGDHMP4U6IO6STOOVT": null, + "YTFVKDUY6LHHBY5JBBTT75RYI73Y2Y2DFT5PBMOLEVBJEN4Y": null, + "TAHMINQIUDTCEBCJ4UH2PUXO5TYMIIZTH4BX26S4NRMPFD6Q": null, + "4VJIQ6FLWV6ONBHRWDR34KXCTHL7HIXSQAF3FAKOMZ2C7QV7": null, + "IQWFFVGP6CPSAQWMKA3SWYOXAUL2YCD3EJYRQ56S3VXWAMUF": null, + "J2FABCRQ7HZFV4FKZKY2UOXRUO4FYXANTTWL27ANRYY6XZC2": null, + "G3TOY5CIOYIELRC2S35CGAS2E36TDLTO5XYXHFVKZNDFQC6F": null, + "SDKQB4B47LL6CAFDDYJWDS4X7COKTZOCQ6ELJBL2YF6RHZJC": null, + "L76D3LKKTUAWNPDXKTWE7JCFCRFVI4UX6NKQS3CAA2OWVF4K": null, + "QVYYK2GQF7DHSJACSOZPOQUCWWIYTRGEWMBIR5RRCV2EPQ5X": null, + "UM3PJVMZNDU2GJ6KVY5VQ2HPGMSJKVAQBDRKZKHBPBRCU5SO": null, + "RDIJQSPXHAUB7XSQQPOL3CNUR5AJAJEWAFYFSDO5G4QWZQCV": null, + "3CAVVDQOCSMOPC54JWKI5ITUIHAOV6SKAIIAAPAAJLBDTXEM": null, + "JHLSLTDSRVKDHWRGT55OZ5NC6YHGD35WHW4GPK77VZWPNVKN": null, + "6K266T5MHDGFA2XECSFBMTQDEE2C4S45S53XBODESR4ERZQ2": null, + "VJVWAFFKWR2KSTFIWGAKDUG5VQTO3BPTI7YFYZWZSBMMQOIN": null, + "LBSXWCQNCHYUG5M255T3W72NBPR5MBEPO7DFBCD277HSH4DU": null, + "WHJGXILLMK23VGXI2OY76SYJQOQHQOM6OSLT4BWU2KSD3OAD": null, + "DVNEV7ANOMDRQDHTEMD6CXCTO7NOY2MQQOLI47U4DDOJ2A2M": null, + "TV752NASHG7FG2JOGS5P6QLJ7E4W5NX7F3OYYRPOEFZ2NREE": null, + "S4DHHYVQ4RQ7HBAXW5ZKISZMULMLCFTSBFICXNLI2OD7YNF6": null, + "HSRHOS56TX6EEDHUZX7K4K6Y3R2UNHRINXWVK5WBWHDPE5HF": null, + "I67YQZJ4PXHFWHNHMK5SKUEFIL4EEP5B4BH3TJNHZPQOUXDM": null, + "5AJTMUDFBWW3EGM2TH2YAJYXIR6GKM7RPBYBRV6KEQPDDLM6": null, + "6V3SAQQC44I3CSDERRKA2533GYIWWHUZVR67JAWIBPJJBDBI": null, + "5I6EFWIWLDNJQAZNZN326TUHCUY5YOD4ITIT6NL7LWIK6RP4": null, + "NVT5LE35FIT6LKWBI5XZO2Q7CTQBFJ3IOIAFNXI7PGVHEE5W": null, + "E5I534XUV7GRNCMHBVFI7FMTSPYBGXKOFVXXSEQYYLVGCASL": null, + "3XDVU325YQTEXC7HFJKKKH7CTCCNNJZMV6VRT5GVED7HFKMZ": null, + "3UJYDR6QUVSSCRHJT6WWNEHC5OMYXOPL3EF26PU2A5HESFG5": null, + "UPRVTQXNXYCZG4JZIR7GZCCYTXR5VTUR2OKAJXEWGATCSIOH": null, + "2QLXXZPU57ZXMLJHEYDS6IHFLQHOKANOE5URI2TRNFNSIFUG": null, + "JFL3SN7LZ7M4RUZXRTYFQTUMYWYHO4P3ZKBGFDC2GGWZBPEA": null, + "SBLHOHHAOCNEVQI3UPBY5S4UKTTIH3DEJEDJHWMJ6VEWWTCL": null, + "TIE3GNWA2BE2WGFA7Y3KEHF4IF77M5XHZB3DIQLOE3GG4VQM": null, + "BB7XBWIYV33TZGTKHTBL4PDPH5ZQ6X7ZCMHS3KIQEJOLOXVH": null, + "6WO2JPOCRLCUSXS7BHNKFBDGFSEXCWYUFPK5SDZJTFJAEJRV": null, + "RCJUMHWKL3IBJ4ZVWHK4RCZ4RCVVTMG5ZO2KWZOIVZLJTSMT": null, + "YIXCNMIMZBA7NK2A5QOCLE77QFF6QDS7NGIHKMILIUB37EMH": null, + 
"ZSI25IY4L2U7CRPBLOYY5TCSAVG22XHHZFC7JZCRAVY46BWH": null, + "HUSGTJENHNIBJ7VSWZPOWFHHKKYH7H2YSP32LLQ2N7CWKRME": null, + "WTIJK6LZPBOCIJFBZEG26BETKTY5PJKQK5D3M5WVPWVSV7LN": null, + "ITSWONDXALFBD4WMGSMRKQXCVTL7JRKVFEHOAOODRQEFFSWC": null, + "IY3RQGYC2ME2TEUBYAQG5WJ7WOAJV5GTO6P3FKXWOLSJWGCD": null, + "MDSHVZ5WHCTCYB34ZABEUJJRXHQDKO5MSC5YVTGPMNJRXQK7": null, + "KHNSXQTOSCRSTX63S7OVO2LGMD7OVR6PZIGEKL5ZDYPCEKK6": null, + "LECVNJKLFT6P2HWX3H7ZC5DKJSSRZ7PWZVBN735K7I45SOX4": null, + "HFEO55KM3XH34UWCRYM5CFNQ6OFRAKM3U6TABNQDP74DT4JQ": null, + "QZOWC3TAAU67PVSBRJOOVZCBSRIOZCMLPB3FH4GS37WOSTEZ": null, + "TSB72AJ4HHOBEYK4CGFX3W4RW3SIECQYJMYISHTPPCGQNLFD": null, + "6AB4YKYVMU2PXRABAUBBBF4BJ3IOFKYWBJ2IMFMRVLCBI4S4": null, + "E3TI3V725PEP7U2CYZUUKJEBPAHOEI5SYCR3YZCMGD5EGYHF": null, + "APKJUBCO5NHY6QBYNA2ADB5TTPLCNZMHG7HGXXOLRBOZD46Y": null, + "QKL26OQG6L54OCKFPLMXI6M3EG2HI4EG34D7BNI5SBZG6OF7": null, + "W4KKIH4RPYXL4JZY24JWLHOATFNENBMSEQQ3DI7WW4PQIJQ3": null, + "4XJPWCDQXS6MSKI4EMFPENOX5FV7KMKKZ77LV6GJ6S7ZBVB2": null, + "LWMCTL5CEAVQDT5PEXFKRK7Q264CNVV6AU657OQ3SSPCDGSQ": null, + "CBMCT6STEYDOVXT6OW34OXGZBN2A77OGBPDRN5AZK4RXNEV5": null, + "OUGRMVL7PTQ3GJNWQ5WP7XXNYBIMVWKSNQ2QZH5RZRDEGUND": null, + "XKVAYVNQL7KW5EHGEWYRPSWDXNUEKP3YC3OXGEHKG7PGNZHL": null, + "ZLO6BOTXUEEW7UOENY2NVIFLSG37YPUETQBYYCQBJM5VXNG4": null, + "QMZJWAJYYE6WZQX3OKY34BPU7ZZN6ECDNIZOIXREE6AP3WJF": null, + "B5FU7VNVUSA3ODDEUDVTKE7GWPU3JQXWHRWYGLT6VFKSFYAA": null, + "HLHSAJUSHU5EY26UTR2UDJAM3BIHYYHF46MNLTQZJWDAUCDO": null, + "N73SLU2EDTPQ54MY5NWT7KPSVTO3TOGPO3DGX4HQYPZMFYJV": null, + "7B47MDGTJA3P2WX3KWLLESWTC7RJUSVUSBI42SATEYZVPL5K": null, + "YYLWTHMS5POBP3WVX5Q4NXQ77STWJTAHE6QK7GYMBIJU3TSX": null, + "JI7Q6GUWUTSJPHKUII5IVOUZ2QQ53EWNWCUM4PKECXWSVSEK": null, + "XR4W6GZYNYIDAFU7MWMIGFGF63OLKU4FWQZ4RAN3HWNXUINB": null, + "3KX2TVZZAQSYQHLDSWMVZVF4UAXYONTXXFWSGI6CJ56DXU6O": null, + "UR6JGWK36D4CU63DYI722UFUKLB2S52ZI4OAVZM7CVGGY3SW": null, + "VGQOGZH3H5IAFESOYWHOQGU5FHP4BJAUUK2B7AKDCJX3PUE5": null, + "Y5GO3VITRDTHWTMUULEA44BVX3GHVLIWFMTNUY2APWRL3JLD": null, + "F7U4AV4VU7YAEDK6SI64JJUNEHG2MEFLKNOI77IVDQS7BGJK": null, + "D77762UIMSS52GNAPWFCEEFPWGYLBPKWMBN75S3HCOI2SYCL": null, + "NNRBK2PM7FI7MVFBSUUCZVFTDOKXLNVK6I4MMUXU4AKDPTCK": null, + "YSGZXEZQRGZ3DSMNCNH6GSWGCRWQSIRD3IOR5E3XEUH5RORJ": null, + "P6KRZXZTESTNZHYLZFTDLZMIGIN5H74H2KYUTNRIC3JWCNJ2": null, + "QK36OWDC6RHQIASJXU2HZVIBARNIESSCWKICTRQ4B3OFUB6D": null, + "JQBWWRLDDMH3HACHKR7EKXFCAAR5E62DX3ALK22Y5AFA4JDZ": null, + "WWOLYDEZIQARIEC65MFSVB5RH3236B3E2YSGNEN2QY6A2G54": null, + "QT5UVU5QUPEY7VCTTW26JTO2FBUUBCRBYZORWGTYQZ2JZSCH": null, + "PJQFD75BGF35X4N33WD423KSDLIAWJNAZUBXQTUGHOW4PTXJ": null, + "A5VRG2DRN3CKKTP2DN5YNILXVCZRTKFXWILKWLZ6PNVKJTM5": null, + "TY4YPLWS4MDSKPG2HHIRSAWK37LFVB357RGGBRFP2P332HJL": null, + "SUC7ZGB6YKDYNAP6NTUZVEVNDL22KBCRZIGAWSOBUAL55LDE": null, + "Z5I7WKVA6754S4G7QWWXYTRZ3SGEG4B3KG5MLHP3GJDI3H7M": null, + "VLGCWOT665AT2R6EMOAVHNKVM2NKPSV4KI4CMNEUZ2YMI3UP": null, + "YJMQW3C2NIIZKTJY34XRL4HQ5A7EUMLMXTJFHHRE3NR3QGZF": null, + "TOEB56GVW7OQ7QLL25ZY3ABP4ZQS2ZZMIJRNTILE5CWA2IZB": null, + "5RGLCKE6D2MM5YH74OJBDHTOZIM7LN3EIYBLXVF4PGNBZON4": null, + "QMQPPFLB6NCEBYCQ5U2YWVYWRKZSFJLCDAAPYSPURDLXAU6G": null, + "UTAFBURT6XHHZV3Y5OZJBJJQT4342SSCOLWT35GZIJUPWTTM": null, + "XCIXTAB5SB5EMQLZW7GCBUS2N3XU44YELMLSYIFAJHHGP3VS": null, + "LWL5AM7Q4JPEDEORTLNDWUF5X4AIUI4QC5S4CWUXKXIWP6FP": null, + "M4RONO5HAPE25Q46SSRBVQLEXPCVQLKOKX2NYQWX2SNNGEVB": null, + "UVSHBLSXOHEF3AGG5PDTTDFVXNQPRHCNUTXYDEXJVXI7JCPL": null, + "C3PB24XOHCI52DU64XQL2V2OKZZYG5B4T6PUU44DZCH4DMSS": null, + 
"VGACAPDUB2J7KLW5PA474JQZWZ6QCDYYB2I32ZFYGXR64M2Y": null, + "NNDVFWEC2OE56D5PLJWEEVG25TMXCXISOUOYDOUEMUZRMZK4": null, + "57TIZBR3DXRX74YXCSJ2RXLZRXKX3K7H6WPS7DVONX7DOJY3": null, + "RFSCST6ITGG53EAZEBXD2VFQTJ53ATEOORQV6SQG5OSDR3FM": null, + "YLIXTKYNMODZNBM3L2EL435GD2LRJ5XAJBDZSYCU3OPZ4N4V": null, + "FBEPPECF3L4RB6QBQLGL44JDBCQCTQ5MOFYFCUQVNL4DCQYV": null, + "PAJST32KEXY6I2Y57OASSUFLF2BLPQQ7NZMVN6EVR7JS5LY3": null, + "76MHL43MEQWH6R552TULI3TLBOR22YDMJC5ZYQVWCNI4BWF2": null, + "A6KZM4OXBKW2NJ7X545F4LIDSC7LIAFYJ4CJSWW2BWSIRWUY": null, + "VND76C7TCKQT6R4X56OD4UYSOBZGC5BQ3LR6RXOX6LA3I5F6": null, + "CE2NODHXCRS4ML26HTI77Q57R7ZXKZO433LHHA66I5U3Y5GP": null, + "3DGAXWQDLZBPUYZPBGMRZG5DOBPTIHKAXFSCBLMEQHZ2A4W2": null, + "NAOTRV3ZNB2PK6RZJZ4UEQVF5M3YISGJNFZQQWPV2S5RL7XM": null, + "HLCSR65OBO7BJQPOA65Q6BRDVFPOL7FJII2LOANRJNUM2DDU": null, + "YCBI6X4JLTHKGAFR7XYKELWE7JW6VHLMFJIWF2ZC7BPCQFFR": null, + "QEF3LB5GFEMHAUDKPPNYGRKUUV6PAWU5XXYCFIHXI7PLGVGW": null, + "ABBQK5JKJZLMX4KGFODWSEHOPDTRHGDZCBO3ULBVOGUIAAGI": null, + "HZLWUQNBAZJSDZEB6IPXQIUMVWUPYVMVP2N72NJ4MOZFUKGT": null, + "7SQCYMGSMYW47TXUWC2J5674L4CRDIAO34D342D7IJ6OE23F": null, + "YAQCDYBXIOY3KZGFJCPS5VD7YQBPBFFFYEA4DPWFWJCWCJXA": null, + "HCZ7SSO422NW6O3ARCBUGNBCMUVEAHXMVKAJSDBHAQSFXIMV": null, + "DPX2FNJNMFQT34DLAOEIN4KMWJYLOEGV7U4VDH635AG6UA5T": null, + "QUO6FPOFFXUUKAZXYRN7N2MMT7IOJEG6NLFIH7B5JI5V2Y44": null, + "UJ6G5JMOINYVRLVISHWTGQDDDWA6X3QDFICKY4QQIHG3QMF4": null, + "ZR3VRUOZMQE4EMVT2WDB45TJ7KB5AGU5UBBPNL2A2D255MOL": null, + "2AFEUH4R6YAJZEODKJBMLDM4ANLCKRU2C33HFSVU2LLXZW5Y": null, + "3S4PV2VOBFB6GFRPG5SB3EHMZE5M7VAAFRJ3JQYHZEFTEKFX": null, + "6EFK4THSCBEG4LDSVE5N5FXSQJTYB5SQ7LKJRBL6IYIREWTN": null, + "HHYCWLKLIII7MJ4MYU7CJZ4YPOOUVWOKLXHZV5NT6LU7WWGX": null, + "XYRSXFI6XRY3YACAIVIZJAVKFTZPRH5FXD7E4P4LYUGX6I5U": null, + "6W72FMK5AP56TNCZ3LE5OTYZ3WYPARBB5AOXDVHGCBOTWZTO": null, + "7YNUW4DUCHUDJSSAWSYOYM2QXWTVSJWGDPIG2EAABTU4QLU5": null, + "HNVXP5XULHDT666ND2M3X2APGXOBCB7SCQB2D7MFQKKNVOS4": null, + "MLCMV4777C55OEVW3SFO4VHH56O7BSIDLZFYYTY3JXNN4DWG": null, + "MSNRSOCYC3HQCUXRLCBYFYQOMJFBDOSHJ3HYYYOHEPODETEE": null, + "BJB2U3W5ZF7WQVTL6R2F542WSS6FQDSVDMXNYWIC5PHED4HH": null, + "E26RFAVZYOV5WZ6WQDVINCGNG6ZYU2XCV4FPEKR45IASGARQ": null, + "4BK33GLSBFLRZHOHECAVVYT3LJHSQ5RFBSMKLMGTK4Z5RGZO": null, + "5I3526BP3QPLNDBEIPVQL2GOAMRBAYWOMILMQK5IT7RES3EV": null, + "IBHOZ4VNFYMLMUNOZIGK743IVASI3DXHCY2RH6SO4EKNGR4A": null, + "CCZT4EOMTISCMIVGMB2ZRUGFIR3R6WKU3ISSJ3VZVA6SBLFC": null, + "OGJUH7B3WKG3W2UFEBL63KLQGPSPRNIHUUKWTKQMBN5QG42E": null, + "Y6JCIA2AYVA3RDOUQFYWI4EMF64H5FIFNAHSKZ6LXCRXCFGW": null, + "VX3OCLLJZPGXWTLGERIMK5IS4OXKU65SMC4YS5JZND6VEPO7": null, + "UXWF26BRES53JKXYXEG5DWJXCR6USGPBWQBDJEVEBA2PPUAI": null, + "IB5SSNMYSFCNB4ODT5OQ2GAGPIVDWOBEI3P3EBWI7AUGC7BR": null, + "ASIAQKC3VSFJE7ZW472ZOAXX2T7JTCLZBN5BYEOAE7E67F5Z": null, + "BKQ3GY255BDDVZ52IIR5K3NFIEKV6GXBVTX3ROY3IN7XDAHA": null, + "JTV5ULWFJJOSFTX32FA6DJWADX5UL3NV4RZZS3Z64IPXDZNK": null, + "GSZ7MZXCFKAWFBXKRVYUDULPJEH3WSI2K634LAAA36M2FRF3": null, + "HSPTZMNCONTGJGIUWP7ZR277AYWTDIKPAWO4RODOIHQGEUF3": null, + "4ABCWRBBUAO5TVOSOZDF3KMCUCKIUCRJSBGH4WGKDHWH3LLN": null, + "K3BK4XFUTDJLS7KY4WJBS7RTZ65HY4N5NJ6AMKNKGO3K6DXG": null, + "S4VZEKYRNOXUITHJENCBKJN6CC6QV7Y4MIHQ6NLN24OJFMBP": null, + "MRM4HMHS2KAISLXU2XYFQCQH7XRVVC3EXSP6JU7FIM2DJHVV": null, + "QMNCR2JQYOST5MD3HI2I4MCTSJDFCAGUTEE6XKM2THC4WXI3": null, + "ANIF4DT5IA4IY7M5OISD4IW4J2TDVHHFIPEONUU4CV75LOFZ": null, + "TQHJIX3NKO5CMVRNOG4WP3YDSGPLTTCRBA3RDBPECWO6EN5U": null, + "6KI4L4RRXZ6WL3TRMCZLAA2W7AQRXDCC43O6AGYJ75NUEQO6": null, + 
"VL57QTQMHQOAX5MFQTX7GUWOECHVTLYJHIBRKMWIRF4QMN6M": null, + "JFTGBEP2LSZGDDFGV6IV2JAS3J3HB7BDRB6WEYHSC5EIFNA3": null, + "WLEZIN5PPCJE4W2LGEPW4N6AWQ4RLE2AOGFBTETY5HNRSZCY": null, + "LUJRHNRDNK7YOKST7KRVQGVE2ERU3LUVPZLC5YYLCUAX2EEU": null, + "F2OOXAP3FFVMQMJP5IVDLRVV6IP2NUTGT5MGZCJMR2IFNA55": null, + "SVL525L4TVBTLMH22DTXVCNECAZVUTMMYDTA3UQGV7U6P3YU": null, + "546BD77L33PUPQ7TW3GJJVVJTYKHIIKF7YOO4SSGIIIOIJ2Z": null, + "FCZRNOURTDJI2BE7HJ3P4MMY4WYAPFFBCTVFXVTYSK4UB4JK": null, + "S2U5XQEC7I4HOUUR6HFXUF2PR2CLNF5UEIPJHTNF2JM5BZUJ": null, + "JTQGSOTIPVYKGYJBUQC3Y44RWE372S7MPMFDETMH6OEGUJWA": null, + "W3DTWDN6YOEPVUJVUDNPWCLQMXXCLXQPVYU27675LZM4ONDF": null, + "LJ7P7AJNNHJE24PNWQDK7J4VGGNZKKR3OPVRFV5A4U6LRFRW": null, + "I4QO3SZC4455G5PQIJPUUNI4A2BPJKTH5MBA7LN3HRIW6EFZ": null, + "NNR3EXDDDPBTOKTRBPR5SO4OFPXU376ZIEHA6YHEJK57ZRGH": null, + "4MBIV5HD4ZMXY5NIKZQIFKFO7S642PC7CWVX7ATXAXWQWNGU": null, + "A6S5KTN66UWYBWG5CZXJVCJ2F2EA22BCZDFQMM523DU7VFBG": null, + "AOAPML4IEVJZSZUOONTTDYSEN465IHW7MZXHSQ55E47TJ2NW": null, + "ZIW5DXTPGQLTTRHPRQB7SADQPCSTXQRMKHZIXA6T6YW2BMRS": null, + "OUHF6P7JPB5Z2C2E5MEPNQ5R3NY56KNQFHG3RYGWXBYKRRI7": null, + "M3JXOC3CTIEMVHTQW7HB2WQ7L7Q54AWFY6F2UBTSZMDXHTDI": null, + "WQQA7JW5NTSAI73WVQMMAJ4IO6OKZR32GTQTMUWE2HLC7DRH": null, + "WGDVRLD5YWTXFOFTGBBEFCG455EK3BZCZEE2POAX56O3EOQW": null, + "6KAR3LTBYTJ6WRGOUQ2TEPZKWVVBPGCO4OVAN2ADNDNLTOSM": null, + "VEWL2DORBATRWF5HJ7LG66NYWMXH37JJU6XWVGJNDVL3OSAM": null, + "B4HXCGMG5S3VEOZR5IUYOZAEFL6WPRLXB26SLWZHRY3WL3ZU": null, + "REKI5EIO6TNXBWJIENJDQ5CAYEYZC2GXVPAOIWGVXKN2K3OE": null, + "RBOY2SVFDRIKJZWTWVEGSJLHGIMAZXIF5HNBZAKPVRTEFR3A": null, + "SIOSHIHS52CHIB73RFONJOM2HJBTRLGGDFW6JAWTR6UPJBVB": null, + "ZGHKTW43CXC3CBOLSENIMDQUIR22VNXSE6HFBT6ZUA3TPODL": null, + "XA576OGJNZK3AXE3FJLSSGN5MGRK4FJJ2XX3UHHFI6NO7P25": null, + "22VL7B7ZI53VMYEYHKBI7XYNKRQW634B6RLBRXRJN4CQJA6G": null, + "YVHEMU6OLF3FI2MTMZ25QKT5F2OUO3H6CX4WRNEXVH4BO3WZ": null, + "3VJOBPUJI5DUFHNRM456UWSAHIZYXICY7ZFYMECCUHDTG444": null, + "BXLYOEBYW7R4MWPNTOZ2ZJPDDXBHV7JMH75NCD26VGS2VRH7": null, + "GCZOZTTLMHTAKVUQ4TSDOOPHEGK6PH4JQ3ULKZCUIEDW2HLX": null, + "TE6B4JOPAY2CEQ46KJV7N6MY7OAI4VA3UBBXGPOELN7KY5T7": null, + "62BPAKDHZ7ZRJOMVZ2SX2WAKFQATUQLKDQ7YGACJPL2NP4UR": null, + "6U3MTBST24MIPS3HSZJPTBSZJ54T63GMZGZQPGFBS7JGBUVX": null, + "SXBYC5BKXYT4DFNVSS4OVRYLNA5JY4TXMVTIFSCRT6Q5C265": null, + "TAMMZOCU5GDDOIRG6U53NW3UUEUGI3QWW6YY6GWNE7WIHRNE": null, + "O37BSKPNGDUUN24XFSCR2IQHWFYUZJBPWSNC4L43MH5HV272": null, + "AZFWV3I3DANXV3HJB66QMCFJ3UTVJGS3R3IP7VROF2D5JF6U": null, + "W2BLY5MU4PA7HSENYW4MO6VHHPBXUFCFMSBWTCL5F3BIPBFT": null, + "ILFL3JG2XQGZCFBJHZFQRZMLGHZWVRBX3M5Z3HXW2A4GDMNK": null, + "O7VDC3EDG5I567RK5BKXMKA2R6XPHQQPDXQTCZP7BHKV5BK6": null, + "5ZMIJOTL3NPRKBEPS7542ICCNHHUJ2BNOZ7LSL2ICTCUFGSC": null, + "FCTKLPX7PYRSVVBH4KN46HEYLJEYSRTQ3PCUU5RY3BROUZKU": null, + "7EAMJ4C7NUA3XMDDOF3YRJA7Q2BFXFCI4J24ZXSVE7REBRFU": null, + "NOQ6OL6G7QD4ILQ3FAEMEMROBCK4OK3LX3CRMHMA6GG5YIYK": null, + "N3XXFRDRYPWGZ3LDGLIXGLLJYKEHPW4565BUUL67OLMXP554": null, + "GFHEEY3HHESZDI2YHTDDCBKGE3ZPWPXW2WE5AKNL2Y2TJCNJ": null, + "JTUQIEPPBM2QC6XBY3KP2NSDM6WUCJMORCBXNU6ZPXPOHEIW": null, + "V5VTVJ36JZDDO5FIOEZNAKYWPRQSUSTEZAKARDOHQEIFLACE": null, + "KUTOUNOS2QL4O6HF266ANCBNYUSIT66TANXCGYALCPZXULQH": null, + "CJILUW3VUBZCSO5DXTSE4HSVW5UJAJHHCSHGHADKCQE5OTBU": null, + "HD72YVUCR4IY5MG3E73UM35ARFCUIHEPIMUSQXOKLVTT2V67": null, + "ELVTZXW3ZAKR76K6IV6ZHX7WTMKPKIFRPHIU3LK67WQR6IQG": null, + "NIZTNRLCIS4JDLMOTEAKPA6B2JOE76ZBQDEUUGXENEJBYJFV": null, + "FE743ELPJYDYWTJ374PZVE4TNGZPDZWSNUO5PATNYUYBDTBJ": null, + 
"FIT3JHSZMFJ7N2E7BL6PHEUUHSBHU55YBIH763TGSZQTIAZY": null, + "AP7SXAQ6HW373QPCHKKA4R7NFFUCD7CFB7EERZBNXRGM2Z7R": null, + "BAW6TALQRWXK2OWS2O2UV72BKZGBZCUPR6AXKFZ7WHZXZXWX": null, + "SM3FE7H4NI76AS5YCH6O34LTROZKJ7FEP4IKL33JUBI7FWZJ": null, + "BNO2EJXVMAPFDVF34NBTPSIYKI3UPFI5G2K6KGVU35TIHOQL": null, + "INHCLRC6WPTE2U7OQRSSRGCN4B5K4BUVSGFHEKG5DWJWICOF": null, + "OEHWM7QZ3H67QC4ZQY2USKQA62NLMESA543KWPBUKV7N65TQ": null, + "BN3U5KDJP3QGOSBX3TH5R2DR6PZA5Z7BEGVG6MYRW5GWUCGT": null, + "ID7S7JEGBCI7ES3ZN7PIW5NEP67WTL5H5IB6WVRYS47EEEJ2": null, + "BG4MHZNCCLZQN563CZ2D72CPT5TASZZ6N6L4JOW2XPR7GIDQ": null, + "IAQKAEEUCCT3ZZY5LCO4AZW6F6ZNGUAF56UCD43OPOKSMBHR": null, + "LFCSYYMAFZ5IAB5O4QIEN5GERYIGTH4JH266LORQ36SB2A26": null, + "J7W62H5B42N3YUEFS5F2MECESYBUVHXSIGRMZL6ZNQMCYQMQ": null, + "J24FG5J3MUQOCDAVVCHM6BJWZW4Y7VCSC73DW32TMKIUOXPB": null, + "DGJE6OPBSV3JW45P5WC3EGUMIETVT7MIZX4EA5SCY2F3JKFL": null, + "KWTJYJJT2LMBHE6WH4LNJEHNMNJVOEQ7SLLXGJKWPGOFDYHQ": null, + "7XZDDO3CCV34UECRKEM344EDKKUFD6YDUJ7EW4OAETABTYWV": null, + "BPS2OSY2SOAFFPRY24IEXGZVBEUNVIWZYTVVZRUWT7XM727T": null, + "6GRVGHI3FMCQ5MQ4JR7ORANBXK6GMSI4XRQVE35LPH44XROO": null, + "MZHRR2BS6HII6JD3H32DPSYTTHCVXE4WSC7NUURU43Z5SD56": null, + "QJBSWHFQKABG4CALELT62JWLMW2JVZP35RDYHWHQPZYTIX5M": null, + "K2Y3YRBFB7F5PJJUFJDH5Z5NL2MYOQWGT5T5VI5SP7TVM5NW": null, + "QLONNH4NMZX5WLEJPQEWJECL5JTLTWDSK234NU5H55GA6PFG": null, + "NSSRUR3GI6B7NBK77ZQIIHOA4TEEA5UXVVMRWVLMRIP6SN3T": null, + "WB2TXRE7EPSBGACXUA4YE23M4WLMG3PVRMD2OOCIHNGQVDRY": null, + "3MDCQC5BPGFGGFDO4C4IY53NPTWZMRK5MWLJG2KX7OWVQNFO": null +} diff --git a/src/s3select/rapidjson/bin/types/booleans.json b/src/s3select/rapidjson/bin/types/booleans.json new file mode 100644 index 000000000..2dcbb5fe8 --- /dev/null +++ b/src/s3select/rapidjson/bin/types/booleans.json @@ -0,0 +1,102 @@ +[ + true, + true, + false, + false, + true, + true, + true, + false, + false, + true, + false, + false, + true, + false, + false, + false, + true, + false, + false, + true, + true, + false, + true, + true, + true, + false, + false, + false, + true, + false, + true, + false, + false, + true, + true, + true, + true, + true, + true, + false, + false, + true, + false, + false, + false, + true, + true, + false, + true, + true, + false, + true, + false, + true, + true, + true, + false, + false, + false, + true, + false, + false, + false, + true, + true, + false, + true, + true, + true, + true, + true, + true, + true, + true, + false, + false, + false, + false, + false, + true, + true, + true, + true, + true, + true, + true, + false, + false, + false, + true, + false, + false, + false, + true, + true, + true, + false, + false, + true, + false +] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/types/floats.json b/src/s3select/rapidjson/bin/types/floats.json new file mode 100644 index 000000000..12b94a11d --- /dev/null +++ b/src/s3select/rapidjson/bin/types/floats.json @@ -0,0 +1,102 @@ +[ + 135.747111636, + 123.377054008, + 140.527504552, + -72.299143906, + -23.851678949, + 73.586193519, + -158.299382442, + 177.477876032, + 32.268518982, + -139.560009969, + 115.203105183, + -106.025823607, + 167.224138231, + 103.378383732, + -97.498486285, + 18.184723416, + 69.137075711, + 33.849002681, + -120.185228215, + -20.841408615, + -172.659492727, + -2.691464061, + 22.426164066, + -98.416909437, + -31.603082708, + -85.072296561, + 108.620987395, + -43.127078238, + -126.473562057, + -158.595489097, + -57.890678254, + -13.254016573, + -85.024504709, + 171.663552644, + -146.495558248, + -10.606748276, + -118.786969354, + 
153.352057804, + -45.215545083, + 37.038725288, + 106.344071897, + -64.607402031, + 85.148030911, + 28.897784566, + 39.51082061, + 20.450382102, + -113.174943618, + 71.60785784, + -168.202648062, + -157.338200017, + 10.879588527, + -114.261694831, + -5.622927072, + -173.330830616, + -29.47002003, + -39.829034201, + 50.031545162, + 82.815735508, + -119.188760828, + -48.455928081, + 163.964263034, + 46.30378861, + -26.248889762, + -47.354615322, + 155.388677633, + -166.710356904, + 42.987233558, + 144.275297374, + 37.394383186, + -122.550388725, + 177.469945914, + 101.104677413, + 109.429869885, + -104.919625624, + 147.522756541, + -81.294703727, + 122.744731363, + 81.803603684, + 26.321556167, + 147.045441354, + 147.256895816, + -174.211095908, + 52.518769316, + -78.58250334, + -173.356685435, + -107.728209264, + -69.982325771, + -113.776095893, + -35.785267074, + -105.748545976, + -30.206523864, + -76.185311723, + -126.400112781, + -26.864958639, + 56.840053629, + 93.781553535, + -116.002949803, + -46.617140948, + 176.846840093, + -144.24821335 +] diff --git a/src/s3select/rapidjson/bin/types/guids.json b/src/s3select/rapidjson/bin/types/guids.json new file mode 100644 index 000000000..9d7f5dbc8 --- /dev/null +++ b/src/s3select/rapidjson/bin/types/guids.json @@ -0,0 +1,102 @@ +[ + "d35bf0d4-8d8f-4e17-a5c3-ad9bfd675266", + "db402774-eeb6-463b-9986-c458c44d8b5a", + "2a2e4101-b5f2-40b8-8750-e03f01661e60", + "76787cfa-f4eb-4d62-aaad-e1d588d00ad5", + "fd73894b-b500-4a7c-888c-06b5bd9cec65", + "cce1862a-cf31-4ef2-9e23-f1d23b4e6163", + "00a98bb0-2b6e-4368-8512-71c21aa87db7", + "ab9a8d69-cec7-4550-bd35-3ed678e22782", + "f18b48e1-5114-4fbe-9652-579e8d66950e", + "4efe3baa-7ac5-4d6a-a839-6b9cfe825764", + "b4aec119-5b0a-434c-b388-109816c482a5", + "e0ef0cbb-127a-4a28-9831-5741b4295275", + "d50286a5-cb7b-4c9e-be99-f214439bae8c", + "a981094c-f1ac-42ed-a9fa-86404c7210ff", + "2a34ee57-5815-4829-b77b-eeebaa8fe340", + "a0530d44-48f8-4eff-b9ea-8810c4308351", + "c6f91509-83e1-4ea1-9680-e667fbfd56ee", + "cab11402-dcdd-4454-b190-6da124947395", + "283d159c-2b18-4856-b4c7-5059252eaa15", + "146157c6-72a8-4051-9991-cb6ea6743d81", + "aef6f269-7306-4bd2-83f7-6d5605b5dc9a", + "37fe6027-d638-4017-80a9-e7b0567b278e", + "5003d731-33fb-4159-af61-d76348a44079", + "e0e06979-5f80-4713-9fe0-8a4d60dc89f8", + "7e85bdc3-0345-4cb6-9398-ccab06e79976", + "f2ebf5af-6568-4ffe-a46d-403863fd4b66", + "e0b5bb1c-b4dd-4535-9a9e-3c73f1167d46", + "c852d20b-6bcb-4b12-bd57-308296c64c5a", + "7ac3ae82-1818-49cd-a8a4-5ac77dfafd46", + "138004a9-76e2-4ad7-bd42-e74dabdbb803", + "ab25b5be-96be-45b0-b765-947b40ec36a6", + "08404734-fd57-499e-a4cf-71e9ec782ede", + "8dfdeb16-248b-4a21-bf89-2e22b11a4101", + "a0e44ef0-3b09-41e8-ad5d-ed8e6a1a2a67", + "a7981e49-188d-414a-9779-b1ad91e599d1", + "329186c0-bf27-4208-baf7-c0a0a5a2d5b7", + "cb5f3381-d33e-4b30-b1a9-f482623cad33", + "15031262-ca73-4e3c-bd0a-fcf89bdf0caf", + "6d7333d1-2e8c-4d78-bfde-5be47e70eb13", + "acaa160c-670a-4e8f-ac45-49416e77d5f9", + "228f87eb-cde4-4106-808b-2dbf3c7b6d2e", + "2ff830a3-5445-4d8e-b161-bddd30666697", + "f488bedd-ff6e-4108-b9a7-07f6da62f476", + "2e12b846-0a34-478e-adf7-a438493803e6", + "6686b8ef-7446-4d86-bd8c-df24119e3bfe", + "e474a5c5-5793-4d41-b4ab-5423acc56ef1", + "ac046573-e718-44dc-a0dc-9037eeaba6a9", + "6b0e9099-cf53-4d5a-8a71-977528628fcf", + "d51a3f22-0ff9-4087-ba9b-fcee2a2d8ade", + "bdc01286-3511-4d22-bfb8-76d01203d366", + "ca44eb84-17ff-4f27-8f1e-1bd25f4e8725", + "4e9a8c2f-be0b-4913-92d2-c801b9a50d04", + "7685d231-dadd-4041-9165-898397438ab7", + 
"86f0bf26-d66a-44d8-99f5-d6768addae3b", + "2ca1167c-72ba-45a0-aa42-faf033db0d0b", + "199a1182-ea55-49ff-ba51-71c29cdd0aac", + "be6a4dd2-c821-4aa0-8b83-d64d6644b5b2", + "4c5f4781-7f80-4daa-9c20-76b183000514", + "513b31bd-54fb-4d12-a427-42a7c13ff8e1", + "8e211bcb-d76c-4012-83ad-74dd7d23b687", + "44d5807e-0501-4f66-8779-e244d4fdca0a", + "db8cd555-0563-4b7b-b00c-eada300a7065", + "cb14d0c9-46cc-4797-bd3a-752b05629f07", + "4f68b3ef-ac9b-47a0-b6d7-57f398a5c6a5", + "77221aae-1bcf-471c-be45-7f31f733f9d6", + "42a7cac8-9e80-4c45-8c71-511d863c98ea", + "f9018d22-b82c-468c-bdb5-8864d5964801", + "75f4e9b8-62a2-4f21-ad8a-e19eff0419bc", + "9b7385c8-8653-4184-951c-b0ac1b36b42e", + "571018aa-ffbf-4b42-a16d-07b57a7f5f0e", + "35de4a2f-6bf1-45aa-b820-2a27ea833e44", + "0b8edb20-3bb4-4cb4-b089-31957466dbab", + "97da4778-9a7b-4140-a545-968148c81fb7", + "969f326c-8f2a-47c5-b41c-d9c2f06c9b9d", + "ae211037-8b53-4b17-bfc8-c06fc7774409", + "12c5c3c4-0bd5-45d3-bc1d-d04a3c65d3e6", + "ec02024f-ce43-4dd3-8169-a59f7baee043", + "5b6afe77-ce48-47ca-90a0-25cd10ca5ffd", + "2e3a61d4-6b8f-4d2f-ba86-878b4012efd8", + "19a88a67-a5d3-4647-898f-1cde07bce040", + "6db6f420-b5c8-48b9-bbb2-8864fe6fed65", + "5a45dbde-7b53-4f6b-b864-e3b63be3708a", + "c878321b-8a02-4239-9981-15760c2e7d15", + "4e36687f-8bf6-4b12-b496-3a8e382d067e", + "a59a63cd-43c0-4c6e-b208-6dbca86f8176", + "303308c4-2e4a-45b5-8bf3-3e66e9ad05a1", + "8b58fdf1-43a6-4c98-9547-6361b50791af", + "a3563591-72ed-42b5-8e41-bac1d76d70cf", + "38db8c78-3739-4f6e-8313-de4138082114", + "86615bea-7e73-4daf-95da-ae6b9eee1bbb", + "35d38e3e-076e-40dd-9aa8-05be2603bd59", + "9f84c62d-b454-4ba3-8c19-a01878985cdc", + "6721bbae-d765-4a06-8289-6fe46a1bf943", + "0837796f-d0dd-4e50-9b7c-1983e6cc7c48", + "021eb7d7-e869-49b9-80c3-9dd16ce2d981", + "819c56f8-e040-475d-aad5-c6d5e98b20aa", + "3a61ef02-735e-4229-937d-b3777a3f4e1f", + "79dfab84-12e6-4ec8-bfc8-460ae71e4eca", + "a106fabf-e149-476c-8053-b62388b6eb57", + "9a3900a5-bfb4-4de0-baa5-253a8bd0b634" +] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/types/integers.json b/src/s3select/rapidjson/bin/types/integers.json new file mode 100644 index 000000000..5dd05e097 --- /dev/null +++ b/src/s3select/rapidjson/bin/types/integers.json @@ -0,0 +1,102 @@ +[ + 8125686, + 8958709, + 5976222, + 1889524, + 7968493, + 1357486, + 118415, + 7081097, + 4635968, + 7555332, + 2270233, + 3428352, + 8699968, + 2087333, + 7861337, + 7554440, + 2017031, + 7981692, + 6060687, + 1877715, + 3297474, + 8373177, + 6158629, + 7853641, + 3004441, + 9650406, + 2695251, + 1180761, + 4988426, + 6043805, + 8063373, + 6103218, + 2848339, + 8188690, + 9235573, + 5949816, + 6116081, + 6471138, + 3354531, + 4787414, + 9660600, + 942529, + 7278535, + 7967399, + 554292, + 1436493, + 267319, + 2606657, + 7900601, + 4276634, + 7996757, + 8544466, + 7266469, + 3301373, + 4005350, + 6437652, + 7717672, + 7126292, + 8588394, + 2127902, + 7410190, + 1517806, + 4583602, + 3123440, + 7747613, + 5029464, + 9834390, + 3087227, + 4913822, + 7550487, + 4518144, + 5862588, + 1778599, + 9493290, + 5588455, + 3638706, + 7394293, + 4294719, + 3837830, + 6381878, + 7175866, + 8575492, + 1415229, + 1453733, + 6972404, + 9782571, + 4234063, + 7117418, + 7293130, + 8057071, + 9345285, + 7626648, + 3358911, + 4574537, + 9371826, + 7627107, + 6154093, + 5392367, + 5398105, + 6956377 +] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/types/mixed.json b/src/s3select/rapidjson/bin/types/mixed.json new file mode 100644 index 000000000..43e9a1d7b --- /dev/null +++ 
b/src/s3select/rapidjson/bin/types/mixed.json @@ -0,0 +1,592 @@ +[ + { + "favoriteFruit": "banana", + "greeting": "Hello, Kim! You have 10 unread messages.", + "friends": [ + { + "name": "Higgins Rodriquez", + "id": 0 + }, + { + "name": "James Floyd", + "id": 1 + }, + { + "name": "Gay Stewart", + "id": 2 + } + ], + "range": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "tags": [ + "pariatur", + "ad", + "eiusmod", + "sit", + "et", + "velit", + "culpa" + ], + "longitude": -57.919246, + "latitude": -36.022812, + "registered": "Friday, March 21, 2014 9:13 PM", + "about": "Laborum nulla aliquip ullamco proident excepteur est officia ipsum. Eiusmod exercitation minim ex do labore reprehenderit aliqua minim qui excepteur reprehenderit cupidatat. Sint enim exercitation duis id consequat nisi enim magna. Commodo aliqua id ipsum sit magna enim. Veniam officia in labore fugiat veniam ea laboris ex veniam duis.\r\n", + "address": "323 Pulaski Street, Ronco, North Carolina, 7701", + "phone": "+1 (919) 438-2678", + "email": "kim.griffith@cipromox.biz", + "company": "CIPROMOX", + "name": { + "last": "Griffith", + "first": "Kim" + }, + "eyeColor": "green", + "age": 26, + "picture": "http://placehold.it/32x32", + "balance": "$1,283.55", + "isActive": false, + "guid": "10ab0392-c5e2-48a3-9473-aa725bad892d", + "index": 0, + "_id": "551b91198238a0bcf9a41133" + }, + { + "favoriteFruit": "banana", + "greeting": "Hello, Skinner! You have 9 unread messages.", + "friends": [ + { + "name": "Rhonda Justice", + "id": 0 + }, + { + "name": "Audra Castaneda", + "id": 1 + }, + { + "name": "Vicky Chavez", + "id": 2 + } + ], + "range": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "tags": [ + "dolore", + "enim", + "sit", + "non", + "exercitation", + "fugiat", + "adipisicing" + ], + "longitude": -60.291407, + "latitude": -84.619318, + "registered": "Friday, February 7, 2014 3:17 AM", + "about": "Consectetur eiusmod laboris dolore est ullamco nulla in velit quis esse Lorem. Amet aliqua sunt aute occaecat veniam officia in duis proident aliqua cupidatat mollit. Sint eu qui anim duis ut anim duis eu cillum. Cillum nostrud adipisicing tempor Lorem commodo sit in ad qui non et irure qui. Labore eu aliquip id duis eiusmod veniam.\r\n", + "address": "347 Autumn Avenue, Fidelis, Puerto Rico, 543", + "phone": "+1 (889) 457-2319", + "email": "skinner.maddox@moltonic.co.uk", + "company": "MOLTONIC", + "name": { + "last": "Maddox", + "first": "Skinner" + }, + "eyeColor": "green", + "age": 22, + "picture": "http://placehold.it/32x32", + "balance": "$3,553.10", + "isActive": false, + "guid": "cfbc2fb6-2641-4388-b06d-ec0212cfac1e", + "index": 1, + "_id": "551b91197e0abe92d6642700" + }, + { + "favoriteFruit": "strawberry", + "greeting": "Hello, Reynolds! You have 5 unread messages.", + "friends": [ + { + "name": "Brady Valdez", + "id": 0 + }, + { + "name": "Boyer Golden", + "id": 1 + }, + { + "name": "Gladys Knapp", + "id": 2 + } + ], + "range": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "tags": [ + "commodo", + "eiusmod", + "cupidatat", + "et", + "occaecat", + "proident", + "Lorem" + ], + "longitude": 140.866287, + "latitude": 1.401032, + "registered": "Monday, October 20, 2014 8:01 AM", + "about": "Deserunt elit consequat ea dolor pariatur aute consectetur et nulla ipsum ad. Laboris occaecat ipsum ad duis et esse ea ut voluptate. Ex magna consequat pariatur amet. Quis excepteur non mollit dolore cillum dolor ex esse veniam esse deserunt non occaecat veniam. Sit amet proident proident amet. 
Nisi est id ut ut adipisicing esse fugiat non dolor aute.\r\n", + "address": "872 Montague Terrace, Haena, Montana, 3106", + "phone": "+1 (974) 410-2655", + "email": "reynolds.sanford@combot.biz", + "company": "COMBOT", + "name": { + "last": "Sanford", + "first": "Reynolds" + }, + "eyeColor": "green", + "age": 21, + "picture": "http://placehold.it/32x32", + "balance": "$3,664.47", + "isActive": true, + "guid": "f9933a9c-c41a-412f-a18d-e727c569870b", + "index": 2, + "_id": "551b91197f170b65413a06e3" + }, + { + "favoriteFruit": "banana", + "greeting": "Hello, Neva! You have 7 unread messages.", + "friends": [ + { + "name": "Clara Cotton", + "id": 0 + }, + { + "name": "Ray Gates", + "id": 1 + }, + { + "name": "Jacobs Reese", + "id": 2 + } + ], + "range": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "tags": [ + "magna", + "labore", + "incididunt", + "velit", + "ea", + "et", + "eiusmod" + ], + "longitude": -133.058479, + "latitude": 87.803677, + "registered": "Friday, May 9, 2014 5:41 PM", + "about": "Do duis occaecat ut officia occaecat officia nostrud reprehenderit ex excepteur aute anim in reprehenderit. Cupidatat nulla eiusmod nulla non minim veniam aute nulla deserunt adipisicing consectetur veniam. Sit consequat ex laboris aliqua labore consectetur tempor proident consequat est. Fugiat quis esse culpa aliquip. Excepteur laborum aliquip sunt eu cupidatat magna eiusmod amet nisi labore aliquip. Ut consectetur esse aliquip exercitation nulla ex occaecat elit do ex eiusmod deserunt. Ex eu voluptate minim deserunt fugiat minim est occaecat ad Lorem nisi.\r\n", + "address": "480 Eagle Street, Fostoria, Oklahoma, 2614", + "phone": "+1 (983) 439-3000", + "email": "neva.barker@pushcart.us", + "company": "PUSHCART", + "name": { + "last": "Barker", + "first": "Neva" + }, + "eyeColor": "brown", + "age": 36, + "picture": "http://placehold.it/32x32", + "balance": "$3,182.24", + "isActive": true, + "guid": "52489849-78e1-4b27-8b86-e3e5ab2b7dc8", + "index": 3, + "_id": "551b9119a13061c083c878d5" + }, + { + "favoriteFruit": "banana", + "greeting": "Hello, Rodgers! You have 6 unread messages.", + "friends": [ + { + "name": "Marguerite Conway", + "id": 0 + }, + { + "name": "Margarita Cunningham", + "id": 1 + }, + { + "name": "Carmela Gallagher", + "id": 2 + } + ], + "range": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "tags": [ + "ipsum", + "magna", + "amet", + "elit", + "sit", + "occaecat", + "elit" + ], + "longitude": -125.436981, + "latitude": 19.868524, + "registered": "Tuesday, July 8, 2014 8:09 PM", + "about": "In cillum esse tempor do magna id ad excepteur ex nostrud mollit deserunt aliqua. Minim aliqua commodo commodo consectetur exercitation nulla nisi dolore aliqua in. Incididunt deserunt mollit nostrud excepteur. Ipsum fugiat anim deserunt Lorem aliquip nisi consequat eu minim in ex duis.\r\n", + "address": "989 Varanda Place, Duryea, Palau, 3972", + "phone": "+1 (968) 578-2974", + "email": "rodgers.conner@frenex.net", + "company": "FRENEX", + "name": { + "last": "Conner", + "first": "Rodgers" + }, + "eyeColor": "blue", + "age": 23, + "picture": "http://placehold.it/32x32", + "balance": "$1,665.17", + "isActive": true, + "guid": "ed3b2374-5afe-4fca-9325-8a7bbc9f81a0", + "index": 4, + "_id": "551b91197bcedb1b56a241ce" + }, + { + "favoriteFruit": "strawberry", + "greeting": "Hello, Mari! 
You have 10 unread messages.", + "friends": [ + { + "name": "Irwin Boyd", + "id": 0 + }, + { + "name": "Dejesus Flores", + "id": 1 + }, + { + "name": "Lane Mcmahon", + "id": 2 + } + ], + "range": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "tags": [ + "esse", + "aliquip", + "excepteur", + "dolor", + "ex", + "commodo", + "anim" + ], + "longitude": -17.038176, + "latitude": 17.154663, + "registered": "Sunday, April 6, 2014 4:46 AM", + "about": "Excepteur veniam occaecat sint nulla magna in in officia elit. Eiusmod qui dolor fugiat tempor in minim esse officia minim consequat. Lorem ullamco labore proident ipsum id pariatur fugiat consectetur anim cupidatat qui proident non ipsum.\r\n", + "address": "563 Hendrickson Street, Westwood, South Dakota, 4959", + "phone": "+1 (980) 434-3976", + "email": "mari.fleming@beadzza.org", + "company": "BEADZZA", + "name": { + "last": "Fleming", + "first": "Mari" + }, + "eyeColor": "blue", + "age": 21, + "picture": "http://placehold.it/32x32", + "balance": "$1,948.04", + "isActive": true, + "guid": "6bd02166-3b1f-4ed8-84c9-ed96cbf12abc", + "index": 5, + "_id": "551b9119b359ff6d24846f77" + }, + { + "favoriteFruit": "strawberry", + "greeting": "Hello, Maxine! You have 7 unread messages.", + "friends": [ + { + "name": "Sullivan Stark", + "id": 0 + }, + { + "name": "Underwood Mclaughlin", + "id": 1 + }, + { + "name": "Kristy Carlson", + "id": 2 + } + ], + "range": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "tags": [ + "commodo", + "ipsum", + "quis", + "non", + "est", + "mollit", + "exercitation" + ], + "longitude": -105.40635, + "latitude": 37.197993, + "registered": "Tuesday, January 20, 2015 12:30 AM", + "about": "Proident ullamco Lorem est consequat consectetur non eiusmod esse nostrud pariatur eiusmod enim exercitation eiusmod. Consequat duis elit elit minim ullamco et dolor eu minim do tempor esse consequat excepteur. Mollit dolor do voluptate nostrud quis anim cillum velit tempor eiusmod adipisicing tempor do culpa. Eu magna dolor sit amet nisi do laborum dolore nisi. Deserunt ipsum et deserunt non nisi.\r\n", + "address": "252 Boulevard Court, Brenton, Tennessee, 9444", + "phone": "+1 (950) 466-3377", + "email": "maxine.moreno@zentia.tv", + "company": "ZENTIA", + "name": { + "last": "Moreno", + "first": "Maxine" + }, + "eyeColor": "brown", + "age": 24, + "picture": "http://placehold.it/32x32", + "balance": "$1,200.24", + "isActive": false, + "guid": "ce307a37-ca1f-43f5-b637-dca2605712be", + "index": 6, + "_id": "551b91195a6164b2e35f6dc8" + }, + { + "favoriteFruit": "strawberry", + "greeting": "Hello, Helga! You have 5 unread messages.", + "friends": [ + { + "name": "Alicia Vance", + "id": 0 + }, + { + "name": "Vinson Phelps", + "id": 1 + }, + { + "name": "Francisca Kelley", + "id": 2 + } + ], + "range": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "tags": [ + "nostrud", + "eiusmod", + "dolore", + "officia", + "sint", + "non", + "qui" + ], + "longitude": -7.275151, + "latitude": 75.54202, + "registered": "Wednesday, October 1, 2014 6:35 PM", + "about": "Quis duis ullamco velit qui. Consectetur non adipisicing id magna anim. Deserunt est officia qui esse. Et do pariatur incididunt anim ad mollit non. 
Et eiusmod sunt fugiat elit mollit ad excepteur anim nisi laboris eiusmod aliquip aliquip.\r\n", + "address": "981 Bush Street, Beaulieu, Vermont, 3775", + "phone": "+1 (956) 506-3807", + "email": "helga.burch@synkgen.name", + "company": "SYNKGEN", + "name": { + "last": "Burch", + "first": "Helga" + }, + "eyeColor": "blue", + "age": 22, + "picture": "http://placehold.it/32x32", + "balance": "$3,827.89", + "isActive": false, + "guid": "ff5dfea0-1052-4ef2-8b66-4dc1aad0a4fb", + "index": 7, + "_id": "551b911946be8358ae40e90e" + }, + { + "favoriteFruit": "banana", + "greeting": "Hello, Shaw! You have 5 unread messages.", + "friends": [ + { + "name": "Christian Cardenas", + "id": 0 + }, + { + "name": "Cohen Pennington", + "id": 1 + }, + { + "name": "Mary Lindsay", + "id": 2 + } + ], + "range": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "tags": [ + "occaecat", + "ut", + "occaecat", + "magna", + "exercitation", + "incididunt", + "irure" + ], + "longitude": -89.102972, + "latitude": 89.489596, + "registered": "Thursday, August 21, 2014 5:00 PM", + "about": "Amet cupidatat quis velit aute Lorem consequat pariatur mollit deserunt et sint culpa excepteur duis. Enim proident duis qui ex tempor sunt nostrud occaecat. Officia sit veniam mollit eiusmod minim do aute eiusmod fugiat qui anim adipisicing in laboris. Do tempor reprehenderit sunt laborum esse irure dolor ad consectetur aute sit id ipsum. Commodo et voluptate anim consequat do. Minim laborum ad veniam ad minim incididunt excepteur excepteur aliqua.\r\n", + "address": "237 Pierrepont Street, Herbster, New York, 3490", + "phone": "+1 (976) 455-2880", + "email": "shaw.zamora@shadease.me", + "company": "SHADEASE", + "name": { + "last": "Zamora", + "first": "Shaw" + }, + "eyeColor": "blue", + "age": 38, + "picture": "http://placehold.it/32x32", + "balance": "$3,440.82", + "isActive": false, + "guid": "ac5fdb0e-e1fb-427e-881d-da461be0d1ca", + "index": 8, + "_id": "551b9119af0077bc28a2de25" + }, + { + "favoriteFruit": "apple", + "greeting": "Hello, Melissa! You have 5 unread messages.", + "friends": [ + { + "name": "Marion Villarreal", + "id": 0 + }, + { + "name": "Kate Rose", + "id": 1 + }, + { + "name": "Hines Simon", + "id": 2 + } + ], + "range": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9 + ], + "tags": [ + "amet", + "veniam", + "mollit", + "ad", + "cupidatat", + "deserunt", + "Lorem" + ], + "longitude": -52.735052, + "latitude": 16.258838, + "registered": "Wednesday, April 16, 2014 7:56 PM", + "about": "Aute ut culpa eiusmod tempor duis dolor tempor incididunt. Nisi non proident excepteur eiusmod incididunt nisi minim irure sit. 
In veniam commodo deserunt proident reprehenderit et consectetur ullamco quis nulla cupidatat.\r\n", + "address": "642 Halsey Street, Blandburg, Kansas, 6761", + "phone": "+1 (941) 539-3851", + "email": "melissa.vaughn@memora.io", + "company": "MEMORA", + "name": { + "last": "Vaughn", + "first": "Melissa" + }, + "eyeColor": "brown", + "age": 24, + "picture": "http://placehold.it/32x32", + "balance": "$2,399.44", + "isActive": true, + "guid": "1769f022-a7f1-4a69-bf4c-f5a5ebeab2d1", + "index": 9, + "_id": "551b9119b607c09c7ffc3b8a" + } +] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/types/nulls.json b/src/s3select/rapidjson/bin/types/nulls.json new file mode 100644 index 000000000..7a636ec87 --- /dev/null +++ b/src/s3select/rapidjson/bin/types/nulls.json @@ -0,0 +1,102 @@ +[ + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null, + null +] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/types/paragraphs.json b/src/s3select/rapidjson/bin/types/paragraphs.json new file mode 100644 index 000000000..8ab3e1c56 --- /dev/null +++ b/src/s3select/rapidjson/bin/types/paragraphs.json @@ -0,0 +1,102 @@ +[ + "Commodo ullamco cupidatat nisi sit proident ex. Cillum pariatur occaecat in officia do commodo nisi cillum tempor minim. Ad dolor ut et aliquip fugiat eu officia cupidatat occaecat consectetur eiusmod veniam enim officia.\r\n", + "Adipisicing cillum laborum nisi irure. Cillum dolor proident duis nulla qui mollit dolore reprehenderit mollit. Irure nulla dolor ipsum irure nulla quis laboris do.\r\n", + "Est adipisicing consectetur incididunt in. Occaecat ea magna ex consequat irure sit laborum cillum officia magna sunt do exercitation aliquip. Laboris id aute in dolore reprehenderit voluptate non deserunt laborum.\r\n", + "Consectetur eu aute est est occaecat adipisicing sint enim dolor eu. Tempor amet id non mollit eu consectetur cillum duis. Eu labore velit nulla ipsum commodo consequat aliquip. Cupidatat commodo dolore mollit enim sit excepteur nisi duis laboris deserunt esse.\r\n", + "Incididunt ullamco est fugiat enim fugiat. Do sit mollit anim ad excepteur eu laboris exercitation officia labore nulla ut. Voluptate non voluptate cillum sit et voluptate anim duis velit consequat aliquip dolor. Elit et et esse laboris consectetur officia eiusmod aliquip nisi est. Qui labore dolore ad dolor.\r\n", + "Anim adipisicing est irure proident sit officia ullamco voluptate sunt consectetur duis mollit excepteur veniam. Nostrud ut duis aute exercitation officia et quis elit commodo elit tempor aute aliquip enim. Est officia non cillum consequat voluptate ipsum sit voluptate nulla id.\r\n", + "Ipsum enim consectetur aliquip nulla commodo ut ex aliqua elit duis do. Officia et sunt aliqua dolor minim voluptate veniam esse elit enim. 
Adipisicing reprehenderit duis ex magna non in fugiat sunt ipsum nostrud fugiat aliquip. Labore voluptate id officia voluptate eu. Magna do nostrud excepteur sunt aliqua adipisicing qui.\r\n", + "Est occaecat non non cupidatat laborum qui. Veniam sit est voluptate labore sit irure consectetur fugiat. Anim enim enim fugiat exercitation anim ad proident esse in aliqua. Laboris ut aute culpa ullamco.\r\n", + "Sit et aliquip cupidatat deserunt eiusmod sint aliquip occaecat nostrud aliqua elit commodo ut magna. Amet sit est deserunt id duis in officia pariatur cupidatat ex. Mollit duis est consequat nulla aute velit ipsum sit consectetur pariatur ut non ex ipsum. Tempor esse velit pariatur reprehenderit et nostrud commodo laborum mollit labore.\r\n", + "Aliquip irure quis esse aliquip. Ex non deserunt culpa aliqua ad anim occaecat ad. Lorem consectetur mollit eu consectetur est non nisi non ipsum. Qui veniam ullamco officia est ut excepteur. Nulla elit dolore cupidatat aliqua enim Lorem elit consequat eiusmod non aliqua eu in. Pariatur in culpa labore sint ipsum consectetur occaecat ad ex ipsum laboris aliquip officia. Non officia eiusmod nisi officia id id laboris deserunt sunt enim magna mollit sit.\r\n", + "Mollit velit laboris laborum nulla aliquip consequat Lorem non incididunt irure. Eu voluptate sint do consectetur tempor sit Lorem in. Laborum eiusmod nisi Lorem ipsum dolore do aute laborum occaecat aute sunt. Sit laborum in ea do ipsum officia irure cillum irure nisi laboris. Ad anim deserunt excepteur ea veniam eiusmod culpa velit veniam. Commodo incididunt ea Lorem eu enim esse nisi incididunt mollit.\r\n", + "Velit proident sunt aute dolore reprehenderit culpa. Pariatur reprehenderit commodo ad ea voluptate anim nulla ipsum eu irure fugiat aliqua et. Adipisicing incididunt anim excepteur voluptate minim qui culpa. Sunt veniam enim reprehenderit magna magna. Sit ad amet deserunt ut aute dolore ad minim.\r\n", + "Esse ullamco sunt mollit mollit. Eu enim dolore laboris cupidatat. Cupidatat adipisicing non aute exercitation fugiat. Non ut cillum labore fugiat aliquip ex duis quis consectetur ut nisi Lorem amet qui. Proident veniam amet qui reprehenderit duis qui. Nisi culpa sit occaecat ullamco occaecat laborum fugiat ut. Non duis deserunt culpa duis.\r\n", + "Id ipsum eiusmod laboris non est ipsum deserunt labore duis reprehenderit deserunt. Sint tempor fugiat eiusmod nostrud in ut laborum esse in nostrud sit deserunt nostrud reprehenderit. Cupidatat aliqua qui anim consequat eu quis consequat consequat elit ipsum pariatur. Cupidatat in dolore velit quis. Exercitation cillum ullamco ex consectetur commodo tempor incididunt exercitation labore ad dolore. Minim incididunt consequat adipisicing esse eu eu voluptate.\r\n", + "Anim sint eiusmod nisi anim do deserunt voluptate ut cillum eiusmod esse ex reprehenderit laborum. Dolore nulla excepteur duis excepteur. Magna nisi nostrud duis non commodo velit esse ipsum Lorem incididunt. Nulla enim consequat ad aliqua. Incididunt irure culpa nostrud ea aute ex sit non ad esse.\r\n", + "Ullamco nostrud cupidatat adipisicing anim fugiat mollit eu. Et ut eu in nulla consequat. Sunt do pariatur culpa non est.\r\n", + "Pariatur incididunt reprehenderit non qui excepteur cillum exercitation nisi occaecat ad. Lorem aliquip laborum commodo reprehenderit sint. Laboris qui ut veniam magna quis et et ullamco voluptate. Tempor reprehenderit deserunt consequat nisi. Esse duis sint in tempor. 
Amet aute cupidatat in sint et.\r\n", + "Est officia nisi dolore consequat irure et excepteur. Sit qui elit tempor magna qui cillum anim amet proident exercitation proident. Eu cupidatat laborum consectetur duis ullamco irure nulla. Adipisicing culpa non reprehenderit anim aute.\r\n", + "Eu est laborum culpa velit dolore non sunt. Tempor magna veniam ea sit non qui Lorem qui exercitation aliqua aliqua et excepteur eiusmod. Culpa aute anim proident culpa adipisicing duis tempor elit aliquip elit nulla laboris esse dolore. Sit adipisicing non dolor eiusmod occaecat cupidatat.\r\n", + "Culpa velit eu esse sunt. Laborum irure aliqua reprehenderit velit ipsum fugiat officia dolor ut aute officia deserunt. Ipsum sit quis fugiat nostrud aliqua cupidatat ex pariatur et. Cillum proident est irure nisi dolor aliqua deserunt esse occaecat velit dolor.\r\n", + "Exercitation nulla officia sit eiusmod cillum eu incididunt officia exercitation qui Lorem deserunt. Voluptate Lorem minim commodo laborum esse in duis excepteur do duis aliquip nisi voluptate consectetur. Amet tempor officia enim ex esse minim reprehenderit.\r\n", + "Laboris sint deserunt ad aute incididunt. Anim officia sunt elit qui laborum labore commodo irure non. Mollit adipisicing ullamco do aute nulla eu laborum et quis sint aute adipisicing amet. Aliqua officia irure nostrud duis ex.\r\n", + "Eiusmod ipsum aliqua reprehenderit esse est non aute id veniam eiusmod. Elit consequat ad sit tempor elit eu incididunt quis irure ad. Eu incididunt veniam consequat Lorem nostrud cillum officia ea consequat ad cillum. Non nisi irure cupidatat incididunt pariatur incididunt. Duis velit officia ad cillum qui. Aliquip consequat sint aute nisi cillum. Officia commodo nisi incididunt laborum nisi voluptate aliquip Lorem cupidatat anim consequat sit laboris.\r\n", + "Veniam cupidatat et incididunt mollit do ex voluptate veniam nostrud labore esse. Eiusmod irure sint fugiat esse. Aute irure consectetur ut mollit nulla sint esse. Lorem ut quis ex proident nostrud mollit nostrud ea duis duis in magna anim consectetur.\r\n", + "Irure culpa esse qui do dolor fugiat veniam ad. Elit commodo aute elit magna incididunt tempor pariatur velit irure pariatur cillum et ea ad. Ad consequat ea et ad minim ut sunt qui commodo voluptate. Laboris est aliquip anim reprehenderit eu officia et exercitation. Occaecat laboris cupidatat Lorem ullamco in nostrud commodo ipsum in quis esse ex.\r\n", + "Incididunt officia quis voluptate eiusmod esse nisi ipsum quis commodo. Eiusmod dolore tempor occaecat sit exercitation aliqua minim consequat minim mollit qui ad nisi. Aute quis irure adipisicing veniam nisi nisi velit deserunt incididunt anim nostrud.\r\n", + "Voluptate exercitation exercitation id minim excepteur excepteur mollit. Fugiat aute proident nulla ullamco ea. Nisi ea culpa duis dolore veniam anim tempor officia in dolore exercitation exercitation. Dolore quis cillum adipisicing sunt do nulla esse proident ad sint.\r\n", + "Laborum ut mollit sint commodo nulla laborum deserunt Lorem magna commodo mollit tempor deserunt ut. Qui aliquip commodo ea id. Consectetur dolor fugiat dolor excepteur eiusmod. Eu excepteur ex aute ex ex elit ex esse officia cillum exercitation. Duis ut labore ea nostrud excepteur. Reprehenderit labore aute sunt nisi quis Lorem officia. Ad aliquip cupidatat voluptate exercitation voluptate ad irure magna quis.\r\n", + "Tempor velit veniam sit labore elit minim do elit cillum eiusmod sunt excepteur nisi. 
Aliquip est deserunt excepteur duis fugiat incididunt veniam fugiat. Pariatur sit irure labore et minim non. Cillum quis aute anim sint laboris laboris ullamco exercitation nostrud. Nulla pariatur id laborum minim nisi est adipisicing irure.\r\n", + "Irure exercitation laboris nostrud in do consectetur ad. Magna aliqua Lorem culpa exercitation sint do culpa incididunt mollit eu exercitation. Elit tempor Lorem dolore enim deserunt. Anim et ullamco sint ullamco mollit cillum officia et. Proident incididunt laboris aliquip laborum sint veniam deserunt eu consequat deserunt voluptate laboris. Anim Lorem non laborum exercitation voluptate. Cupidatat reprehenderit culpa Lorem fugiat enim minim consectetur tempor quis ad reprehenderit laboris irure.\r\n", + "Deserunt elit mollit nostrud occaecat labore reprehenderit laboris ex. Esse reprehenderit adipisicing cillum minim in esse aliquip excepteur ex et nisi cillum quis. Cillum labore ut ex sunt. Occaecat proident et mollit magna consequat irure esse. Dolor do enim esse nisi ad.\r\n", + "Pariatur est anim cillum minim elit magna adipisicing quis tempor proident nisi laboris incididunt cupidatat. Nulla est adipisicing sit adipisicing id nostrud amet qui consequat eiusmod tempor voluptate ad. Adipisicing non magna sit occaecat magna mollit ad ex nulla velit ea pariatur. Irure labore ad ea exercitation ex cillum.\r\n", + "Lorem fugiat eu eu cillum nulla tempor sint. Lorem id officia nulla velit labore ut duis ad tempor non. Excepteur quis aute adipisicing nisi nisi consectetur aliquip enim Lorem id ullamco cillum sint voluptate. Qui aliquip incididunt tempor aliqua voluptate labore reprehenderit. Veniam eiusmod elit occaecat voluptate tempor culpa consectetur ea ut exercitation eiusmod exercitation qui.\r\n", + "Aliqua esse pariatur nulla veniam velit ea. Aliquip consectetur tempor ex magna sit aliquip exercitation veniam. Dolor ullamco minim commodo pariatur. Et amet reprehenderit dolore proident elit tempor eiusmod eu incididunt enim ullamco. Adipisicing id officia incididunt esse dolor sunt cupidatat do deserunt mollit do non. Magna ut officia fugiat adipisicing quis ea cillum laborum dolore ad nostrud magna minim est. Dolor voluptate officia proident enim ea deserunt eu voluptate dolore proident laborum officia ea.\r\n", + "Culpa aute consequat esse fugiat cupidatat minim voluptate voluptate eiusmod irure anim elit. Do eiusmod culpa laboris consequat incididunt minim nostrud eiusmod commodo velit ea ullamco proident. Culpa pariatur magna ut mollit nisi. Ea officia do magna deserunt minim nisi tempor ea deserunt veniam cillum exercitation esse.\r\n", + "Anim ullamco nostrud commodo Lorem. Do sunt laborum exercitation proident proident magna. Lorem officia laborum laborum dolor sunt duis commodo Lorem. Officia aute adipisicing ea cupidatat ea dolore. Aliquip adipisicing pariatur consectetur aliqua sit amet officia reprehenderit laborum culpa. Occaecat Lorem eu nisi do Lorem occaecat enim eiusmod laboris id quis. Ad mollit adipisicing sunt adipisicing esse.\r\n", + "Laborum quis sit adipisicing cupidatat. Veniam Lorem eiusmod esse esse sint nisi labore elit et. Deserunt aliqua mollit ut commodo aliqua non incididunt ipsum reprehenderit consectetur. Eiusmod nulla minim laboris Lorem ea Lorem aute tempor pariatur in sit. Incididunt culpa ut do irure amet irure cupidatat est anim anim culpa occaecat. Est velit consectetur eiusmod veniam reprehenderit officia sunt occaecat eiusmod ut sunt occaecat amet.\r\n", + "Elit minim aute fugiat nulla ex quis. 
Labore fugiat sint nostrud amet quis culpa excepteur in. Consectetur exercitation cupidatat laborum sit. Aute nisi eu aliqua est deserunt eiusmod commodo dolor id. Mollit laborum esse sint ipsum voluptate reprehenderit velit et. Veniam aliquip enim in veniam Lorem voluptate quis deserunt consequat qui commodo ut excepteur aute.\r\n", + "Dolore deserunt veniam aute nisi labore sunt et voluptate irure nisi anim ea. Magna nisi quis anim mollit nisi est dolor do ex aliquip elit aliquip ipsum minim. Dolore est officia nostrud eiusmod ex laborum ea amet est. Officia culpa non est et tempor consectetur exercitation tempor eiusmod enim. Ea tempor laboris qui amet ex nisi culpa dolore consectetur incididunt sunt sunt. Lorem aliquip incididunt magna do et ullamco ex elit aliqua eiusmod qui. Commodo amet dolor sint incididunt ex veniam non Lorem fugiat.\r\n", + "Officia culpa enim voluptate dolore commodo. Minim commodo aliqua minim ex sint excepteur cupidatat adipisicing eu irure. Anim magna deserunt anim Lorem non.\r\n", + "Cupidatat aliquip nulla excepteur sunt cupidatat cupidatat laborum cupidatat exercitation. Laboris minim ex cupidatat culpa elit. Amet enim reprehenderit aliqua laborum est tempor exercitation cupidatat ex dolore do. Do incididunt labore fugiat commodo consectetur nisi incididunt irure sit culpa sit. Elit aute occaecat qui excepteur velit proident cillum qui aliqua ex do ex. Dolore irure ex excepteur veniam id proident mollit Lorem.\r\n", + "Ad commodo cillum duis deserunt elit officia consectetur veniam eiusmod. Reprehenderit et veniam ad commodo reprehenderit magna elit laboris sunt non quis. Adipisicing dolor aute proident ea magna sunt et proident in consectetur.\r\n", + "Veniam exercitation esse esse veniam est nisi. Minim velit incididunt sint aute dolor anim. Fugiat cupidatat id ad nisi in voluptate dolor culpa eiusmod magna eiusmod amet id. Duis aliquip labore et ex amet amet aliquip laborum eiusmod ipsum. Quis qui ut duis duis. Minim in voluptate reprehenderit aliqua.\r\n", + "Elit ut pariatur dolor veniam ipsum consequat. Voluptate Lorem mollit et esse dolore mollit Lorem ad. Elit nostrud eu Lorem labore mollit minim cupidatat officia quis minim dolore incididunt. In cillum aute cillum ut.\r\n", + "Commodo laborum deserunt ut cupidatat pariatur ullamco in esse anim exercitation cillum duis. Consectetur incididunt sit esse Lorem in aute. Eiusmod mollit Lorem consequat minim reprehenderit laborum enim excepteur irure nisi elit. Laborum esse proident aute aute proident adipisicing laborum. Pariatur tempor duis incididunt qui velit pariatur ut officia ea mollit labore dolore. Cillum pariatur minim ullamco sunt incididunt culpa id ullamco exercitation consectetur. Ea exercitation consequat reprehenderit ut ullamco velit eu ad velit magna excepteur eiusmod.\r\n", + "Eu deserunt magna laboris laborum laborum in consequat dolore. Officia proident consectetur proident do occaecat minim pariatur officia ipsum sit non velit officia cillum. Laborum excepteur labore eu minim eiusmod. Sit anim dolore cillum ad do minim culpa sit est ad.\r\n", + "Cupidatat dolor nostrud Lorem sint consequat quis. Quis labore sint incididunt officia tempor. Fugiat nostrud in elit reprehenderit dolor. Nisi sit enim officia minim est adipisicing nulla aute labore nulla nostrud cupidatat est. Deserunt dolore qui irure Lorem esse voluptate velit qui nostrud.\r\n", + "Fugiat Lorem amet nulla nisi qui amet laboris enim cillum. 
Dolore occaecat exercitation id labore velit do commodo ut cupidatat laborum velit fugiat mollit. Ut et aliqua pariatur occaecat. Lorem occaecat dolore quis esse enim cupidatat exercitation ut tempor sit laboris fugiat adipisicing. Est tempor ex irure consectetur ipsum magna labore. Lorem non quis qui minim nisi magna amet aliquip ex cillum fugiat tempor.\r\n", + "Aliquip eiusmod laborum ipsum deserunt velit esse do magna excepteur consectetur exercitation sit. Minim ullamco reprehenderit commodo nostrud exercitation id irure ex qui ullamco sit esse laboris. Nulla cillum non minim qui cillum nisi aute proident. Dolor anim culpa elit quis excepteur aliqua eiusmod. Elit ea est excepteur consectetur sunt eiusmod enim id commodo irure amet et pariatur laboris. Voluptate magna ad magna dolore cillum cillum irure laboris ipsum officia id Lorem veniam.\r\n", + "Esse sunt elit est aliquip cupidatat commodo deserunt. Deserunt pariatur ipsum qui ad esse esse magna qui cillum laborum. Exercitation veniam pariatur elit amet enim.\r\n", + "Esse quis in id elit nulla occaecat incididunt. Et amet Lorem mollit in veniam do. Velit mollit Lorem consequat commodo Lorem aliquip cupidatat. Minim consequat nostrud nulla in nostrud.\r\n", + "Cillum nulla et eu est nostrud quis elit cupidatat dolor enim excepteur exercitation nisi voluptate. Nulla dolore non ex velit et qui tempor proident id deserunt nisi eu. Tempor ad Lorem ipsum reprehenderit in anim. Anim dolore ullamco enim deserunt quis ex id exercitation velit. Magna exercitation fugiat mollit pariatur ipsum ex consectetur nostrud. Id dolore officia nostrud excepteur laborum. Magna incididunt elit ipsum pariatur adipisicing enim duis est qui commodo velit aute.\r\n", + "Quis esse ex qui nisi dolor. Ullamco laborum dolor esse laboris eiusmod ea magna laboris ea esse ut. Dolore ipsum pariatur veniam sint mollit. Lorem ea proident fugiat ullamco ut nisi culpa eu exercitation exercitation aliquip veniam laborum consectetur.\r\n", + "Pariatur veniam laboris sit aliquip pariatur tempor aute sunt id et ut. Laboris excepteur eiusmod nisi qui quis elit enim ut cupidatat. Et et laborum in fugiat veniam consectetur ipsum laboris duis excepteur ullamco aliqua dolor Lorem. Aliqua ex amet sint anim cupidatat nisi ipsum anim et sunt deserunt. Occaecat culpa ut tempor cillum pariatur ex tempor.\r\n", + "Dolor deserunt eiusmod magna do officia voluptate excepteur est cupidatat. Veniam qui cupidatat amet anim est qui consectetur sit commodo commodo ea ad. Enim ad adipisicing qui nostrud. Non nulla esse ullamco nulla et ex.\r\n", + "Id ullamco ea consectetur est incididunt deserunt et esse. Elit nostrud voluptate eiusmod ut. Excepteur adipisicing qui cupidatat consequat labore id. Qui dolor aliqua do dolore do cupidatat labore ex consectetur ea sit cillum. Sint veniam eiusmod in consectetur consequat fugiat et mollit ut fugiat esse dolor adipisicing.\r\n", + "Ea magna proident labore duis pariatur. Esse cillum aliquip dolor duis fugiat ea ex officia ea irure. Sint elit nisi pariatur sunt nostrud exercitation ullamco culpa magna do.\r\n", + "Minim aliqua voluptate dolor consequat sint tempor deserunt amet magna excepteur. Irure do voluptate magna velit. Nostrud in reprehenderit magna officia nostrud. Cupidatat nulla irure laboris non fugiat ex ex est cupidatat excepteur officia aute velit duis. Sit voluptate id ea exercitation deserunt culpa voluptate nostrud est adipisicing incididunt. 
Amet proident laborum commodo magna ipsum quis.\r\n", + "Ipsum consectetur consectetur excepteur tempor eiusmod ea fugiat aute velit magna in officia sunt. Sit ut sunt dolore cupidatat dolor adipisicing. Veniam nisi adipisicing esse reprehenderit amet aliqua voluptate ex commodo occaecat est voluptate mollit sunt. Pariatur aliqua qui qui in dolor. Fugiat reprehenderit sit nostrud do sint esse. Tempor sit irure adipisicing ea pariatur duis est sit est incididunt laboris quis do. Et voluptate anim minim aliquip excepteur consequat nisi anim pariatur aliquip ut ipsum dolor magna.\r\n", + "Cillum sit labore excepteur magna id aliqua exercitation consequat laborum Lorem id pariatur nostrud. Lorem qui est labore sint cupidatat sint excepteur nulla in eu aliqua et. Adipisicing velit do enim occaecat laboris quis excepteur ipsum dolor occaecat Lorem dolore id exercitation.\r\n", + "Incididunt in laborum reprehenderit eiusmod irure ex. Elit duis consequat minim magna. Esse consectetur aliquip cillum excepteur excepteur fugiat. Sint tempor consequat minim reprehenderit consectetur adipisicing dolor id Lorem elit non. Occaecat esse quis mollit ea et sint aute fugiat qui tempor. Adipisicing tempor duis non dolore irure elit deserunt qui do.\r\n", + "Labore fugiat eiusmod sint laborum sit duis occaecat. Magna in laborum non cillum excepteur nostrud sit proident pariatur voluptate voluptate adipisicing exercitation occaecat. Ad non dolor aute ex sint do do minim exercitation veniam laborum irure magna ea. Magna do non quis sit consequat Lorem aliquip.\r\n", + "Velit anim do laborum laboris laborum Lorem. Sunt do Lorem amet ipsum est sint velit sit do voluptate mollit veniam enim. Commodo do deserunt in pariatur ut elit sint elit deserunt ea. Ad dolor anim consequat aliquip ut mollit nostrud tempor sunt mollit elit. Reprehenderit laboris labore excepteur occaecat veniam adipisicing cupidatat esse. Ad enim aliquip ea minim excepteur magna. Sint velit veniam pariatur qui dolor est adipisicing ex laboris.\r\n", + "Ea cupidatat ex nulla in sunt est sit dolor enim ad. Eu tempor consequat cupidatat consequat ex incididunt sint culpa. Est Lorem Lorem non cupidatat sunt ut aliqua non nostrud do ullamco. Reprehenderit ad ad nulla nostrud do nulla in. Ipsum adipisicing commodo mollit ipsum exercitation. Aliqua ea anim anim est elit. Ea incididunt consequat minim ad sunt eu cillum.\r\n", + "Tempor quis excepteur eiusmod cupidatat ipsum occaecat id et occaecat. Eiusmod magna aliquip excepteur id amet elit. Ullamco dolore amet anim dolor enim ea magna magna elit. Occaecat magna pariatur in deserunt consectetur officia aliquip ullamco ex aute anim. Minim laborum eu sit elit officia esse do irure pariatur tempor et reprehenderit ullamco labore.\r\n", + "Sit tempor eu minim dolore velit pariatur magna duis reprehenderit ea nulla in. Amet est do consectetur commodo do adipisicing adipisicing in amet. Cillum id ut commodo do pariatur duis aliqua nisi sint ad irure officia reprehenderit. Mollit labore id enim fugiat ullamco irure mollit cupidatat. Quis nisi amet labore eu dolor occaecat commodo aliqua laboris deserunt excepteur deserunt officia. Aliqua non ut sit ad. Laborum veniam ad velit minim dolore ea id magna dolor qui in.\r\n", + "Dolore nostrud ipsum aliqua pariatur id reprehenderit enim ad eiusmod qui. Deserunt anim commodo pariatur excepteur velit eu irure nulla ex labore ipsum aliqua minim aute. Id consequat amet tempor aliquip ex elit adipisicing est do. Eu enim Lorem consectetur minim id irure nulla culpa. 
Consectetur do consequat aute tempor anim. Qui ad non elit dolor est adipisicing nisi amet cillum sunt quis anim laboris incididunt. Incididunt proident adipisicing labore Lorem.\r\n", + "Et reprehenderit ea officia veniam. Aliquip ullamco consequat elit nisi magna mollit id elit. Amet amet sint velit labore ad nisi. Consectetur tempor id dolor aliqua esse deserunt amet. Qui laborum enim proident voluptate aute eu aute aute sit sit incididunt eu. Sunt ullamco nisi nostrud labore commodo non consectetur quis do duis minim irure. Tempor sint dolor sint aliquip dolore nostrud fugiat.\r\n", + "Aute ullamco quis nisi ut excepteur nostrud duis elit. Veniam ex ad incididunt veniam voluptate. Commodo dolore ullamco sit sint adipisicing proident amet aute duis deserunt.\r\n", + "Labore velit eu cillum nisi. Laboris do cupidatat et non duis cillum. Ullamco dolor tempor cupidatat voluptate laborum ullamco ea duis.\r\n", + "Deserunt consequat aliqua duis aliquip nostrud nostrud dolore nisi. Culpa do sint laborum consectetur ipsum quis laborum laborum pariatur eiusmod. Consectetur laboris ad ad ut quis. Ullamco laboris qui velit id laborum voluptate qui aute nostrud aliquip ea.\r\n", + "Ad cillum anim ex est consectetur mollit id in. Non enim aliquip consequat qui deserunt commodo cillum ad laborum fugiat. Dolor deserunt amet laborum tempor adipisicing voluptate dolor pariatur dolor cillum. Eu mollit ex sunt officia veniam qui est sunt proident. Non aliqua qui elit eu cupidatat ex enim ex proident. Lorem sit minim ullamco officia cupidatat duis minim. Exercitation laborum deserunt voluptate culpa tempor quis nulla id pariatur.\r\n", + "Nostrud quis consectetur ut aliqua excepteur elit consectetur occaecat. Occaecat voluptate Lorem pariatur consequat ullamco fugiat minim. Anim voluptate eu eu cillum tempor dolore aliquip aliqua. Fugiat incididunt ut tempor amet minim. Voluptate nostrud minim pariatur non excepteur ullamco.\r\n", + "Dolore nulla velit officia exercitation irure laboris incididunt anim in laborum in fugiat ut proident. Fugiat aute id consequat fugiat officia ut. Labore sint amet proident amet sint nisi laboris amet id ullamco culpa quis consequat proident. Magna do fugiat veniam dolore elit irure minim. Esse ullamco excepteur labore tempor labore fugiat dolore nisi cupidatat irure dolor pariatur. Magna excepteur laboris nisi eiusmod sit pariatur mollit.\r\n", + "In enim aliquip officia ea ad exercitation cillum culpa occaecat dolore Lorem. Irure cillum commodo adipisicing sunt pariatur ea duis fugiat exercitation laboris culpa ullamco aute. Ut voluptate exercitation qui dolor. Irure et duis elit consequat deserunt proident.\r\n", + "Officia ea Lorem sunt culpa id et tempor excepteur enim deserunt proident. Dolore aliquip dolor laboris cillum proident velit. Et culpa occaecat exercitation cupidatat irure sint adipisicing excepteur pariatur incididunt ad occaecat. Qui proident ipsum cillum minim. Quis ut culpa irure aliqua minim fugiat. In voluptate cupidatat fugiat est laborum dolor esse in pariatur voluptate.\r\n", + "Voluptate enim ipsum officia aute ea adipisicing nisi ut ex do aliquip amet. Reprehenderit enim voluptate tempor ex adipisicing culpa. Culpa occaecat voluptate dolor mollit ipsum exercitation labore et tempor sit ea consectetur aliqua. Elit elit sit minim ea ea commodo do tempor cupidatat irure dolore. Occaecat esse adipisicing anim eiusmod commodo fugiat mollit amet. 
Incididunt tempor tempor qui occaecat cupidatat in.\r\n", + "Ut qui anim velit enim aliquip do ut nulla labore. Mollit ut commodo ut eiusmod consectetur laboris aliqua qui voluptate culpa fugiat incididunt elit. Lorem ullamco esse elit elit. Labore amet incididunt ea nulla aliquip eiusmod. Sit nulla est voluptate officia ipsum aute aute cillum tempor deserunt. Laboris commodo eiusmod labore sunt aute excepteur ea consectetur reprehenderit veniam nisi. Culpa nisi sint sunt sint tempor laboris dolore cupidatat.\r\n", + "Duis cillum qui nisi duis amet velit ad cillum ut elit aute sint ad. Amet laboris pariatur excepteur ipsum Lorem aliqua veniam Lorem quis mollit cupidatat aliqua exercitation. Pariatur ex ullamco sit commodo cillum eiusmod ut proident elit cillum. Commodo ut ipsum excepteur occaecat sint elit consequat ex dolor adipisicing consectetur id ut ad. Velit sit eiusmod est esse tempor incididunt consectetur eiusmod duis commodo veniam.\r\n", + "Ut sunt qui officia anim laboris exercitation Lorem quis laborum do eiusmod officia. Enim consectetur occaecat fugiat cillum cillum. Dolore dolore nostrud in commodo fugiat mollit consequat occaecat non et et elit ullamco. Sit voluptate minim ut est culpa velit nulla fugiat reprehenderit eu aliquip adipisicing labore. Sit minim minim do dolor dolor. Lorem Lorem labore exercitation magna veniam eiusmod do.\r\n", + "Fugiat dolor adipisicing quis aliquip aute dolore. Qui proident anim elit veniam ex aliquip eiusmod ipsum sunt pariatur est. Non fugiat duis do est officia adipisicing.\r\n", + "Nulla deserunt do laboris cupidatat veniam do consectetur ipsum elit veniam in mollit eu. Ea in consequat cupidatat laboris sint fugiat irure. In commodo esse reprehenderit deserunt minim velit ullamco enim eu cupidatat tempor ex. Ullamco in non id culpa amet occaecat culpa nostrud id. Non occaecat culpa magna incididunt.\r\n", + "Enim laboris ex mollit reprehenderit eiusmod exercitation magna. Exercitation Lorem ex mollit non non culpa labore enim. Adipisicing labore dolore incididunt do amet aliquip excepteur ad et nostrud officia aute veniam voluptate. Fugiat enim eiusmod Lorem esse. Minim ullamco commodo consequat ex commodo aliqua eu nulla eu. Veniam non enim nulla ut Lorem nostrud minim sint duis.\r\n", + "Enim duis consectetur in ullamco cillum veniam nulla amet. Exercitation nisi sunt sunt duis in culpa nisi magna ex id ipsum laboris reprehenderit qui. Officia pariatur qui ex fugiat veniam et sunt sit nostrud. Veniam ullamco tempor fugiat minim Lorem proident velit in eiusmod elit. Enim minim excepteur aute aliquip ex magna commodo dolore qui et labore. Proident eu aliquip cillum dolor. Nostrud ipsum ut irure consequat fugiat nulla proident occaecat laborum.\r\n", + "Amet duis eiusmod sunt adipisicing esse ex nostrud consectetur voluptate cillum. Ipsum occaecat sit et anim velit irure ea incididunt cupidatat ullamco in nisi quis. Esse officia ipsum commodo qui quis qui do. Commodo aliquip amet aute sit sit ut cupidatat elit nostrud.\r\n", + "Laboris laboris sit mollit cillum nulla deserunt commodo culpa est commodo anim id anim sit. Officia id consectetur velit incididunt est dolor sunt ipsum magna aliqua consectetur. Eiusmod pariatur minim deserunt cupidatat veniam Lorem aliquip sunt proident eu Lorem sit dolor fugiat. Proident qui ut ex in incididunt nulla nulla dolor ex laboris ea ad.\r\n", + "Ex incididunt enim labore nulla cupidatat elit. Quis ut incididunt incididunt non irure commodo do mollit cillum anim excepteur. 
Qui consequat laborum dolore elit tempor aute ut nulla pariatur eu ullamco veniam. Nisi non velit labore in commodo excepteur culpa nulla tempor cillum. Ipsum qui sit sint reprehenderit ut labore incididunt dolor aliquip sunt. Reprehenderit occaecat tempor nisi laborum.\r\n", + "Lorem officia ullamco eu occaecat in magna eiusmod consectetur nisi aliqua mollit esse. Ullamco ex aute nostrud pariatur do enim cillum sint do fugiat nostrud culpa tempor. Do aliquip excepteur nostrud culpa eu pariatur eiusmod cillum excepteur do. Est sunt non quis cillum voluptate ex.\r\n", + "Deserunt consectetur tempor irure mollit qui tempor et. Labore enim eu irure laboris in. Nisi in tempor ex occaecat amet cupidatat laboris occaecat amet minim ut magna incididunt id. Consequat cillum laborum commodo mollit. Et magna culpa sunt dolore consequat laboris et sit. Deserunt qui voluptate excepteur dolor. Eu qui amet est proident.\r\n", + "Eu elit minim eiusmod occaecat eu nostrud dolor qui ut elit. Sunt dolore proident ea eu do eiusmod fugiat incididunt pariatur duis amet Lorem nisi ut. Adipisicing quis veniam cupidatat Lorem sint culpa sunt veniam sint. Excepteur eu exercitation est magna pariatur veniam dolore qui fugiat labore proident eiusmod cillum. Commodo reprehenderit elit proident duis sint magna.\r\n", + "Ut aliquip pariatur deserunt nostrud commodo ad proident est exercitation. Sit minim do ea enim sint officia nisi incididunt laborum. Ex amet duis commodo fugiat. Ut aute tempor deserunt irure occaecat aliquip voluptate cillum aute elit qui nostrud.\r\n", + "Irure et quis consectetur sit est do sunt aliquip eu. Cupidatat pariatur consequat dolore consectetur. Adipisicing magna velit mollit occaecat do id. Nisi pariatur cupidatat cillum incididunt excepteur consectetur excepteur do laborum deserunt irure pariatur cillum.\r\n", + "Adipisicing esse incididunt cillum est irure consequat irure ad aute voluptate. Incididunt do occaecat nostrud do ipsum pariatur Lorem qui laboris et pariatur. Est exercitation dolor culpa ad velit ut et.\r\n", + "Sit eiusmod id enim ad ex dolor pariatur do. Ullamco occaecat quis dolor minim non elit labore amet est. Commodo velit eu nulla eiusmod ullamco. Incididunt anim pariatur aute eiusmod veniam tempor enim officia elit id. Elit Lorem est commodo dolore nostrud. Labore et consectetur do exercitation veniam laboris incididunt aliqua proident dolore ea officia cupidatat. Velit laboris aliquip deserunt labore commodo.\r\n", + "Proident nostrud labore eu nostrud. Excepteur ut in velit labore ea proident labore ea sint cillum. Incididunt ipsum consectetur officia irure sit pariatur veniam id velit officia mollit. Adipisicing magna voluptate velit excepteur enim consectetur incididunt voluptate tempor occaecat fugiat velit excepteur labore. Do do incididunt qui nisi voluptate enim. Laboris aute sit voluptate cillum pariatur minim excepteur ullamco mollit deserunt.\r\n", + "Excepteur laborum adipisicing nisi elit fugiat tempor. Elit laboris qui enim labore duis. Proident tempor in consectetur proident excepteur do ex laboris sit.\r\n", + "Dolore do ea incididunt do duis dolore eu labore nisi cupidatat voluptate amet incididunt minim. Nulla pariatur mollit cupidatat adipisicing nulla et. Dolor aliquip in ex magna excepteur. Nulla consequat minim consequat ullamco dolor laboris ullamco eu reprehenderit duis nostrud pariatur.\r\n", + "Id nisi labore duis qui. Incididunt laboris tempor aute do sit. 
Occaecat excepteur est mollit ea in mollit ullamco est amet reprehenderit.\r\n", + "Aute labore ipsum velit non voluptate eiusmod et reprehenderit cupidatat occaecat. Lorem tempor tempor consectetur exercitation qui nostrud sunt cillum quis ut non dolore. Reprehenderit consequat reprehenderit laborum qui pariatur anim et officia est cupidatat enim velit velit.\r\n", + "Commodo ex et fugiat cupidatat non adipisicing commodo. Minim ad dolore fugiat mollit cupidatat aliqua sunt dolor sit. Labore esse labore velit aute enim. Nulla duis incididunt est aliquip consectetur elit qui incididunt minim minim labore amet sit cillum.\r\n" +] \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/types/readme.txt b/src/s3select/rapidjson/bin/types/readme.txt new file mode 100644 index 000000000..da1dae675 --- /dev/null +++ b/src/s3select/rapidjson/bin/types/readme.txt @@ -0,0 +1 @@ +Test data obtained from https://github.com/xpol/lua-rapidjson/tree/master/performance diff --git a/src/s3select/rapidjson/bin/unittestschema/address.json b/src/s3select/rapidjson/bin/unittestschema/address.json new file mode 100644 index 000000000..c3cf64261 --- /dev/null +++ b/src/s3select/rapidjson/bin/unittestschema/address.json @@ -0,0 +1,139 @@ +{ + "type": "object", + "properties": { + "version": { + "$ref": "#/definitions/decimal_type" + }, + "address": { + "$ref": "#/definitions/address_type" + }, + "phones": { + "type": "array", + "minItems": 1, + "maxItems": 2, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/phone_type" + } + }, + "names": { + "type": "array", + "items": [ + { "type": "string" }, + { "type": "string" } + ], + "additionalItems": false + }, + "extra": { + "type": "object", + "patternProperties": { + "^S_": { "type": "string" } + } + }, + "gender": { + "type": "string", + "enum": ["M", "F"] + } + }, + "additionalProperties": false, + "dependencies": { + "address": [ "version" ], + "names": { + "properties": { + "version": { "$ref": "#/definitions/decimal_type" } + }, + "required": ["version"] + } + }, + "definitions": { + "address_type": { + "type": "object", + "properties": { + "number": { + "$ref": "#/definitions/positiveInt_type" + }, + "street1": { + "type": "string" + }, + "street2": { + "type": ["string", "null"] + }, + "street3": { + "not": { "type": ["boolean", "number", "integer", "object", "null"] } + }, + "city": { + "type": "string", + "maxLength": 10, + "minLength": 4 + }, + "area": { + "oneOf": [ + { "$ref": "#/definitions/county_type" }, + { "$ref": "#/definitions/province_type" } + ] + }, + "country": { + "allOf": [ + { "$ref": "#/definitions/country_type" } + ] + }, + "postcode": { + "anyOf": [ + { "type": "string", "pattern": "^[A-Z]{2}[0-9]{1,2} [0-9][A-Z]{2}$" }, + { "type": "string", "pattern": "^[0-9]{5}$" } + ] + } + }, + "minProperties": 7, + "required": [ + "number", + "street1", + "city" + ] + }, + "country_type": { + "type": "string", + "enum": ["UK", "Canada"] + }, + "county_type": { + "type": "string", + "enum": ["Sussex", "Surrey", "Kent"] + }, + "province_type": { + "type": "string", + "enum": ["Quebec", "BC", "Alberta"] + }, + "date_type": { + "pattern": "^([0-9]([0-9]([0-9][1-9]|[1-9]0)|[1-9]00)|[1-9]000)(-(0[1-9]|1[0-2])(-(0[1-9]|[1-2][0-9]|3[0-1]))?)?$", + "type": "string" + }, + "positiveInt_type": { + "minimum": 0, + "exclusiveMinimum": true, + "maximum": 100, + "exclusiveMaximum": true, + "type": "integer" + }, + "decimal_type": { + "multipleOf": 1.0, + "type": "number" + }, + "time_type": { + "pattern":
"^([01][0-9]|2[0-3]):[0-5][0-9]:([0-5][0-9]|60)(\\.[0-9]+)?$", + "type": "string" + }, + "unsignedInt_type": { + "type": "integer", + "minimum": 0, + "maximum": 99999 + }, + "phone_type": { + "pattern": "^[0-9]*-[0-9]*", + "type": "string" + }, + "url_type": { + "pattern": "^\\S*$", + "type": "string" + } + } +} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/unittestschema/allOf_address.json b/src/s3select/rapidjson/bin/unittestschema/allOf_address.json new file mode 100644 index 000000000..fd501f66d --- /dev/null +++ b/src/s3select/rapidjson/bin/unittestschema/allOf_address.json @@ -0,0 +1,7 @@ +{ + "allOf": [ + { + "$ref": "http://localhost:1234/address.json#" + } + ] +} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/unittestschema/anyOf_address.json b/src/s3select/rapidjson/bin/unittestschema/anyOf_address.json new file mode 100644 index 000000000..5c90308f4 --- /dev/null +++ b/src/s3select/rapidjson/bin/unittestschema/anyOf_address.json @@ -0,0 +1,7 @@ +{ + "anyOf": [ + { + "$ref": "http://localhost:1234/address.json#" + } + ] +} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/unittestschema/idandref.json b/src/s3select/rapidjson/bin/unittestschema/idandref.json new file mode 100644 index 000000000..ad485d29f --- /dev/null +++ b/src/s3select/rapidjson/bin/unittestschema/idandref.json @@ -0,0 +1,69 @@ +{ + "id": "http://example.com/root.json", + "definitions": { + "A": { + "id": "#foo", + "type": "integer" + }, + "B": { + "id": "other.json", + "definitions": { + "X": { + "id": "#bar", + "type": "boolean" + }, + "Y": { + "$ref": "#/definitions/X" + }, + "W": { + "$ref": "#/definitions/Y" + }, + "Z": { + "$ref": "#bar" + }, + "N": { + "properties": { + "NX": { + "$ref": "#/definitions/X" + } + } + } + } + } + }, + "properties": { + "PA1": { + "$ref": "http://example.com/root.json#/definitions/A" + }, + "PA2": { + "$ref": "#/definitions/A" + }, + "PA3": { + "$ref": "#foo" + }, + "PX1": { + "$ref": "#/definitions/B/definitions/X" + }, + "PX2Y": { + "$ref": "#/definitions/B/definitions/Y" + }, + "PX3Z": { + "$ref": "#/definitions/B/definitions/Z" + }, + "PX4": { + "$ref": "http://example.com/other.json#/definitions/X" + }, + "PX5": { + "$ref": "other.json#/definitions/X" + }, + "PX6": { + "$ref": "other.json#bar" + }, + "PX7W": { + "$ref": "#/definitions/B/definitions/W" + }, + "PX8N": { + "$ref": "#/definitions/B/definitions/N" + } + } +} \ No newline at end of file diff --git a/src/s3select/rapidjson/bin/unittestschema/oneOf_address.json b/src/s3select/rapidjson/bin/unittestschema/oneOf_address.json new file mode 100644 index 000000000..a5baadd2a --- /dev/null +++ b/src/s3select/rapidjson/bin/unittestschema/oneOf_address.json @@ -0,0 +1,7 @@ +{ + "oneOf": [ + { + "$ref": "http://localhost:1234/address.json#" + } + ] +} \ No newline at end of file diff --git a/src/s3select/rapidjson/contrib/natvis/LICENSE b/src/s3select/rapidjson/contrib/natvis/LICENSE new file mode 100644 index 000000000..f57da96cf --- /dev/null +++ b/src/s3select/rapidjson/contrib/natvis/LICENSE @@ -0,0 +1,45 @@ +The MIT License (MIT) + +Copyright (c) 2017 Bart Muzzin + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do 
so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +Derived from: + +The MIT License (MIT) + +Copyright (c) 2015 mojmir svoboda + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/src/s3select/rapidjson/contrib/natvis/README.md b/src/s3select/rapidjson/contrib/natvis/README.md new file mode 100644 index 000000000..9685c7f7c --- /dev/null +++ b/src/s3select/rapidjson/contrib/natvis/README.md @@ -0,0 +1,7 @@ +# rapidjson.natvis + +This file can be used as a [Visual Studio Visualizer](https://docs.microsoft.com/en-gb/visualstudio/debugger/create-custom-views-of-native-objects) to aid in visualizing rapidjson structures within the Visual Studio debugger. Natvis visualizers are supported in Visual Studio 2012 and later. To install, copy the file into this directory: + +`%USERPROFILE%\Documents\Visual Studio 2012\Visualizers` + +Each version of Visual Studio has a similar directory; the file must be copied into each version's directory to be used with that version. In Visual Studio 2015 and later, this can be done without restarting Visual Studio (a new debugging session must be started).
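As a quick, hypothetical illustration (not part of the upstream files): a minimal C++ program to step through under the Visual Studio debugger once rapidjson.natvis is installed. The JSON literal and variable names are invented for this sketch; only the rapidjson calls (`Parse`, `HasParseError`, `GetInt`) are real API.

```cpp
#include "rapidjson/document.h"

#include <cstdio>

int main() {
    // A small document touching the cases the visualizer covers:
    // object members, an array, short strings, and numbers.
    const char json[] = "{\"name\":\"alice\",\"age\":30,\"tags\":[\"a\",\"b\"]}";

    rapidjson::Document d;
    d.Parse(json);
    if (d.HasParseError())
        return 1;

    // Break on the next line: with rapidjson.natvis installed, the debugger
    // should render `d` as an object with its members and array elements
    // expanded in place, instead of raw flag/union internals.
    std::printf("age = %d\n", d["age"].GetInt());
    return 0;
}
```

Inspecting `d` in the Watch window at the breakpoint should show it summarized along the lines of `Object members=3`, which is the behavior the visualizer's display strings describe.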
diff --git a/src/s3select/rapidjson/contrib/natvis/rapidjson.natvis b/src/s3select/rapidjson/contrib/natvis/rapidjson.natvis new file mode 100644 index 000000000..e7bd44b6e --- /dev/null +++ b/src/s3select/rapidjson/contrib/natvis/rapidjson.natvis @@ -0,0 +1,38 @@ + + + + + null + true + false + {(const Ch*)data_.ss.str,na} + {(const Ch*)((size_t)data_.s.str & 0x0000FFFFFFFFFFFF),na} + {data_.n.i.i} + {data_.n.u.u} + {data_.n.i64} + {data_.n.u64} + {data_.n.d} + Object members={data_.o.size} + Array members={data_.a.size} + + data_.o.size + data_.o.capacity + + data_.o.size + + (rapidjson::GenericMember<$T1,$T2>*)(((size_t)data_.o.members) & 0x0000FFFFFFFFFFFF) + + + data_.a.size + data_.a.capacity + + data_.a.size + + (rapidjson::GenericValue<$T1,$T2>*)(((size_t)data_.a.elements) & 0x0000FFFFFFFFFFFF) + + + + + + + diff --git a/src/s3select/rapidjson/doc/CMakeLists.txt b/src/s3select/rapidjson/doc/CMakeLists.txt new file mode 100644 index 000000000..c5345ba69 --- /dev/null +++ b/src/s3select/rapidjson/doc/CMakeLists.txt @@ -0,0 +1,27 @@ +find_package(Doxygen) + +IF(NOT DOXYGEN_FOUND) + MESSAGE(STATUS "No Doxygen found. Documentation won't be built") +ELSE() + file(GLOB SOURCES ${CMAKE_CURRENT_LIST_DIR}/../include/*) + file(GLOB MARKDOWN_DOC ${CMAKE_CURRENT_LIST_DIR}/../doc/*.md) + list(APPEND MARKDOWN_DOC ${CMAKE_CURRENT_LIST_DIR}/../readme.md) + + CONFIGURE_FILE(Doxyfile.in Doxyfile @ONLY) + CONFIGURE_FILE(Doxyfile.zh-cn.in Doxyfile.zh-cn @ONLY) + + file(GLOB DOXYFILES ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile*) + + add_custom_command(OUTPUT html + COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile + COMMAND ${DOXYGEN_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/Doxyfile.zh-cn + COMMAND ${CMAKE_COMMAND} -E touch ${CMAKE_CURRENT_BINARY_DIR}/html + DEPENDS ${MARKDOWN_DOC} ${SOURCES} ${DOXYFILES} + WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/../ + ) + + add_custom_target(doc ALL DEPENDS html) + install(DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/html + DESTINATION ${DOC_INSTALL_DIR} + COMPONENT doc) +ENDIF() diff --git a/src/s3select/rapidjson/doc/Doxyfile.in b/src/s3select/rapidjson/doc/Doxyfile.in new file mode 100644 index 000000000..6e79f9371 --- /dev/null +++ b/src/s3select/rapidjson/doc/Doxyfile.in @@ -0,0 +1,2369 @@ +# Doxyfile 1.8.7 + +# This file describes the settings to be used by the documentation system +# doxygen (www.doxygen.org) for a project. +# +# All text after a double hash (##) is considered a comment and is placed in +# front of the TAG it is preceding. +# +# All text after a single hash (#) is considered a comment and will be ignored. +# The format is: +# TAG = value [value, ...] +# For lists, items can also be appended using: +# TAG += value [value, ...] +# Values that contain spaces should be placed between quotes (\" \"). + +#--------------------------------------------------------------------------- +# Project related configuration options +#--------------------------------------------------------------------------- + +# This tag specifies the encoding used for all characters in the config file +# that follow. The default is UTF-8 which is also the encoding used for all text +# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv +# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv +# for the list of possible encodings. +# The default value is: UTF-8. 
+ +DOXYFILE_ENCODING = UTF-8 + +# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by +# double-quotes, unless you are using Doxywizard) that should identify the +# project for which the documentation is generated. This name is used in the +# title of most generated pages and in a few other places. +# The default value is: My Project. + +PROJECT_NAME = RapidJSON + +# The PROJECT_NUMBER tag can be used to enter a project or revision number. This +# could be handy for archiving the generated documentation or if some version +# control system is used. + +PROJECT_NUMBER = + +# Using the PROJECT_BRIEF tag one can provide an optional one line description +# for a project that appears at the top of each page and should give the viewer a +# quick idea about the purpose of the project. Keep the description short. + +PROJECT_BRIEF = "A fast JSON parser/generator for C++ with both SAX/DOM style API" + +# With the PROJECT_LOGO tag one can specify a logo or icon that is included in +# the documentation. The maximum height of the logo should not exceed 55 pixels +# and the maximum width should not exceed 200 pixels. Doxygen will copy the logo +# to the output directory. + +PROJECT_LOGO = + +# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path +# into which the generated documentation will be written. If a relative path is +# entered, it will be relative to the location where doxygen was started. If +# left blank the current directory will be used. + +OUTPUT_DIRECTORY = @CMAKE_CURRENT_BINARY_DIR@ + +# If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub- +# directories (in 2 levels) under the output directory of each output format and +# will distribute the generated files over these directories. Enabling this +# option can be useful when feeding doxygen a huge amount of source files, where +# putting all generated files in the same directory would otherwise cause +# performance problems for the file system. +# The default value is: NO. + +CREATE_SUBDIRS = NO + +# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII +# characters to appear in the names of generated files. If set to NO, non-ASCII +# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode +# U+3044. +# The default value is: NO. + +ALLOW_UNICODE_NAMES = NO + +# The OUTPUT_LANGUAGE tag is used to specify the language in which all +# documentation generated by doxygen is written. Doxygen will use this +# information to generate all constant output in the proper language. +# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, +# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), +# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, +# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), +# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, +# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, +# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, +# Ukrainian and Vietnamese. +# The default value is: English. + +OUTPUT_LANGUAGE = English + +# If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member +# descriptions after the members that are listed in the file and class +# documentation (similar to Javadoc). Set to NO to disable this. +# The default value is: YES.
+ +BRIEF_MEMBER_DESC = YES + +# If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief +# description of a member or function before the detailed description. +# +# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the +# brief descriptions will be completely suppressed. +# The default value is: YES. + +REPEAT_BRIEF = YES + +# This tag implements a quasi-intelligent brief description abbreviator that is +# used to form the text in various listings. Each string in this list, if found +# as the leading text of the brief description, will be stripped from the text +# and the result, after processing the whole list, is used as the annotated +# text. Otherwise, the brief description is used as-is. If left blank, the +# following values are used ($name is automatically replaced with the name of +# the entity):The $name class, The $name widget, The $name file, is, provides, +# specifies, contains, represents, a, an and the. + +ABBREVIATE_BRIEF = "The $name class" \ + "The $name widget" \ + "The $name file" \ + is \ + provides \ + specifies \ + contains \ + represents \ + a \ + an \ + the + +# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then +# doxygen will generate a detailed section even if there is only a brief +# description. +# The default value is: NO. + +ALWAYS_DETAILED_SEC = NO + +# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all +# inherited members of a class in the documentation of that class as if those +# members were ordinary class members. Constructors, destructors and assignment +# operators of the base classes will not be shown. +# The default value is: NO. + +INLINE_INHERITED_MEMB = NO + +# If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path +# before each file name in the file list and in the header files. If set to NO the +# shortest path that makes the file name unique will be used. +# The default value is: YES. + +FULL_PATH_NAMES = YES + +# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. +# Stripping is only done if one of the specified strings matches the left-hand +# part of the path. The tag can be used to show relative paths in the file list. +# If left blank the directory from which doxygen is run is used as the path to +# strip. +# +# Note that you can specify absolute paths here, but also relative paths, which +# will be relative from the directory where doxygen is started. +# This tag requires that the tag FULL_PATH_NAMES is set to YES. + +STRIP_FROM_PATH = + +# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the +# path mentioned in the documentation of a class, which tells the reader which +# header file to include in order to use a class. If left blank only the name of +# the header file containing the class definition is used. Otherwise one should +# specify the list of include paths that are normally passed to the compiler +# using the -I flag. + +STRIP_FROM_INC_PATH = + +# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but +# less readable) file names. This can be useful if your file system doesn't +# support long names like on DOS, Mac, or CD-ROM. +# The default value is: NO. + +SHORT_NAMES = NO + +# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the +# first line (until the first dot) of a Javadoc-style comment as the brief +# description.
If set to NO, the Javadoc-style will behave just like regular Qt- +# style comments (thus requiring an explicit @brief command for a brief +# description.) +# The default value is: NO. + +JAVADOC_AUTOBRIEF = NO + +# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first +# line (until the first dot) of a Qt-style comment as the brief description. If +# set to NO, the Qt-style will behave just like regular Qt-style comments (thus +# requiring an explicit \brief command for a brief description.) +# The default value is: NO. + +QT_AUTOBRIEF = NO + +# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a +# multi-line C++ special comment block (i.e. a block of //! or /// comments) as +# a brief description. This used to be the default behavior. The new default is +# to treat a multi-line C++ comment block as a detailed description. Set this +# tag to YES if you prefer the old behavior instead. +# +# Note that setting this tag to YES also means that rational rose comments are +# not recognized any more. +# The default value is: NO. + +MULTILINE_CPP_IS_BRIEF = NO + +# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the +# documentation from any documented member that it re-implements. +# The default value is: YES. + +INHERIT_DOCS = YES + +# If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a +# new page for each member. If set to NO, the documentation of a member will be +# part of the file/class/namespace that contains it. +# The default value is: NO. + +SEPARATE_MEMBER_PAGES = NO + +# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen +# uses this value to replace tabs by spaces in code fragments. +# Minimum value: 1, maximum value: 16, default value: 4. + +TAB_SIZE = 4 + +# This tag can be used to specify a number of aliases that act as commands in +# the documentation. An alias has the form: +# name=value +# For example adding +# "sideeffect=@par Side Effects:\n" +# will allow you to put the command \sideeffect (or @sideeffect) in the +# documentation, which will result in a user-defined paragraph with heading +# "Side Effects:". You can put \n's in the value part of an alias to insert +# newlines. + +ALIASES = + +# This tag can be used to specify a number of word-keyword mappings (TCL only). +# A mapping has the form "name=value". For example adding "class=itcl::class" +# will allow you to use the command class in the itcl::class meaning. + +TCL_SUBST = + +# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources +# only. Doxygen will then generate output that is more tailored for C. For +# instance, some of the names that are used will be different. The list of all +# members will be omitted, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_FOR_C = NO + +# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or +# Python sources only. Doxygen will then generate output that is more tailored +# for that language. For instance, namespaces will be presented as packages, +# qualified scopes will look different, etc. +# The default value is: NO. + +OPTIMIZE_OUTPUT_JAVA = NO + +# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran +# sources. Doxygen will then generate output that is tailored for Fortran. +# The default value is: NO. + +OPTIMIZE_FOR_FORTRAN = NO + +# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL +# sources. Doxygen will then generate output that is tailored for VHDL. 
+# The default value is: NO. + +OPTIMIZE_OUTPUT_VHDL = NO + +# Doxygen selects the parser to use depending on the extension of the files it +# parses. With this tag you can assign which parser to use for a given +# extension. Doxygen has a built-in mapping, but you can override or extend it +# using this tag. The format is ext=language, where ext is a file extension, and +# language is one of the parsers supported by doxygen: IDL, Java, Javascript, +# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: +# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: +# Fortran. In the latter case the parser tries to guess whether the code is fixed +# or free formatted code, this is the default for Fortran type files), VHDL. For +# instance to make doxygen treat .inc files as Fortran files (default is PHP), +# and .f files as C (default is Fortran), use: inc=Fortran f=C. +# +# Note: For files without an extension you can use no_extension as a placeholder. +# +# Note that for custom extensions you also need to set FILE_PATTERNS otherwise +# the files are not read by doxygen. + +EXTENSION_MAPPING = + +# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments +# according to the Markdown format, which allows for more readable +# documentation. See http://daringfireball.net/projects/markdown/ for details. +# The output of markdown processing is further processed by doxygen, so you can +# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in +# case of backward compatibility issues. +# The default value is: YES. + +MARKDOWN_SUPPORT = YES + +# When enabled doxygen tries to link words that correspond to documented +# classes, or namespaces to their corresponding documentation. Such a link can +# be prevented in individual cases by putting a % sign in front of the word +# or globally by setting AUTOLINK_SUPPORT to NO. +# The default value is: YES. + +AUTOLINK_SUPPORT = YES + +# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want +# to include (a tag file for) the STL sources as input, then you should set this +# tag to YES in order to let doxygen match function declarations and +# definitions whose arguments contain STL classes (e.g. func(std::string); +# versus func(std::string) {}). This also makes the inheritance and collaboration +# diagrams that involve STL classes more complete and accurate. +# The default value is: NO. + +BUILTIN_STL_SUPPORT = NO + +# If you use Microsoft's C++/CLI language, you should set this option to YES to +# enable parsing support. +# The default value is: NO. + +CPP_CLI_SUPPORT = NO + +# Set the SIP_SUPPORT tag to YES if your project consists of sip (see: +# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen +# will parse them like normal C++ but will assume all classes use public instead +# of private inheritance when no explicit protection keyword is present. +# The default value is: NO. + +SIP_SUPPORT = NO + +# For Microsoft's IDL there are propget and propput attributes to indicate +# getter and setter methods for a property. Setting this option to YES will make +# doxygen replace the get and set methods by a property in the documentation. +# This will only work if the methods are indeed getting or setting a simple +# type. If this is not the case, or you want to show the methods anyway, you +# should set this option to NO. +# The default value is: YES.
+ +IDL_PROPERTY_SUPPORT = YES + +# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC +# tag is set to YES, then doxygen will reuse the documentation of the first +# member in the group (if any) for the other members of the group. By default +# all members of a group must be documented explicitly. +# The default value is: NO. + +DISTRIBUTE_GROUP_DOC = NO + +# Set the SUBGROUPING tag to YES to allow class member groups of the same type +# (for instance a group of public functions) to be put as a subgroup of that +# type (e.g. under the Public Functions section). Set it to NO to prevent +# subgrouping. Alternatively, this can be done per class using the +# \nosubgrouping command. +# The default value is: YES. + +SUBGROUPING = YES + +# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions +# are shown inside the group in which they are included (e.g. using \ingroup) +# instead of on a separate page (for HTML and Man pages) or section (for LaTeX +# and RTF). +# +# Note that this feature does not work in combination with +# SEPARATE_MEMBER_PAGES. +# The default value is: NO. + +INLINE_GROUPED_CLASSES = YES + +# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions +# with only public data fields or simple typedef fields will be shown inline in +# the documentation of the scope in which they are defined (i.e. file, +# namespace, or group documentation), provided this scope is documented. If set +# to NO, structs, classes, and unions are shown on a separate page (for HTML and +# Man pages) or section (for LaTeX and RTF). +# The default value is: NO. + +INLINE_SIMPLE_STRUCTS = NO + +# When the TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or +# enum is documented as struct, union, or enum with the name of the typedef. So +# typedef struct TypeS {} TypeT will appear in the documentation as a struct +# with name TypeT. When disabled the typedef will appear as a member of a file, +# namespace, or class. And the struct will be named TypeS. This can typically be +# useful for C code in case the coding convention dictates that all compound +# types are typedef'ed and only the typedef is referenced, never the tag name. +# The default value is: NO. + +TYPEDEF_HIDES_STRUCT = NO + +# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This +# cache is used to resolve symbols given their name and scope. Since this can be +# an expensive process and often the same symbol appears multiple times in the +# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small +# doxygen will become slower. If the cache is too large, memory is wasted. The +# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range +# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 +# symbols. At the end of a run doxygen will report the cache usage and suggest +# the optimal cache size from a speed point of view. +# Minimum value: 0, maximum value: 9, default value: 0. + +LOOKUP_CACHE_SIZE = 0 + +#--------------------------------------------------------------------------- +# Build related configuration options +#--------------------------------------------------------------------------- + +# If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in +# documentation are documented, even if no documentation was available.
Private +# class members and static file members will be hidden unless the +# EXTRACT_PRIVATE and EXTRACT_STATIC tags, respectively, are set to YES. +# Note: This will also disable the warnings about undocumented members that are +# normally produced when WARNINGS is set to YES. +# The default value is: NO. + +EXTRACT_ALL = NO + +# If the EXTRACT_PRIVATE tag is set to YES all private members of a class will +# be included in the documentation. +# The default value is: NO. + +EXTRACT_PRIVATE = NO + +# If the EXTRACT_PACKAGE tag is set to YES all members with package or internal +# scope will be included in the documentation. +# The default value is: NO. + +EXTRACT_PACKAGE = NO + +# If the EXTRACT_STATIC tag is set to YES all static members of a file will be +# included in the documentation. +# The default value is: NO. + +EXTRACT_STATIC = NO + +# If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined +# locally in source files will be included in the documentation. If set to NO +# only classes defined in header files are included. Does not have any effect +# for Java sources. +# The default value is: YES. + +EXTRACT_LOCAL_CLASSES = YES + +# This flag is only useful for Objective-C code. When set to YES local methods, +# which are defined in the implementation section but not in the interface, are +# included in the documentation. If set to NO only methods in the interface are +# included. +# The default value is: NO. + +EXTRACT_LOCAL_METHODS = NO + +# If this flag is set to YES, the members of anonymous namespaces will be +# extracted and appear in the documentation as a namespace called +# 'anonymous_namespace{file}', where file will be replaced with the base name of +# the file that contains the anonymous namespace. By default anonymous namespaces +# are hidden. +# The default value is: NO. + +EXTRACT_ANON_NSPACES = NO + +# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all +# undocumented members inside documented classes or files. If set to NO these +# members will be included in the various overviews, but no documentation +# section is generated. This option has no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_MEMBERS = NO + +# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all +# undocumented classes that are normally visible in the class hierarchy. If set +# to NO these classes will be included in the various overviews. This option has +# no effect if EXTRACT_ALL is enabled. +# The default value is: NO. + +HIDE_UNDOC_CLASSES = NO + +# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend +# (class|struct|union) declarations. If set to NO these declarations will be +# included in the documentation. +# The default value is: NO. + +HIDE_FRIEND_COMPOUNDS = NO + +# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any +# documentation blocks found inside the body of a function. If set to NO these +# blocks will be appended to the function's detailed documentation block. +# The default value is: NO. + +HIDE_IN_BODY_DOCS = NO + +# The INTERNAL_DOCS tag determines if documentation that is typed after a +# \internal command is included. If the tag is set to NO then the documentation +# will be excluded. Set it to YES to include the internal documentation. +# The default value is: NO. + +INTERNAL_DOCS = NO + +# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file +# names in lower-case letters. If set to YES upper-case letters are also +# allowed.
+
+# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
+# undocumented members inside documented classes or files. If set to NO these
+# members will be included in the various overviews, but no documentation
+# section is generated. This option has no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_MEMBERS     = NO
+
+# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
+# undocumented classes that are normally visible in the class hierarchy. If set
+# to NO these classes will be included in the various overviews. This option has
+# no effect if EXTRACT_ALL is enabled.
+# The default value is: NO.
+
+HIDE_UNDOC_CLASSES     = NO
+
+# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
+# (class|struct|union) declarations. If set to NO these declarations will be
+# included in the documentation.
+# The default value is: NO.
+
+HIDE_FRIEND_COMPOUNDS  = NO
+
+# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
+# documentation blocks found inside the body of a function. If set to NO these
+# blocks will be appended to the function's detailed documentation block.
+# The default value is: NO.
+
+HIDE_IN_BODY_DOCS      = NO
+
+# The INTERNAL_DOCS tag determines if documentation that is typed after a
+# \internal command is included. If the tag is set to NO then the documentation
+# will be excluded. Set it to YES to include the internal documentation.
+# The default value is: NO.
+
+INTERNAL_DOCS          = NO
+
+# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
+# names in lower-case letters. If set to YES upper-case letters are also
+# allowed. This is useful if you have classes or files whose names only differ
+# in case and if your file system supports case sensitive file names. Windows
+# and Mac users are advised to set this option to NO.
+# The default value is: system dependent.
+
+CASE_SENSE_NAMES       = NO
+
+# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
+# their full class and namespace scopes in the documentation. If set to YES the
+# scope will be hidden.
+# The default value is: NO.
+
+HIDE_SCOPE_NAMES       = NO
+
+# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
+# the files that are included by a file in the documentation of that file.
+# The default value is: YES.
+
+SHOW_INCLUDE_FILES     = YES
+
+# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
+# grouped member an include statement to the documentation, telling the reader
+# which file to include in order to use the member.
+# The default value is: NO.
+
+SHOW_GROUPED_MEMB_INC  = NO
+
+# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
+# files with double quotes in the documentation rather than with sharp brackets.
+# The default value is: NO.
+
+FORCE_LOCAL_INCLUDES   = NO
+
+# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
+# documentation for inline members.
+# The default value is: YES.
+
+INLINE_INFO            = YES
+
+# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
+# (detailed) documentation of file and class members alphabetically by member
+# name. If set to NO the members will appear in declaration order.
+# The default value is: YES.
+
+SORT_MEMBER_DOCS       = YES
+
+# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
+# descriptions of file, namespace and class members alphabetically by member
+# name. If set to NO the members will appear in declaration order. Note that
+# this will also influence the order of the classes in the class list.
+# The default value is: NO.
+
+SORT_BRIEF_DOCS        = NO
+
+# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
+# (brief and detailed) documentation of class members so that constructors and
+# destructors are listed first. If set to NO the constructors will appear in the
+# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
+# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
+# member documentation.
+# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
+# detailed member documentation.
+# The default value is: NO.
+
+SORT_MEMBERS_CTORS_1ST = NO
+
+# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
+# of group names into alphabetical order. If set to NO the group names will
+# appear in their defined order.
+# The default value is: NO.
+
+SORT_GROUP_NAMES       = NO
+
+# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
+# fully-qualified names, including namespaces. If set to NO, the class list will
+# be sorted only by class name, not including the namespace part.
+# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
+# Note: This option applies only to the class list, not to the alphabetical
+# list.
+# The default value is: NO.
+
+SORT_BY_SCOPE_NAME     = NO
+
+# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
+# type resolution of all parameters of a function it will reject a match between
+# the prototype and the implementation of a member function even if there is
+# only one candidate or it is obvious which candidate to choose by doing a
+# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
+# accept a match between prototype and implementation in such cases.
+# The default value is: NO.
+
+STRICT_PROTO_MATCHING  = NO
+
+# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the
+# todo list. This list is created by putting \todo commands in the
+# documentation.
+# The default value is: YES.
+
+GENERATE_TODOLIST      = YES
+
+# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the
+# test list. This list is created by putting \test commands in the
+# documentation.
+# The default value is: YES.
+
+GENERATE_TESTLIST      = YES
+
+# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
+# list. This list is created by putting \bug commands in the documentation.
+# The default value is: YES.
+
+GENERATE_BUGLIST       = YES
+
+# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
+# the deprecated list. This list is created by putting \deprecated commands in
+# the documentation.
+# The default value is: YES.
+
+GENERATE_DEPRECATEDLIST= YES
+
+# The ENABLED_SECTIONS tag can be used to enable conditional documentation
+# sections, marked by \if <section_label> ... \endif and \cond <section_label>
+# ... \endcond blocks.
+
+ENABLED_SECTIONS       = $(RAPIDJSON_SECTIONS)
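+
+# Editor's illustration (the section label "internal" is hypothetical): with
+# the setting above, running
+#   RAPIDJSON_SECTIONS=internal doxygen Doxyfile
+# would include comment blocks such as \if internal ... \endif, while they are
+# omitted when the environment variable is empty or unset.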
+
+# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
+# initial value of a variable or macro / define can have for it to appear in the
+# documentation. If the initializer consists of more lines than specified here
+# it will be hidden. Use a value of 0 to hide initializers completely. The
+# appearance of the value of individual variables and macros / defines can be
+# controlled using \showinitializer or \hideinitializer command in the
+# documentation regardless of this setting.
+# Minimum value: 0, maximum value: 10000, default value: 30.
+
+MAX_INITIALIZER_LINES  = 30
+
+# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
+# the bottom of the documentation of classes and structs. If set to YES the list
+# will mention the files that were used to generate the documentation.
+# The default value is: YES.
+
+SHOW_USED_FILES        = YES
+
+# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
+# will remove the Files entry from the Quick Index and from the Folder Tree View
+# (if specified).
+# The default value is: YES.
+
+SHOW_FILES             = YES
+
+# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
+# page. This will remove the Namespaces entry from the Quick Index and from the
+# Folder Tree View (if specified).
+# The default value is: YES.
+
+SHOW_NAMESPACES        = NO
+
+# The FILE_VERSION_FILTER tag can be used to specify a program or script that
+# doxygen should invoke to get the current version for each file (typically from
+# the version control system). Doxygen will invoke the program by executing (via
+# popen()) the command <command> <input-file>, where <command> is the value of
+# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
+# provided by doxygen. Whatever the program writes to standard output is used
+# as the file version. For an example see the documentation.
+
+FILE_VERSION_FILTER    =
+
+# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
+# by doxygen. The layout file controls the global structure of the generated
+# output files in an output format independent way. To create the layout file
+# that represents doxygen's defaults, run doxygen with the -l option. You can
+# optionally specify a file name after the option, if omitted DoxygenLayout.xml
+# will be used as the name of the layout file.
+#
+# Note that if you run doxygen from a directory containing a file called
+# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
+# tag is left empty.
+
+LAYOUT_FILE            =
+
+# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
+# the reference definitions. This must be a list of .bib files. The .bib
+# extension is automatically appended if omitted. This requires the bibtex tool
+# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
+# For LaTeX the style of the bibliography can be controlled using
+# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
+# search path. Do not use file names with spaces, bibtex cannot handle them. See
+# also \cite for info how to create references.
+
+CITE_BIB_FILES         =
+
+#---------------------------------------------------------------------------
+# Configuration options related to warning and progress messages
+#---------------------------------------------------------------------------
+
+# The QUIET tag can be used to turn on/off the messages that are generated to
+# standard output by doxygen. If QUIET is set to YES this implies that the
+# messages are off.
+# The default value is: NO.
+
+QUIET                  = NO
+
+# The WARNINGS tag can be used to turn on/off the warning messages that are
+# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
+# this implies that the warnings are on.
+#
+# Tip: Turn warnings on while writing the documentation.
+# The default value is: YES.
+
+WARNINGS               = YES
+
+# If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate
+# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
+# will automatically be disabled.
+# The default value is: YES.
+
+WARN_IF_UNDOCUMENTED   = YES
+
+# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
+# potential errors in the documentation, such as not documenting some parameters
+# in a documented function, or documenting parameters that don't exist or using
+# markup commands wrongly.
+# The default value is: YES.
+
+WARN_IF_DOC_ERROR      = YES
+
+# This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
+# are documented, but have no documentation for their parameters or return
+# value. If set to NO doxygen will only warn about wrong or incomplete parameter
+# documentation, but not about the absence of documentation.
+# The default value is: NO.
+
+WARN_NO_PARAMDOC       = NO
+
+# The WARN_FORMAT tag determines the format of the warning messages that doxygen
+# can produce. The string should contain the $file, $line, and $text tags, which
+# will be replaced by the file and line number from which the warning originated
+# and the warning text. Optionally the format may contain $version, which will
+# be replaced by the version of the file (if it could be obtained via
+# FILE_VERSION_FILTER)
+# The default value is: $file:$line: $text.
+
+WARN_FORMAT            = "$file:$line: $text"
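+
+# Editor's illustration (file and line number are made up): with the format
+# above a warning is printed as
+#   include/rapidjson/document.h:123: warning text
+# A format like "$file($line) : $text" is sometimes used instead so that
+# Visual Studio can parse the location.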
+
+# The WARN_LOGFILE tag can be used to specify a file to which warning and error
+# messages should be written. If left blank the output is written to standard
+# error (stderr).
+
+WARN_LOGFILE           =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the input files
+#---------------------------------------------------------------------------
+
+# The INPUT tag is used to specify the files and/or directories that contain
+# documented source files. You may enter file names like myfile.cpp or
+# directories like /usr/src/myproject. Separate the files or directories with
+# spaces.
+# Note: If this tag is empty the current directory is searched.
+
+INPUT                  = readme.md \
+                         CHANGELOG.md \
+                         include/rapidjson/rapidjson.h \
+                         include/ \
+                         doc/features.md \
+                         doc/tutorial.md \
+                         doc/pointer.md \
+                         doc/stream.md \
+                         doc/encoding.md \
+                         doc/dom.md \
+                         doc/sax.md \
+                         doc/schema.md \
+                         doc/performance.md \
+                         doc/internals.md \
+                         doc/faq.md
+
+# This tag can be used to specify the character encoding of the source files
+# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
+# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
+# documentation (see: http://www.gnu.org/software/libiconv) for the list of
+# possible encodings.
+# The default value is: UTF-8.
+
+INPUT_ENCODING         = UTF-8
+
+# If the value of the INPUT tag contains directories, you can use the
+# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank the
+# following patterns are tested: *.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii,
+# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp,
+# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown,
+# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf,
+# *.qsf, *.as and *.js.
+
+FILE_PATTERNS          = *.c \
+                         *.cc \
+                         *.cxx \
+                         *.cpp \
+                         *.h \
+                         *.hh \
+                         *.hxx \
+                         *.hpp \
+                         *.inc \
+                         *.md
+
+# The RECURSIVE tag can be used to specify whether or not subdirectories should
+# be searched for input files as well.
+# The default value is: NO.
+
+RECURSIVE              = YES
+
+# The EXCLUDE tag can be used to specify files and/or directories that should be
+# excluded from the INPUT source files. This way you can easily exclude a
+# subdirectory from a directory tree whose root is specified with the INPUT tag.
+#
+# Note that relative paths are relative to the directory from which doxygen is
+# run.
+
+EXCLUDE                = ./include/rapidjson/msinttypes/
+
+# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
+# directories that are symbolic links (a Unix file system feature) are excluded
+# from the input.
+# The default value is: NO.
+
+EXCLUDE_SYMLINKS       = NO
+
+# If the value of the INPUT tag contains directories, you can use the
+# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
+# certain files from those directories.
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories for example use the pattern */test/*
+
+EXCLUDE_PATTERNS       =
+
+# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
+# (namespaces, classes, functions, etc.) that should be excluded from the
+# output. The symbol name can be a fully qualified name, a word, or if the
+# wildcard * is used, a substring. Examples: ANamespace, AClass,
+# AClass::ANamespace, ANamespace::*Test
+#
+# Note that the wildcards are matched against the file with absolute path, so to
+# exclude all test directories use the pattern */test/*
+
+EXCLUDE_SYMBOLS        = internal
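+
+# Editor's note (illustrative): "internal" above hides the rapidjson::internal
+# namespace from the generated output. A wildcard form such as
+#   EXCLUDE_SYMBOLS = internal ANamespace::*Test
+# would additionally hide any symbol under ANamespace whose name ends in
+# "Test" (names taken from the examples in the comment above).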
+
+# The EXAMPLE_PATH tag can be used to specify one or more files or directories
+# that contain example code fragments that are included (see the \include
+# command).
+
+EXAMPLE_PATH           =
+
+# If the value of the EXAMPLE_PATH tag contains directories, you can use the
+# EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and
+# *.h) to filter out the source-files in the directories. If left blank all
+# files are included.
+
+EXAMPLE_PATTERNS       = *
+
+# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
+# searched for input files to be used with the \include or \dontinclude commands
+# irrespective of the value of the RECURSIVE tag.
+# The default value is: NO.
+
+EXAMPLE_RECURSIVE      = NO
+
+# The IMAGE_PATH tag can be used to specify one or more files or directories
+# that contain images that are to be included in the documentation (see the
+# \image command).
+
+IMAGE_PATH             = ./doc
+
+# The INPUT_FILTER tag can be used to specify a program that doxygen should
+# invoke to filter for each input file. Doxygen will invoke the filter program
+# by executing (via popen()) the command:
+#
+# <filter> <input-file>
+#
+# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
+# name of an input file. Doxygen will then use the output that the filter
+# program writes to standard output. If FILTER_PATTERNS is specified, this tag
+# will be ignored.
+#
+# Note that the filter must not add or remove lines; it is applied before the
+# code is scanned, but not when the output code is generated. If lines are added
+# or removed, the anchors will not be placed correctly.
+
+INPUT_FILTER           =
+
+# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
+# basis. Doxygen will compare the file name with each pattern and apply the
+# filter if there is a match. The filters are a list of the form: pattern=filter
+# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
+# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
+# patterns match the file name, INPUT_FILTER is applied.
+
+FILTER_PATTERNS        =
+
+# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
+# INPUT_FILTER) will also be used to filter the input files that are used for
+# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
+# The default value is: NO.
+
+FILTER_SOURCE_FILES    = NO
+
+# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
+# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
+# it is also possible to disable source filtering for a specific pattern using
+# *.ext= (so without naming a filter).
+# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
+
+FILTER_SOURCE_PATTERNS =
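+
+# Editor's sketch (my_cpp_filter is hypothetical): a per-pattern setup could be
+#   FILTER_PATTERNS = *.cpp=my_cpp_filter
+# in which case doxygen runs "my_cpp_filter <input-file>" for every .cpp file
+# and parses whatever the filter writes to standard output.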
+
+# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
+# is part of the input, its contents will be placed on the main page
+# (index.html). This can be useful if you have a project on for instance GitHub
+# and want to reuse the introduction page also for the doxygen output.
+
+USE_MDFILE_AS_MAINPAGE = readme.md
+
+#---------------------------------------------------------------------------
+# Configuration options related to source browsing
+#---------------------------------------------------------------------------
+
+# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
+# generated. Documented entities will be cross-referenced with these sources.
+#
+# Note: To get rid of all source code in the generated output, make sure that
+# also VERBATIM_HEADERS is set to NO.
+# The default value is: NO.
+
+SOURCE_BROWSER         = NO
+
+# Setting the INLINE_SOURCES tag to YES will include the body of functions,
+# classes and enums directly into the documentation.
+# The default value is: NO.
+
+INLINE_SOURCES         = NO
+
+# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
+# special comment blocks from generated source code fragments. Normal C, C++ and
+# Fortran comments will always remain visible.
+# The default value is: YES.
+
+STRIP_CODE_COMMENTS    = NO
+
+# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
+# function all documented functions referencing it will be listed.
+# The default value is: NO.
+
+REFERENCED_BY_RELATION = NO
+
+# If the REFERENCES_RELATION tag is set to YES then for each documented function
+# all documented entities called/used by that function will be listed.
+# The default value is: NO.
+
+REFERENCES_RELATION    = NO
+
+# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
+# to YES, then the hyperlinks from functions in REFERENCES_RELATION and
+# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
+# link to the documentation.
+# The default value is: YES.
+
+REFERENCES_LINK_SOURCE = YES
+
+# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
+# source code will show a tooltip with additional information such as prototype,
+# brief description and links to the definition and documentation. Since this
+# will make the HTML file larger and loading of large files a bit slower, you
+# can opt to disable this feature.
+# The default value is: YES.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+SOURCE_TOOLTIPS        = YES
+
+# If the USE_HTAGS tag is set to YES then the references to source code will
+# point to the HTML generated by the htags(1) tool instead of doxygen built-in
+# source browser. The htags tool is part of GNU's global source tagging system
+# (see http://www.gnu.org/software/global/global.html). You will need version
+# 4.8.6 or higher.
+#
+# To use it do the following:
+# - Install the latest version of global
+# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
+# - Make sure the INPUT points to the root of the source tree
+# - Run doxygen as normal
+#
+# Doxygen will invoke htags (and that will in turn invoke gtags), so these
+# tools must be available from the command line (i.e. in the search path).
+#
+# The result: instead of the source browser generated by doxygen, the links to
+# source code will now point to the output of htags.
+# The default value is: NO.
+# This tag requires that the tag SOURCE_BROWSER is set to YES.
+
+USE_HTAGS              = NO
+
+# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
+# verbatim copy of the header file for each class for which an include is
+# specified. Set to NO to disable this.
+# See also: Section \class.
+# The default value is: YES.
+
+VERBATIM_HEADERS       = YES
+
+# If the CLANG_ASSISTED_PARSING tag is set to YES, then doxygen will use the
+# clang parser (see: http://clang.llvm.org/) for more accurate parsing at the
+# cost of reduced performance. This can be particularly helpful with template
+# rich C++ code for which doxygen's built-in parser lacks the necessary type
+# information.
+# Note: The availability of this option depends on whether or not doxygen was
+# compiled with the --with-libclang option.
+# The default value is: NO.
+
+CLANG_ASSISTED_PARSING = NO
+
+# If clang assisted parsing is enabled you can provide the compiler with command
+# line options that you would normally use when invoking the compiler. Note that
+# the include paths will already be set by doxygen for the files and directories
+# specified with INPUT and INCLUDE_PATH.
+# This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES.
+
+CLANG_OPTIONS          =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the alphabetical class index
+#---------------------------------------------------------------------------
+
+# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
+# compounds will be generated. Enable this if the project contains a lot of
+# classes, structs, unions or interfaces.
+# The default value is: YES.
+
+ALPHABETICAL_INDEX     = NO
+
+# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
+# which the alphabetical index list will be split.
+# Minimum value: 1, maximum value: 20, default value: 5.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+COLS_IN_ALPHA_INDEX    = 5
+
+# In case all classes in a project start with a common prefix, all classes will
+# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
+# can be used to specify a prefix (or a list of prefixes) that should be ignored
+# while generating the index headers.
+# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
+
+IGNORE_PREFIX          =
+
+#---------------------------------------------------------------------------
+# Configuration options related to the HTML output
+#---------------------------------------------------------------------------
+
+# If the GENERATE_HTML tag is set to YES doxygen will generate HTML output
+# The default value is: YES.
+
+GENERATE_HTML          = YES
+
+# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
+# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
+# it.
+# The default directory is: html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_OUTPUT            = html
+
+# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
+# generated HTML page (for example: .htm, .php, .asp).
+# The default value is: .html.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FILE_EXTENSION    = .html
+
+# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
+# each generated HTML page. If the tag is left blank doxygen will generate a
+# standard header.
+#
+# To get valid HTML, the header file must include any scripts and style sheets
+# that doxygen needs, which depend on the configuration options used (e.g. the
+# setting GENERATE_TREEVIEW). It is highly recommended to start with a default
+# header using
+# doxygen -w html new_header.html new_footer.html new_stylesheet.css
+# YourConfigFile
+# and then modify the file new_header.html. See also section "Doxygen usage"
+# for information on how to generate the default header that doxygen normally
+# uses.
+# Note: The header is subject to change so you typically have to regenerate the
+# default header when upgrading to a newer version of doxygen. For a description
+# of the possible markers and block names see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_HEADER            = ./doc/misc/header.html
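+
+# Editor's illustration (command taken from the comment above): to regenerate
+# the default header, footer and style sheet before customizing them, run
+#   doxygen -w html new_header.html new_footer.html new_stylesheet.css Doxyfile
+# and adapt the results into the files referenced by HTML_HEADER, HTML_FOOTER
+# and HTML_EXTRA_STYLESHEET.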
+
+# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
+# generated HTML page. If the tag is left blank doxygen will generate a standard
+# footer. See HTML_HEADER for more information on how to generate a default
+# footer and what special commands can be used inside the footer. See also
+# section "Doxygen usage" for information on how to generate the default footer
+# that doxygen normally uses.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_FOOTER            = ./doc/misc/footer.html
+
+# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
+# sheet that is used by each HTML page. It can be used to fine-tune the look of
+# the HTML output. If left blank doxygen will generate a default style sheet.
+# See also section "Doxygen usage" for information on how to generate the style
+# sheet that doxygen normally uses.
+# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
+# it is more robust and this tag (HTML_STYLESHEET) will in the future become
+# obsolete.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_STYLESHEET        =
+
+# The HTML_EXTRA_STYLESHEET tag can be used to specify an additional user-
+# defined cascading style sheet that is included after the standard style sheets
+# created by doxygen. Using this option one can overrule certain style aspects.
+# This is preferred over using HTML_STYLESHEET since it does not replace the
+# standard style sheet and is therefore more robust against future updates.
+# Doxygen will copy the style sheet file to the output directory. For an example
+# see the documentation.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_STYLESHEET  = ./doc/misc/doxygenextra.css
+
+# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
+# other source files which should be copied to the HTML output directory. Note
+# that these files will be copied to the base HTML output directory. Use the
+# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
+# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
+# files will be copied as-is; there are no commands or markers available.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_EXTRA_FILES       =
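+
+# Editor's illustration (logo.png is hypothetical): if HTML_EXTRA_FILES listed
+# logo.png, the custom header could reference it as
+#   <img src="$relpath^logo.png" alt="logo"/>
+# while a style sheet would refer to it by file name only.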
+
+# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
+# will adjust the colors in the stylesheet and background images according to
+# this color. Hue is specified as an angle on a colorwheel, see
+# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
+# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
+# purple, and 360 is red again.
+# Minimum value: 0, maximum value: 359, default value: 220.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_HUE    = 220
+
+# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
+# in the HTML output. For a value of 0 the output will use grayscales only. A
+# value of 255 will produce the most vivid colors.
+# Minimum value: 0, maximum value: 255, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_SAT    = 100
+
+# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
+# luminance component of the colors in the HTML output. Values below 100
+# gradually make the output lighter, whereas values above 100 make the output
+# darker. The value divided by 100 is the actual gamma applied, so 80 represents
+# a gamma of 0.8, the value 220 represents a gamma of 2.2, and 100 does not
+# change the gamma.
+# Minimum value: 40, maximum value: 240, default value: 80.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_COLORSTYLE_GAMMA  = 80
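+
+# Worked example (editor's note): the values above give a blue palette
+# (hue 220 on the 0..359 wheel), default saturation (100 of 255), and a gamma
+# of 80/100 = 0.8, i.e. slightly lightened output.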
+
+# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
+# page will contain the date and time when the page was generated. Setting this
+# to NO can help when comparing the output of multiple runs.
+# The default value is: YES.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_TIMESTAMP         = YES
+
+# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
+# documentation will contain sections that can be hidden and shown after the
+# page has loaded.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_DYNAMIC_SECTIONS  = NO
+
+# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
+# shown in the various tree structured indices initially; the user can expand
+# and collapse entries dynamically later on. Doxygen will expand the tree to
+# such a level that at most the specified number of entries are visible (unless
+# a fully collapsed tree already exceeds this amount). So setting the number of
+# entries 1 will produce a full collapsed tree by default. 0 is a special value
+# representing an infinite number of entries and will result in a full expanded
+# tree by default.
+# Minimum value: 0, maximum value: 9999, default value: 100.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+HTML_INDEX_NUM_ENTRIES = 100
+
+# If the GENERATE_DOCSET tag is set to YES, additional index files will be
+# generated that can be used as input for Apple's Xcode 3 integrated development
+# environment (see: http://developer.apple.com/tools/xcode/), introduced with
+# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
+# Makefile in the HTML output directory. Running make will produce the docset in
+# that directory and running make install will install the docset in
+# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
+# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
+# for more information.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_DOCSET        = NO
+
+# This tag determines the name of the docset feed. A documentation feed provides
+# an umbrella under which multiple documentation sets from a single provider
+# (such as a company or product suite) can be grouped.
+# The default value is: Doxygen generated docs.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_FEEDNAME        = "Doxygen generated docs"
+
+# This tag specifies a string that should uniquely identify the documentation
+# set bundle. This should be a reverse domain-name style string, e.g.
+# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_BUNDLE_ID       = org.doxygen.Project
+
+# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
+# the documentation publisher. This should be a reverse domain-name style
+# string, e.g. com.mycompany.MyDocSet.documentation.
+# The default value is: org.doxygen.Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_ID    = org.doxygen.Publisher
+
+# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
+# The default value is: Publisher.
+# This tag requires that the tag GENERATE_DOCSET is set to YES.
+
+DOCSET_PUBLISHER_NAME  = Publisher
+
+# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
+# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
+# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
+# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
+# Windows.
+#
+# The HTML Help Workshop contains a compiler that can convert all HTML output
+# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
+# files are now used as the Windows 98 help format, and will replace the old
+# Windows help format (.hlp) on all Windows platforms in the future. Compressed
+# HTML files also contain an index, a table of contents, and you can search for
+# words in the documentation. The HTML workshop also contains a viewer for
+# compressed HTML files.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_HTMLHELP      = NO
+
+# The CHM_FILE tag can be used to specify the file name of the resulting .chm
+# file. You can add a path in front of the file if the result should not be
+# written to the html output directory.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_FILE               =
+
+# The HHC_LOCATION tag can be used to specify the location (absolute path
+# including file name) of the HTML help compiler (hhc.exe). If non-empty
+# doxygen will try to run the HTML help compiler on the generated index.hhp.
+# The file has to be specified with full path.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+HHC_LOCATION           =
+
+# The GENERATE_CHI flag controls if a separate .chi index file is generated
+# (YES) or that it should be included in the master .chm file (NO).
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+GENERATE_CHI           = NO
+
+# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
+# and project file content.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+CHM_INDEX_ENCODING     =
+
+# The BINARY_TOC flag controls whether a binary table of contents is generated
+# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
+# enables the Previous and Next buttons.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+BINARY_TOC             = NO
+
+# The TOC_EXPAND flag can be set to YES to add extra items for group members to
+# the table of contents of the HTML help documentation and to the tree view.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
+
+TOC_EXPAND             = NO
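+
+# Editor's illustration (the .chm name and Windows path are hypothetical): a
+# typical CHM build would set
+#   GENERATE_HTMLHELP = YES
+#   CHM_FILE          = ../rapidjson.chm
+#   HHC_LOCATION      = "C:/Program Files (x86)/HTML Help Workshop/hhc.exe"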
+
+# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
+# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
+# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
+# (.qch) of the generated HTML documentation.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_QHP           = NO
+
+# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
+# the file name of the resulting .qch file. The path specified is relative to
+# the HTML output folder.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QCH_FILE               =
+
+# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
+# Project output. For more information please see Qt Help Project / Namespace
+# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_NAMESPACE          = org.doxygen.Project
+
+# The QHP_VIRTUAL_FOLDER tag specifies the virtual folder to use when generating
+# Qt Help Project output. For more information please see Qt Help Project /
+# Virtual Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
+# folders).
+# The default value is: doc.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_VIRTUAL_FOLDER     = doc
+
+# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
+# filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_NAME   =
+
+# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
+# custom filter to add. For more information please see Qt Help Project / Custom
+# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
+# filters).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_CUST_FILTER_ATTRS  =
+
+# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
+# project's filter section matches. Qt Help Project / Filter Attributes (see:
+# http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHP_SECT_FILTER_ATTRS  =
+
+# The QHG_LOCATION tag can be used to specify the location of Qt's
+# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
+# generated .qhp file.
+# This tag requires that the tag GENERATE_QHP is set to YES.
+
+QHG_LOCATION           =
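+
+# Editor's illustration (the namespace is hypothetical): to produce a .qch file
+# one might set
+#   GENERATE_QHP       = YES
+#   QHP_NAMESPACE      = org.example.rapidjson
+#   QHP_VIRTUAL_FOLDER = doc
+#   QHG_LOCATION       = qhelpgenerator
+# so that doxygen runs qhelpgenerator on the generated .qhp automatically.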
+
+# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
+# generated, which together with the HTML files form an Eclipse help plugin. To
+# install this plugin and make it available under the help contents menu in
+# Eclipse, the contents of the directory containing the HTML and XML files needs
+# to be copied into the plugins directory of eclipse. The name of the directory
+# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
+# After copying Eclipse needs to be restarted before the help appears.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_ECLIPSEHELP   = NO
+
+# A unique identifier for the Eclipse help plugin. When installing the plugin
+# the directory name containing the HTML and XML files should also have this
+# name. Each documentation set should have its own identifier.
+# The default value is: org.doxygen.Project.
+# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
+
+ECLIPSE_DOC_ID         = org.doxygen.Project
+
+# If you want full control over the layout of the generated HTML pages it might
+# be necessary to disable the index and replace it with your own. The
+# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
+# of each HTML page. A value of NO enables the index and the value YES disables
+# it. Since the tabs in the index contain the same information as the navigation
+# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+DISABLE_INDEX          = YES
+
+# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
+# structure should be generated to display hierarchical information. If the tag
+# value is set to YES, a side panel will be generated containing a tree-like
+# index structure (just like the one that is generated for HTML Help). For this
+# to work a browser that supports JavaScript, DHTML, CSS and frames is required
+# (i.e. any modern browser). Windows users are probably better off using the
+# HTML help feature. Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can
+# further fine-tune the look of the index. As an example, the default style
+# sheet generated by doxygen has an example that shows how to put an image at
+# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
+# the same information as the tab index, you could consider setting
+# DISABLE_INDEX to YES when enabling this option.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+GENERATE_TREEVIEW      = YES
+
+# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
+# doxygen will group on one line in the generated HTML documentation.
+#
+# Note that a value of 0 will completely suppress the enum values from appearing
+# in the overview section.
+# Minimum value: 0, maximum value: 20, default value: 4.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+ENUM_VALUES_PER_LINE   = 4
+
+# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
+# to set the initial width (in pixels) of the frame in which the tree is shown.
+# Minimum value: 0, maximum value: 1500, default value: 250.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+TREEVIEW_WIDTH         = 250
+
+# When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to
+# external symbols imported via tag files in a separate window.
+# The default value is: NO.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+EXT_LINKS_IN_WINDOW    = NO
+
+# Use this tag to change the font size of LaTeX formulas included as images in
+# the HTML documentation. When you change the font size after a successful
+# doxygen run you need to manually remove any form_*.png images from the HTML
+# output directory to force them to be regenerated.
+# Minimum value: 8, maximum value: 50, default value: 10.
+# This tag requires that the tag GENERATE_HTML is set to YES.
+
+FORMULA_FONTSIZE       = 10
+# +# Note that when changing this option you need to delete any form_*.png files in +# the HTML output directory before the changes have effect. +# The default value is: YES. +# This tag requires that the tag GENERATE_HTML is set to YES. + +FORMULA_TRANSPARENT = YES + +# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see +# http://www.mathjax.org) which uses client side Javascript for the rendering +# instead of using prerendered bitmaps. Use this if you do not have LaTeX +# installed or if you want to formulas look prettier in the HTML output. When +# enabled you may also need to install MathJax separately and configure the path +# to it using the MATHJAX_RELPATH option. +# The default value is: NO. +# This tag requires that the tag GENERATE_HTML is set to YES. + +USE_MATHJAX = NO + +# When MathJax is enabled you can set the default output format to be used for +# the MathJax output. See the MathJax site (see: +# http://docs.mathjax.org/en/latest/output.html) for more details. +# Possible values are: HTML-CSS (which is slower, but has the best +# compatibility), NativeMML (i.e. MathML) and SVG. +# The default value is: HTML-CSS. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_FORMAT = HTML-CSS + +# When MathJax is enabled you need to specify the location relative to the HTML +# output directory using the MATHJAX_RELPATH option. The destination directory +# should contain the MathJax.js script. For instance, if the mathjax directory +# is located at the same level as the HTML output directory, then +# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax +# Content Delivery Network so you can quickly see the result without installing +# MathJax. However, it is strongly recommended to install a local copy of +# MathJax from http://www.mathjax.org before deployment. +# The default value is: http://cdn.mathjax.org/mathjax/latest. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_RELPATH = http://www.mathjax.org/mathjax + +# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax +# extension names that should be enabled during MathJax rendering. For example +# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_EXTENSIONS = + +# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces +# of code that will be used on startup of the MathJax code. See the MathJax site +# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an +# example see the documentation. +# This tag requires that the tag USE_MATHJAX is set to YES. + +MATHJAX_CODEFILE = + +# When the SEARCHENGINE tag is enabled doxygen will generate a search box for +# the HTML output. The underlying search engine uses javascript and DHTML and +# should work on any modern browser. Note that when using HTML help +# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) +# there is already a search function so this one should typically be disabled. +# For large projects the javascript based search engine can be slow, then +# enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to +# search using the keyboard; to jump to the search box use + S +# (what the is depends on the OS and browser, but it is typically +# , /