Diffstat (limited to 'vendor/basic-toml')
-rw-r--r--  vendor/basic-toml/.cargo-checksum.json | 1
-rw-r--r--  vendor/basic-toml/Cargo.lock | 100
-rw-r--r--  vendor/basic-toml/Cargo.toml | 56
-rw-r--r--  vendor/basic-toml/LICENSE-APACHE | 176
-rw-r--r--  vendor/basic-toml/LICENSE-MIT | 23
-rw-r--r--  vendor/basic-toml/README.md | 36
-rw-r--r--  vendor/basic-toml/examples/decode.rs | 54
-rw-r--r--  vendor/basic-toml/src/de.rs | 1723
-rw-r--r--  vendor/basic-toml/src/error.rs | 54
-rw-r--r--  vendor/basic-toml/src/lib.rs | 141
-rw-r--r--  vendor/basic-toml/src/ser.rs | 838
-rw-r--r--  vendor/basic-toml/src/tokens.rs | 546
-rw-r--r--  vendor/basic-toml/tests/README.md | 1
-rw-r--r--  vendor/basic-toml/tests/datetime.rs | 142
-rw-r--r--  vendor/basic-toml/tests/de-errors.rs | 350
-rw-r--r--  vendor/basic-toml/tests/display-tricky.rs | 53
-rw-r--r--  vendor/basic-toml/tests/enum_external_deserialize.rs | 30
-rw-r--r--  vendor/basic-toml/tests/float.rs | 81
-rw-r--r--  vendor/basic-toml/tests/formatting.rs | 53
-rw-r--r--  vendor/basic-toml/tests/invalid-encoder/array-mixed-types-ints-and-floats.json | 15
-rw-r--r--  vendor/basic-toml/tests/invalid-misc.rs | 48
-rw-r--r--  vendor/basic-toml/tests/invalid.rs | 226
-rw-r--r--  vendor/basic-toml/tests/invalid/datetime-malformed-no-leads.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/datetime-malformed-no-secs.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/datetime-malformed-no-t.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/datetime-malformed-with-milli.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/duplicate-key-table.toml | 5
-rw-r--r--  vendor/basic-toml/tests/invalid/duplicate-keys.toml | 2
-rw-r--r--  vendor/basic-toml/tests/invalid/duplicate-table.toml | 8
-rw-r--r--  vendor/basic-toml/tests/invalid/duplicate-tables.toml | 2
-rw-r--r--  vendor/basic-toml/tests/invalid/empty-implicit-table.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/empty-table.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/float-no-leading-zero.toml | 2
-rw-r--r--  vendor/basic-toml/tests/invalid/float-no-suffix.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/float-no-trailing-digits.toml | 2
-rw-r--r--  vendor/basic-toml/tests/invalid/key-after-array.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/key-after-table.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/key-empty.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/key-hash.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/key-newline.toml | 2
-rw-r--r--  vendor/basic-toml/tests/invalid/key-open-bracket.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/key-single-open-bracket.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/key-space.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/key-start-bracket.toml | 3
-rw-r--r--  vendor/basic-toml/tests/invalid/key-two-equals.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/string-bad-byte-escape.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/string-bad-escape.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/string-bad-line-ending-escape.toml | 3
-rw-r--r--  vendor/basic-toml/tests/invalid/string-byte-escapes.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/string-no-close.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/table-array-implicit.toml | 14
-rw-r--r--  vendor/basic-toml/tests/invalid/table-array-malformed-bracket.toml | 2
-rw-r--r--  vendor/basic-toml/tests/invalid/table-array-malformed-empty.toml | 2
-rw-r--r--  vendor/basic-toml/tests/invalid/table-empty.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/table-nested-brackets-close.toml | 2
-rw-r--r--  vendor/basic-toml/tests/invalid/table-nested-brackets-open.toml | 2
-rw-r--r--  vendor/basic-toml/tests/invalid/table-whitespace.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/table-with-pound.toml | 2
-rw-r--r--  vendor/basic-toml/tests/invalid/text-after-array-entries.toml | 4
-rw-r--r--  vendor/basic-toml/tests/invalid/text-after-integer.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/text-after-string.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/text-after-table.toml | 1
-rw-r--r--  vendor/basic-toml/tests/invalid/text-before-array-separator.toml | 4
-rw-r--r--  vendor/basic-toml/tests/invalid/text-in-array.toml | 5
-rw-r--r--  vendor/basic-toml/tests/parser.rs | 687
-rw-r--r--  vendor/basic-toml/tests/tokens.rs | 188
-rw-r--r--  vendor/basic-toml/tests/valid.rs | 368
-rw-r--r--  vendor/basic-toml/tests/valid/array-empty.json | 11
-rw-r--r--  vendor/basic-toml/tests/valid/array-empty.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/array-mixed-types-arrays-and-ints.json | 11
-rw-r--r--  vendor/basic-toml/tests/valid/array-mixed-types-arrays-and-ints.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/array-mixed-types-ints-and-floats.json | 9
-rw-r--r--  vendor/basic-toml/tests/valid/array-mixed-types-ints-and-floats.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/array-mixed-types-strings-and-ints.json | 9
-rw-r--r--  vendor/basic-toml/tests/valid/array-mixed-types-strings-and-ints.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/array-nospaces.json | 10
-rw-r--r--  vendor/basic-toml/tests/valid/array-nospaces.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/arrays-hetergeneous.json | 19
-rw-r--r--  vendor/basic-toml/tests/valid/arrays-hetergeneous.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/arrays-nested.json | 13
-rw-r--r--  vendor/basic-toml/tests/valid/arrays-nested.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/arrays.json | 34
-rw-r--r--  vendor/basic-toml/tests/valid/arrays.toml | 8
-rw-r--r--  vendor/basic-toml/tests/valid/bool.json | 4
-rw-r--r--  vendor/basic-toml/tests/valid/bool.toml | 2
-rw-r--r--  vendor/basic-toml/tests/valid/comments-everywhere.json | 12
-rw-r--r--  vendor/basic-toml/tests/valid/comments-everywhere.toml | 24
-rw-r--r--  vendor/basic-toml/tests/valid/datetime-truncate.json | 6
-rw-r--r--  vendor/basic-toml/tests/valid/datetime-truncate.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/datetime.json | 3
-rw-r--r--  vendor/basic-toml/tests/valid/datetime.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/dotted-keys.json | 34
-rw-r--r--  vendor/basic-toml/tests/valid/dotted-keys.toml | 7
-rw-r--r--  vendor/basic-toml/tests/valid/empty.json | 1
-rw-r--r--  vendor/basic-toml/tests/valid/empty.toml | 0
-rw-r--r--  vendor/basic-toml/tests/valid/example-bom.toml | 5
-rw-r--r--  vendor/basic-toml/tests/valid/example-v0.3.0.json | 1
-rw-r--r--  vendor/basic-toml/tests/valid/example-v0.3.0.toml | 182
-rw-r--r--  vendor/basic-toml/tests/valid/example-v0.4.0.json | 1
-rw-r--r--  vendor/basic-toml/tests/valid/example-v0.4.0.toml | 236
-rw-r--r--  vendor/basic-toml/tests/valid/example.json | 14
-rw-r--r--  vendor/basic-toml/tests/valid/example.toml | 5
-rw-r--r--  vendor/basic-toml/tests/valid/example2.json | 1
-rw-r--r--  vendor/basic-toml/tests/valid/example2.toml | 47
-rw-r--r--  vendor/basic-toml/tests/valid/float-exponent.json | 11
-rw-r--r--  vendor/basic-toml/tests/valid/float-exponent.toml | 9
-rw-r--r--  vendor/basic-toml/tests/valid/float.json | 4
-rw-r--r--  vendor/basic-toml/tests/valid/float.toml | 2
-rw-r--r--  vendor/basic-toml/tests/valid/hard_example.json | 1
-rw-r--r--  vendor/basic-toml/tests/valid/hard_example.toml | 33
-rw-r--r--  vendor/basic-toml/tests/valid/implicit-and-explicit-after.json | 10
-rw-r--r--  vendor/basic-toml/tests/valid/implicit-and-explicit-after.toml | 5
-rw-r--r--  vendor/basic-toml/tests/valid/implicit-and-explicit-before.json | 10
-rw-r--r--  vendor/basic-toml/tests/valid/implicit-and-explicit-before.toml | 5
-rw-r--r--  vendor/basic-toml/tests/valid/implicit-groups.json | 9
-rw-r--r--  vendor/basic-toml/tests/valid/implicit-groups.toml | 2
-rw-r--r--  vendor/basic-toml/tests/valid/integer.json | 14
-rw-r--r--  vendor/basic-toml/tests/valid/integer.toml | 18
-rw-r--r--  vendor/basic-toml/tests/valid/key-empty.json | 3
-rw-r--r--  vendor/basic-toml/tests/valid/key-empty.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/key-equals-nospace.json | 3
-rw-r--r--  vendor/basic-toml/tests/valid/key-equals-nospace.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/key-quote-newline.json | 3
-rw-r--r--  vendor/basic-toml/tests/valid/key-quote-newline.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/key-space.json | 3
-rw-r--r--  vendor/basic-toml/tests/valid/key-space.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/key-special-chars.json | 5
-rw-r--r--  vendor/basic-toml/tests/valid/key-special-chars.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/key-with-pound.json | 3
-rw-r--r--  vendor/basic-toml/tests/valid/key-with-pound.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/long-float.json | 4
-rw-r--r--  vendor/basic-toml/tests/valid/long-float.toml | 2
-rw-r--r--  vendor/basic-toml/tests/valid/long-integer.json | 4
-rw-r--r--  vendor/basic-toml/tests/valid/long-integer.toml | 2
-rw-r--r--  vendor/basic-toml/tests/valid/multiline-string.json | 38
-rw-r--r--  vendor/basic-toml/tests/valid/multiline-string.toml | 34
-rw-r--r--  vendor/basic-toml/tests/valid/quote-surrounded-value.json | 10
-rw-r--r--  vendor/basic-toml/tests/valid/quote-surrounded-value.toml | 2
-rw-r--r--  vendor/basic-toml/tests/valid/raw-multiline-string.json | 14
-rw-r--r--  vendor/basic-toml/tests/valid/raw-multiline-string.toml | 9
-rw-r--r--  vendor/basic-toml/tests/valid/raw-string.json | 30
-rw-r--r--  vendor/basic-toml/tests/valid/raw-string.toml | 7
-rw-r--r--  vendor/basic-toml/tests/valid/string-delim-end.json | 14
-rw-r--r--  vendor/basic-toml/tests/valid/string-delim-end.toml | 3
-rw-r--r--  vendor/basic-toml/tests/valid/string-empty.json | 6
-rw-r--r--  vendor/basic-toml/tests/valid/string-empty.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/string-escapes.json | 58
-rw-r--r--  vendor/basic-toml/tests/valid/string-escapes.toml | 14
-rw-r--r--  vendor/basic-toml/tests/valid/string-simple.json | 6
-rw-r--r--  vendor/basic-toml/tests/valid/string-simple.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/string-with-pound.json | 7
-rw-r--r--  vendor/basic-toml/tests/valid/string-with-pound.toml | 2
-rw-r--r--  vendor/basic-toml/tests/valid/table-array-implicit.json | 7
-rw-r--r--  vendor/basic-toml/tests/valid/table-array-implicit.toml | 2
-rw-r--r--  vendor/basic-toml/tests/valid/table-array-many.json | 16
-rw-r--r--  vendor/basic-toml/tests/valid/table-array-many.toml | 11
-rw-r--r--  vendor/basic-toml/tests/valid/table-array-nest-no-keys.json | 14
-rw-r--r--  vendor/basic-toml/tests/valid/table-array-nest-no-keys.toml | 6
-rw-r--r--  vendor/basic-toml/tests/valid/table-array-nest.json | 18
-rw-r--r--  vendor/basic-toml/tests/valid/table-array-nest.toml | 17
-rw-r--r--  vendor/basic-toml/tests/valid/table-array-one.json | 8
-rw-r--r--  vendor/basic-toml/tests/valid/table-array-one.toml | 3
-rw-r--r--  vendor/basic-toml/tests/valid/table-empty.json | 3
-rw-r--r--  vendor/basic-toml/tests/valid/table-empty.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/table-multi-empty.json | 5
-rw-r--r--  vendor/basic-toml/tests/valid/table-multi-empty.toml | 5
-rw-r--r--  vendor/basic-toml/tests/valid/table-sub-empty.json | 3
-rw-r--r--  vendor/basic-toml/tests/valid/table-sub-empty.toml | 2
-rw-r--r--  vendor/basic-toml/tests/valid/table-whitespace.json | 3
-rw-r--r--  vendor/basic-toml/tests/valid/table-whitespace.toml | 1
-rw-r--r--  vendor/basic-toml/tests/valid/table-with-pound.json | 5
-rw-r--r--  vendor/basic-toml/tests/valid/table-with-pound.toml | 2
-rw-r--r--  vendor/basic-toml/tests/valid/unicode-escape.json | 8
-rw-r--r--  vendor/basic-toml/tests/valid/unicode-escape.toml | 6
-rw-r--r--  vendor/basic-toml/tests/valid/unicode-literal.json | 3
-rw-r--r--  vendor/basic-toml/tests/valid/unicode-literal.toml | 1
176 files changed, 7375 insertions, 0 deletions
diff --git a/vendor/basic-toml/.cargo-checksum.json b/vendor/basic-toml/.cargo-checksum.json
new file mode 100644
index 000000000..f875b19ff
--- /dev/null
+++ b/vendor/basic-toml/.cargo-checksum.json
@@ -0,0 +1 @@
+{"files":{"Cargo.lock":"308a8fe5d6fbbbe77aafb260239c30e50c7fc9bc45707aab6f901e6742396495","Cargo.toml":"5529927350c465769e2dcf6df137c08d4813bce3f4828028bd3be508f0410876","LICENSE-APACHE":"62c7a1e35f56406896d7aa7ca52d0cc0d272ac022b5d2796e7d6905db8a3636a","LICENSE-MIT":"23f18e03dc49df91622fe2a76176497404e46ced8a715d9d2b67a7446571cca3","README.md":"5619ae83205466278a91e52d0d0ff050440754a847acef802edfd8ed9ab73e0a","examples/decode.rs":"c6b00634e2d59589f47777b9012672856e8a257be84b081fb88f2ec4e4e41cc8","src/de.rs":"3809a213ad7b04f97105e2d694b131f618d1fe80f22434c67c8e9d51781c5aa2","src/error.rs":"59798d362c2794420801a9dd951916f58163312ec133ab48a2c1ecd4c445b5d7","src/lib.rs":"f58b31356e8c29a99b3a68458fd390cf600d77db77d1ab9233c1d5927a08e141","src/ser.rs":"7b9c591497f268451ff9f395fa7c57118af5dfba94a97f46703533a5b200b812","src/tokens.rs":"41b161076e020d2a51dde8f28179e65cbdf76365f4dd18e316afd6ad71151bfc","tests/README.md":"3e7d07d64c34e117c3d862ee12e4947af8c133fb0b53f6f0de43cc3bfa5057b8","tests/datetime.rs":"b2effdea59ad0b1f856bb67ad3b855e34fea2ef3ddd2453d48d6d30f2b62104a","tests/de-errors.rs":"83a779a3e859a4823775990adb78b01dabab83f1f595914ba61e0e15c7d2fbaa","tests/display-tricky.rs":"240f050d9bdf4693d2dec5b64c8ee616c2fb1acff3addaad4da582bf9ea03295","tests/enum_external_deserialize.rs":"1ea8bd4585d76c03ca15136d7ffa44348e235a57ca846180cf20466e5c8edf22","tests/float.rs":"dcf3439f618b23b0e0b1e7d1f16f4f45eeb8c83689919d7fa7fd2e5e87dc171f","tests/formatting.rs":"d6904ef6d7689beee0751da6d4d6b74fec4d4ce469234894f4a06b345b7ae620","tests/invalid-encoder/array-mixed-types-ints-and-floats.json":"206d2a353153efbcee74e7ee7b0f852e7a948cfb88417f74607b3ad6f020e373","tests/invalid-misc.rs":"49b5d2664647a6dc31d662565d5972da3d9253a1fb5146584ea79b9c0b6379a6","tests/invalid.rs":"17ada3847954c45a56907d125aafe381bf2fb8cb3d6586fc7a10055b3857b2df","tests/invalid/datetime-malformed-no-leads.toml":"1fa6bf72ea0b96be835ac031408f7acfd0b8d18d5f54b4c0fce8136aad27d22e","tests/invalid/datetime-malformed-no-secs.toml":"b9c30b7fa1847c80e381d68cc79834eefbb8813cd0eff465fb7cbe0733df429b","tests/invalid/datetime-malformed-no-t.toml":"490a46d2e707ef4f87987ec12a22afa5a71c97c5eaa0f4201a174c47846c6a4a","tests/invalid/datetime-malformed-with-milli.toml":"62bfd0a6645bcb3f78211523e5673a3d1fa726d9e942c1643df243d6fba474c8","tests/invalid/duplicate-key-table.toml":"a896ea2180d16fcdd4f6c30aa27529b5b29e7e195a4e7a9944343028783602e9","tests/invalid/duplicate-keys.toml":"4bb0a65c22e7704893a2a51334eb2518af702850ef59aeecd5226e7b26bc07ec","tests/invalid/duplicate-table.toml":"449a36439d737320604c96b2fb86e796195f4801446dfd294c32dcb6ab3a7add","tests/invalid/duplicate-tables.toml":"23b16ce26e1155ee6bf1fff559701dce86475c6a2878536b61f6b7e68be340de","tests/invalid/empty-implicit-table.toml":"d6026110dc0dee7babd69592218819328caa425f48cc879e895b29e48550c06c","tests/invalid/empty-table.toml":"37517e5f3dc66819f61f5a7bb8ace1921282415f10551d2defa5c3eb0985b570","tests/invalid/float-no-leading-zero.toml":"159654461094c938574ba2d2d09baa3d3c387dd6ed024fd411530c0573a1ec42","tests/invalid/float-no-suffix.toml":"4d6ce54a312e2da26b89c47e19e2c9002ce5fcfe59ec1201f17e240369429d46","tests/invalid/float-no-trailing-digits.toml":"64e4f336186cd096be2804532dbd694dd50ea551d292a9cdbf0bef2abf227101","tests/invalid/key-after-array.toml":"314af33770170b53bf2ec3be43ea1609d981c81d62c968052499b85ed54ccce8","tests/invalid/key-after-table.toml":"ed0dcf38f003d184dd18d1518702da0115cbfb05a5a28cbcf42de2f9bdee05fa","tests/invalid/key-empty.toml":"4303477abf4c0b920b42e96edd61caecf9c1f2d5d97f56af876854
cd725aff3c","tests/invalid/key-hash.toml":"cd2a2eba6032d32c829633d3cd2778aeba81f5ff554e69f62af6557d1dc712f6","tests/invalid/key-newline.toml":"06a313a6bd70c2db6a1f5bda479d854d8f87e037e3cabf18fb5db822466ffcac","tests/invalid/key-open-bracket.toml":"52dea939022949290e3a19f1291d15605429344dce3cd1aa1f1568ecad8ca365","tests/invalid/key-single-open-bracket.toml":"245843abef9e72e7efac30138a994bf6301e7e1d7d7042a33d42e863d2638811","tests/invalid/key-space.toml":"b4e336d07c27fb3d0f0a6e50b733e1546202dfd58aaf2f7956f56fd6f075b0a1","tests/invalid/key-start-bracket.toml":"3bd3748a9df1d25ab2661330a3da187bd4da3958292bbf0e8b59d7963634dd87","tests/invalid/key-two-equals.toml":"3ac0c4e339d47c86e57564e43147b772ae83933b78083dc78d0ef77e231df3f1","tests/invalid/string-bad-byte-escape.toml":"c665dcec7c02f442c4fdc80423698eed2376ce65779cf855371772293bec2927","tests/invalid/string-bad-escape.toml":"eeca691fbba3d270f58ae2953d2d1369a773e619e39d8c11f38d6bf6f8472e82","tests/invalid/string-bad-line-ending-escape.toml":"ffa1673378cf4c4153d7f139ea8e4628c1f0aaa721c94b8766a1a3822fb080e4","tests/invalid/string-byte-escapes.toml":"4a4604b32971de3a252cd01d2997b450972c3ec9030cf22a070d49c57f050da4","tests/invalid/string-no-close.toml":"bb2eaf96eb9f83a52bd0772abb313060a06b94f650efeb45edce774622686882","tests/invalid/table-array-implicit.toml":"9b841ea64d68be4deb54f67fc807b05fd235452ee563ffa7de69dbca64b2f7dd","tests/invalid/table-array-malformed-bracket.toml":"164f641b2628bf04f8202d9746a360a4a243faca1408dc2ecd0c0fdd2d1c2c27","tests/invalid/table-array-malformed-empty.toml":"56ca2a15019cf5c3555041a191f983dc72b1678f0de0afd1a7b8f46ed7970420","tests/invalid/table-empty.toml":"37517e5f3dc66819f61f5a7bb8ace1921282415f10551d2defa5c3eb0985b570","tests/invalid/table-nested-brackets-close.toml":"991e1210f81e24abcd735988f0d960d4ee94d2ec3b133c6fea6e24932d91c507","tests/invalid/table-nested-brackets-open.toml":"8fb569fc90fa606ae94708ee2bf205bff8db8a023624b3e52ef6b2c1a98ba1c6","tests/invalid/table-whitespace.toml":"2c2db1259adc641df0459e896d349d3db60965d5368d5c8ed50aedd3bc88f040","tests/invalid/table-with-pound.toml":"d8070437f07bd115ac8006c61e286401bd3be88728a62264796e757121750ecd","tests/invalid/text-after-array-entries.toml":"2530cace13292313983b90b01d63e4b8ac484809e7ef0ac79904923573eda7ec","tests/invalid/text-after-integer.toml":"6168ed823a135b8914956b04307aeec2347b10eb1aa79008406d7b547cbde682","tests/invalid/text-after-string.toml":"1771987dd679e1cc181cf53406ba313fdc3413a081d17a93da6041bf6ccccf5e","tests/invalid/text-after-table.toml":"f27ae56bb0b42d3af4c813392857afdfeb4bf8ab77ff896cd93ba32cf1a21b26","tests/invalid/text-before-array-separator.toml":"192d28699573abbdc521797576d4885adf756336c3e76971f10270603c957464","tests/invalid/text-in-array.toml":"50d7b16d7a03d470f1a907eebfeb156d0c696e6f9a8c734a5e4caa2365f54654","tests/parser.rs":"51a258620d7f16a7d83a54e593fb005b0f8d10047cecc70ac42fef66befddc5f","tests/tokens.rs":"cdad53278f9e082bcb921ccf19aff5da92fe2cc2795108b49a629c0d8829b8bc","tests/valid.rs":"2ffcf755946117063ab1a8513d5f758cc7b07e81fba1b095ed1a94fbe6925d5b","tests/valid/array-empty.json":"4ed46e8aa3586a7ddd9097cda38919699860052f188880de18e12d4acbee5307","tests/valid/array-empty.toml":"769faa42a690b02ad1b573277f0d793a4a38a7ecf30b9db67bf9836fe2b7778c","tests/valid/array-mixed-types-arrays-and-ints.json":"06a82a2cb05ea8bc5380cc1bd77a432aa4829b2f9516d42ba42ea4b2c619ba5a","tests/valid/array-mixed-types-arrays-and-ints.toml":"c1547b686357c7a865e333a7ce7eed1e30743ebc4daac8ea6148bdd84b7e4dc7","tests/valid/array-mixed-types-ints-and-floats.json"
:"203b95852cc8f980459bb471c6185bdd47ffde0083174552b898b44522e83de8","tests/valid/array-mixed-types-ints-and-floats.toml":"baa235fc168869716680955fd5bdafab30b6fa9b3d09185086261a56ef12960e","tests/valid/array-mixed-types-strings-and-ints.json":"d5cbed82431602916837e01b7abc24181434e0f60ca1b76d5149a25cc2b6cfc3","tests/valid/array-mixed-types-strings-and-ints.toml":"4440057ed90d4461db0be55cfd71299cad96b601d5faec2140666242f6fde147","tests/valid/array-nospaces.json":"7c82b474797871488c2b522e9b852772a78c681a86900f780f7a0be4f901e1ec","tests/valid/array-nospaces.toml":"01fba63551b1051f7e117c9551c488b293bd3cd4745cbeec6b3709c32f2a3a64","tests/valid/arrays-hetergeneous.json":"dac4702412c90d5ddd6b898c3657c71d782d0aca46e5b9ca19cc694d3e226c0f","tests/valid/arrays-hetergeneous.toml":"361ae2e8470b47b12b336eb61eedd09abb20e216fbeb582b46d16a2831adda4d","tests/valid/arrays-nested.json":"34d6f8fd770f0728f38dbf0d686bed2c218bc16da978290c0208d3bf3704bdec","tests/valid/arrays-nested.toml":"a5941a01a2ba2fa179a3885039a5f81ca6c9876b2e8bea7b880563238be9f004","tests/valid/arrays.json":"315fff195a7d4c80e867e1f14c12a23e36dcc666e8de36138249b15e99bdd4dd","tests/valid/arrays.toml":"2d3c91b77d4f6a65a6a5a2c5ad521dbe86cc2f0ff389dfe58e8c34275cdc35c7","tests/valid/bool.json":"bb608b912fe4d89cb2186da957c10951780575bb34b2f43305335c745eff049c","tests/valid/bool.toml":"3c06ad4dce7430326f5f867105b2e9382def90cccb3ecb69f657c0b88974ab04","tests/valid/comments-everywhere.json":"800f8efd86d1bab4f91f0e367da52a2465e1480387df892561ed8948fd1a38c3","tests/valid/comments-everywhere.toml":"55e6f76fa086ccd0f1c38bb8ad86ae7cfe08c67dc4b2dfba273b8cfaf58a38c4","tests/valid/datetime-truncate.json":"5ca5dbb56bb00c4cfc4c10aeb5607160284593bad3bf6dc988425a4f1f53dfbc","tests/valid/datetime-truncate.toml":"55ee01485f3e537fb6b2d3977f656c11543e53adb4e79c6fa8f2a953183c0f7f","tests/valid/datetime.json":"94f130c3b2a5f30c625a3a3168b9dfe52aa109b470c4e077f352b3dd79382a69","tests/valid/datetime.toml":"4e1b71ba31a1feef80a1e436225aa9c5d291bf780f558e7cfa76998fe2a29e08","tests/valid/dotted-keys.json":"9b67e64726eadd737a02e009452c34e6374693a418a4b1e0f305344095afe035","tests/valid/dotted-keys.toml":"30e19d48a19773a3865e9b5def1e7619d9a8b8797d2d0ea76775e3ea028b0dcd","tests/valid/empty.json":"ca3d163bab055381827226140568f3bef7eaac187cebd76878e0b63e9e442356","tests/valid/empty.toml":"e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855","tests/valid/example-bom.toml":"50f3607be97de2f894ccd41a29d3a8a1b1f430982e5ab5bf43047874837f6a42","tests/valid/example-v0.3.0.json":"ec02332eb59dab93a50560771e4b01b972a29d93b6dca8954728c0631e1799a0","tests/valid/example-v0.3.0.toml":"aba9349588c1ba6af2a3ad0c1db2c3d311dca66b64f654480340a0823dfd4546","tests/valid/example-v0.4.0.json":"8b967b246ca2383172eaaecf790b2115a18020f63c70d0848d1dc25fc1fed5a9","tests/valid/example-v0.4.0.toml":"4cae5f85588bdf4fa262eb663074ecaeb350c7ada5f9a340facac8ac71c38e74","tests/valid/example.json":"c57fabb1be0ae7ed140fe7ae5082051571c85180204b8ae8d8da1e86133278c9","tests/valid/example.toml":"6f02b2a52ea63c70b629e41e06a8eb1eb4aab359ab966a7a397b248e13849c9c","tests/valid/example2.json":"de89432e78f0a074aae54650fedf151ceca3b0ccb148b8a66e18e2ed68024ba2","tests/valid/example2.toml":"c389eeb485fb7cb9445d617a9a0f8ba70049f08d66cf6b6f2a9a986574295de1","tests/valid/float-exponent.json":"36357ed835b2bdffeb47048024aa462e3c7782b0eb7188f4ed1b2e3db534d85c","tests/valid/float-exponent.toml":"da4ba974364b3dfd258aa377fe34422f01d084f2124e8a708eddf60478391436","tests/valid/float.json":"9676c13fef00a01bc729456bfe27f1b24a1bd059c9a59
13bb0b0401e976a0aab","tests/valid/float.toml":"b6784f554aa38bb210f0905c3bafdfae6db723a4f53288fb07febc66451bbc2d","tests/valid/hard_example.json":"8d170e73e156b8b6be559246880e9cb6a79b36f63d14bc97e3bdf2f2091e7a17","tests/valid/hard_example.toml":"cd3b89f8917a44f944b12fe47f69f86bb39f17db85d1a081bf0c134831eb90f9","tests/valid/implicit-and-explicit-after.json":"6dcaeaf8ee3479bf2cd5c14eb58970757175aaefab014bce9acb0b85e7bf9dd0","tests/valid/implicit-and-explicit-after.toml":"0599b16822764cdb1c3d3cf53f71186b97afc9f60f8d58358a4a89fe4d9477c3","tests/valid/implicit-and-explicit-before.json":"6dcaeaf8ee3479bf2cd5c14eb58970757175aaefab014bce9acb0b85e7bf9dd0","tests/valid/implicit-and-explicit-before.toml":"33435dddb68144b652ca5f5e0c4c53e4f7b3ca0166f9b944eda55f8d76ed2871","tests/valid/implicit-groups.json":"fc2bb43ec20c8c9148c8a70490b3a054506932c41687222ea11faae47eafb723","tests/valid/implicit-groups.toml":"248b3e8272ec43ce4af872981acde10628eeae73537ed6763a1f4245f5a9610c","tests/valid/integer.json":"10b72ac9bbd338935f76e58d1a9a7481900bfbfa05b43c232466a42c485bb293","tests/valid/integer.toml":"b894cb157bd7785aaf2730def4b9ecd9a268c1610fd98b3418484197ba135a21","tests/valid/key-empty.json":"9a7093efce2dbc6fea2a1b31effb7e79f90bd98179fbcb1c833d2a9b242b2883","tests/valid/key-empty.toml":"8137eaf01d9348a9bfe09f9f0682908be043bcbbe5a4b7c6dec9e3cbe18d5daf","tests/valid/key-equals-nospace.json":"b9878ee3585d1b48710a4bc09f2163b910ea71a2bfdaa8f1da68e599e8b30f47","tests/valid/key-equals-nospace.toml":"24cab0d01b67b184d0a737de3a5b5d47b8b69b36203273296d5ef763f7fdcf68","tests/valid/key-quote-newline.json":"dc077cc852761ffbab2cb23486c023dae2e07410c76fcb507a40d96ed8922e06","tests/valid/key-quote-newline.toml":"a224611bfce786f7d04a3a6abda62fdff79bc6fd2cb94263334d135d46e0143b","tests/valid/key-space.json":"30be539b01420be5cedc9078b88c3279bbef7c0bdde36ba8249ed8906112d5c7","tests/valid/key-space.toml":"9e9459b8cfebc404cf93d77c2d4082cadcd57165a2f9ce9cb35d1d12dc94a8c0","tests/valid/key-special-chars.json":"8bbebb20660d93efa73707bdb09e87a43c2b31c18f13df6388c701a1bc7cab8c","tests/valid/key-special-chars.toml":"c6cb0ba12d32f03cda4206097a1edb27cd154215d72e1c5791cc4f8dff2270b3","tests/valid/key-with-pound.json":"ea4dd79d0ad2a824bcce5c4c194d7fae82a7584a2ab7c0d83d6ddaae6130973e","tests/valid/key-with-pound.toml":"c334f676e19c01b96111277248654358cc8222fd0639aecaf429407e63b6a2dc","tests/valid/long-float.json":"7e103f56e490aa1b1fe5a762000ceb1f8c599f7d81aa215c90f5da41ab4ba6db","tests/valid/long-float.toml":"4d23f706f2a0d241840f6ea78657820c9c7b904c0c3c16828f8cc2574d7c8761","tests/valid/long-integer.json":"9ed7976639f0c2cd7f112584e2f5d272e92569be7135ea5bb9ba597abaff0767","tests/valid/long-integer.toml":"309f94be7ff5fd6f6dedbd257a1e6c171cb71aa74409ff3f8babda951f89d687","tests/valid/multiline-string.json":"99026fe11c35cdb15afd6de528a8622f9a44796f8b2655be5dac0af7c8489f0f","tests/valid/multiline-string.toml":"3b06438b7ca34b14fb937736cae48981d1e700ceb631bb8a9480816952122dea","tests/valid/quote-surrounded-value.json":"2cb712bf38c2b0c8dd88595a554cb7b200e0e92b91274ed279549b612362d70f","tests/valid/quote-surrounded-value.toml":"87ad75f060b87c95dbe4b150655a45821d4183be845ff54647975494d6c8267e","tests/valid/raw-multiline-string.json":"4c95e34497433168cac25eb4132485c3bd13c35cad9d13d7becf7f90469dacca","tests/valid/raw-multiline-string.toml":"c724151024ccde432e0ec0d4ba60a5320d77841008116324c39516b8cbb94f4d","tests/valid/raw-string.json":"19268797aff8dfa28437d6ed8f9d813035f6eee50aade5fc774ba12b3290216b","tests/valid/raw-string.toml":"16510e05d477a0856ebaf38c
acd0e9e18f02ab63ac7bd1a2eabbaa47a54d0e49","tests/valid/string-delim-end.json":"ea0376c00a2dfc5fe6d62b229f0b4a8dafde7497fe7d7822c4f9d4758e366444","tests/valid/string-delim-end.toml":"d7fa705bf7ff1a0862479a137388fb24e0aa8a78f1d762c8c8a19cdc26bd2228","tests/valid/string-empty.json":"ece7d01326742a46e37d6404175118c0a91a2494f7ba2290bbc1d6f990ddb65b","tests/valid/string-empty.toml":"251e9e4052ede79f6b2462e71f73e0b7c9f5927484f6f77f0cd8b3c839b0c13b","tests/valid/string-escapes.json":"765ebd0b9c5d80e0cdfac2c1eb6d7616f7f12809ff01ee86c68078e16fee4723","tests/valid/string-escapes.toml":"f99744c9822e48295214879d2a00f51ae656d2e8d54fffd8742cfae29f0bad65","tests/valid/string-simple.json":"622676e347676cce9f9733210acbd8056ce77a0588772ffd6efb05bb4e81b571","tests/valid/string-simple.toml":"ae74db09acea3be2ccae7f854f7b6f7c874ace9d4d87bf2f437b059a7d38a464","tests/valid/string-with-pound.json":"458a0add71536c1df5e1ed3ee5483c6eb48578abce0b0ebcdf75ea20d41ed6f4","tests/valid/string-with-pound.toml":"1aee397830d9ad2a93d41ee9c435acdbfef3758d1bb7c48bca7424fbbec89466","tests/valid/table-array-implicit.json":"3f7d3cdb468de67bc183162805d9c753ef5772f6f363ac2a26598387a5d991ea","tests/valid/table-array-implicit.toml":"66bcb030899a95e9a25ec44b7c9291b02f80ecbc324061cf1cd93223a2919f21","tests/valid/table-array-many.json":"3f21243eeb71ca3e5657a43559c806e12e3833e9f74c43c0c12aad9b0c853e4c","tests/valid/table-array-many.toml":"8d8ea546f954a81ca149a02147ae5f4bf075151cfcd530e62dcf05a04d843ffb","tests/valid/table-array-nest-no-keys.json":"a93ed1e96a8c52848e65a2f79c0e5c0627fbc62bd5f3e2557fdd0d42c303e7da","tests/valid/table-array-nest-no-keys.toml":"9cfcd72b7d166b4ae77a7be456017f749f375ad562475802e22dc1a57b503dc7","tests/valid/table-array-nest.json":"0a987d2bf1d5bc85f5c9433f23d389063600682a68538b6e57938a3c572959e4","tests/valid/table-array-nest.toml":"71b9c753bf773f232ac71cb2469a54ee0110ff137829045421edd7c5a64d6b6a","tests/valid/table-array-one.json":"7dc0ea3f7f843f7dc7443e68af43a1e5130a5fbae8a27fb02d8d92fa2487888e","tests/valid/table-array-one.toml":"4c478aea2dd7dfcfda682503b49e610f0fa4ce85a3b3cd0bc9041d4959e3626a","tests/valid/table-empty.json":"11e43e212d87b3b2547a5f2541f4091a3d2f6ba00b2a2004b07e02734e927ea7","tests/valid/table-empty.toml":"24d4941e67d5965d270eaebdb9816b994311e0f2f0e79ef6bb626f362c52842e","tests/valid/table-multi-empty.json":"3a2c82201a0447304afb23fb48ee961b4bd812fca3e1b061cc033e7e2bfb976c","tests/valid/table-multi-empty.toml":"886c9c4cc2d691816ed3fa404cb9d362b0511eb8c389a71419a858abb26f83df","tests/valid/table-sub-empty.json":"85cca6d48a5993c4f207c21ed96652af4f50b6936b0807659c75317c1763b6db","tests/valid/table-sub-empty.toml":"ae92e90a806ffefcbf8cda83cb82acf7448f75efa50dcfb5e2384632d36471b3","tests/valid/table-whitespace.json":"ad84ac49a6d13f7c4a8af0e1e71fd7ff2a446aa16a34c21a809a0850dfa76e73","tests/valid/table-whitespace.toml":"2f15dafb263d2771671db299f6202b4b78d293aec1ded7641ec7eb1cb024b52c","tests/valid/table-with-pound.json":"151e76606efe77500cbb0aa8fcf8ccfadb124d533bb79a9caa62e937b826e676","tests/valid/table-with-pound.toml":"a1f86c2e3789cc89500ec1d5eac2ec0bdb94bf445fddc3cab558b5228f3aba56","tests/valid/unicode-escape.json":"3bc31dab0a0738cee760cff9ca4a4f459c78e9bf3c4261742cf4997dfb2110bd","tests/valid/unicode-escape.toml":"977abfcf8ea9c5c79dcd6f13aacc69c97a7879749115a52a3e3f7f18d50b810a","tests/valid/unicode-literal.json":"1dd42756384b954955815dc3e906db64b4cd2c0c094f9b3c86633d1652d6d79d","tests/valid/unicode-literal.toml":"bffc6c3d4757de31d0cbfd7b8dc591edd2910fe8a4e1c46bbee422dddc841003"},"package":"5c0de7512
9aa8d0cceaf750b89013f0e08804d6ec61416da787b35ad0d7cddf1"} \ No newline at end of file
diff --git a/vendor/basic-toml/Cargo.lock b/vendor/basic-toml/Cargo.lock
new file mode 100644
index 000000000..e30127d7d
--- /dev/null
+++ b/vendor/basic-toml/Cargo.lock
@@ -0,0 +1,100 @@
+# This file is automatically @generated by Cargo.
+# It is not intended for manual editing.
+version = 3
+
+[[package]]
+name = "basic-toml"
+version = "0.1.2"
+dependencies = [
+ "semver",
+ "serde",
+ "serde_derive",
+ "serde_json",
+]
+
+[[package]]
+name = "itoa"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fad582f4b9e86b6caa621cabeb0963332d92eea04729ab12892c2533951e6440"
+
+[[package]]
+name = "proc-macro2"
+version = "1.0.50"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2"
+dependencies = [
+ "unicode-ident",
+]
+
+[[package]]
+name = "quote"
+version = "1.0.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
+dependencies = [
+ "proc-macro2",
+]
+
+[[package]]
+name = "ryu"
+version = "1.0.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7b4b9743ed687d4b4bcedf9ff5eaa7398495ae14e61cba0a295704edbc7decde"
+
+[[package]]
+name = "semver"
+version = "1.0.16"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "58bc9567378fc7690d6b2addae4e60ac2eeea07becb2c64b9f218b53865cba2a"
+dependencies = [
+ "serde",
+]
+
+[[package]]
+name = "serde"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bb7d1f0d3021d347a83e556fc4683dea2ea09d87bccdf88ff5c12545d89d5efb"
+dependencies = [
+ "serde_derive",
+]
+
+[[package]]
+name = "serde_derive"
+version = "1.0.152"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
+[[package]]
+name = "serde_json"
+version = "1.0.91"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877c235533714907a8c2464236f5c4b2a17262ef1bd71f38f35ea592c8da6883"
+dependencies = [
+ "itoa",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "syn"
+version = "1.0.107"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "unicode-ident",
+]
+
+[[package]]
+name = "unicode-ident"
+version = "1.0.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84a22b9f218b40614adcb3f4ff08b703773ad44fa9423e4e0d346d5db86e4ebc"
diff --git a/vendor/basic-toml/Cargo.toml b/vendor/basic-toml/Cargo.toml
new file mode 100644
index 000000000..df64feadb
--- /dev/null
+++ b/vendor/basic-toml/Cargo.toml
@@ -0,0 +1,56 @@
+# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
+#
+# When uploading crates to the registry Cargo will automatically
+# "normalize" Cargo.toml files for maximal compatibility
+# with all versions of Cargo and also rewrite `path` dependencies
+# to registry (e.g., crates.io) dependencies.
+#
+# If you are reading this file be aware that the original Cargo.toml
+# will likely look very different (and much more reasonable).
+# See Cargo.toml.orig for the original contents.
+
+[package]
+edition = "2021"
+name = "basic-toml"
+version = "0.1.2"
+authors = [
+ "Alex Crichton <alex@alexcrichton.com>",
+ "David Tolnay <dtolnay@gmail.com>",
+]
+description = "Minimal TOML library with few dependencies"
+documentation = "https://docs.rs/basic-toml"
+readme = "README.md"
+keywords = [
+ "toml",
+ "serde",
+]
+categories = [
+ "config",
+ "encoding",
+ "parser-implementations",
+]
+license = "MIT OR Apache-2.0"
+repository = "https://github.com/dtolnay/basic-toml"
+
+[package.metadata.docs.rs]
+targets = ["x86_64-unknown-linux-gnu"]
+
+[lib]
+doc-scrape-examples = false
+
+[dependencies.serde]
+version = "1.0.97"
+
+[dev-dependencies.semver]
+version = "1.0"
+features = ["serde"]
+
+[dev-dependencies.serde]
+version = "1.0"
+features = ["derive"]
+
+[dev-dependencies.serde_derive]
+version = "1.0"
+
+[dev-dependencies.serde_json]
+version = "1.0"
diff --git a/vendor/basic-toml/LICENSE-APACHE b/vendor/basic-toml/LICENSE-APACHE
new file mode 100644
index 000000000..1b5ec8b78
--- /dev/null
+++ b/vendor/basic-toml/LICENSE-APACHE
@@ -0,0 +1,176 @@
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+END OF TERMS AND CONDITIONS
diff --git a/vendor/basic-toml/LICENSE-MIT b/vendor/basic-toml/LICENSE-MIT
new file mode 100644
index 000000000..31aa79387
--- /dev/null
+++ b/vendor/basic-toml/LICENSE-MIT
@@ -0,0 +1,23 @@
+Permission is hereby granted, free of charge, to any
+person obtaining a copy of this software and associated
+documentation files (the "Software"), to deal in the
+Software without restriction, including without
+limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following
+conditions:
+
+The above copyright notice and this permission notice
+shall be included in all copies or substantial portions
+of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
+ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
+PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
+SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
+CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
+IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/vendor/basic-toml/README.md b/vendor/basic-toml/README.md
new file mode 100644
index 000000000..c76fa400f
--- /dev/null
+++ b/vendor/basic-toml/README.md
@@ -0,0 +1,36 @@
+# basic-toml
+
+[<img alt="github" src="https://img.shields.io/badge/github-dtolnay/basic--toml-8da0cb?style=for-the-badge&labelColor=555555&logo=github" height="20">](https://github.com/dtolnay/basic-toml)
+[<img alt="crates.io" src="https://img.shields.io/crates/v/basic-toml.svg?style=for-the-badge&color=fc8d62&logo=rust" height="20">](https://crates.io/crates/basic-toml)
+[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-basic--toml-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs" height="20">](https://docs.rs/basic-toml)
+[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/dtolnay/basic-toml/ci.yml?branch=master&style=for-the-badge" height="20">](https://github.com/dtolnay/basic-toml/actions?query=branch%3Amaster)
+
+A library for parsing and producing data in [TOML] format using [Serde].
+
+This crate is a stripped down fork of version 0.5 of the `toml` crate (from
+before the `toml_edit` rewrite).
+
+[TOML]: https://toml.io
+[Serde]: https://serde.rs
+
+```toml
+[dependencies]
+basic-toml = "0.1"
+```
+
+<br>
+
+#### License
+
+<sup>
+Licensed under either of <a href="LICENSE-APACHE">Apache License, Version
+2.0</a> or <a href="LICENSE-MIT">MIT license</a> at your option.
+</sup>
+
+<br>
+
+<sub>
+Unless you explicitly state otherwise, any contribution intentionally submitted
+for inclusion in this crate by you, as defined in the Apache-2.0 license, shall
+be dual licensed as above, without any additional terms or conditions.
+</sub>
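
Editor's note: the README above only shows the dependency line, so here is a minimal round-trip sketch for orientation. It assumes `basic_toml::from_str` (defined later in src/de.rs), a `basic_toml::to_string` serializer entry point backing src/ser.rs, and `serde_derive` in the dependency list; it is an illustration, not part of the vendored crate.

```rust
use serde_derive::{Deserialize, Serialize};

#[derive(Debug, Deserialize, Serialize)]
struct Package {
    name: String,
    version: String,
}

fn main() {
    // Parse TOML text into a typed struct via Serde.
    let pkg: Package =
        basic_toml::from_str("name = \"basic-toml\"\nversion = \"0.1.2\"").unwrap();
    assert_eq!(pkg.name, "basic-toml");

    // Produce TOML back out of the same value (assumed `to_string` entry point).
    let text = basic_toml::to_string(&pkg).unwrap();
    println!("{text}");
}
```

The vendored examples/decode.rs that follows shows the deserialization side in more detail.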
diff --git a/vendor/basic-toml/examples/decode.rs b/vendor/basic-toml/examples/decode.rs
new file mode 100644
index 000000000..21e438294
--- /dev/null
+++ b/vendor/basic-toml/examples/decode.rs
@@ -0,0 +1,54 @@
+//! An example showing off the usage of `Deserialize` to automatically decode
+//! TOML into a Rust `struct`
+
+#![deny(warnings)]
+#![allow(dead_code)]
+
+use serde_derive::Deserialize;
+
+/// This is what we're going to decode into. Each field is optional, meaning
+/// that it doesn't have to be present in TOML.
+#[derive(Debug, Deserialize)]
+struct Config {
+ global_string: Option<String>,
+ global_integer: Option<u64>,
+ server: Option<ServerConfig>,
+ peers: Option<Vec<PeerConfig>>,
+}
+
+/// Sub-structs are decoded from tables, so this will decode from the `[server]`
+/// table.
+///
+/// Again, each field is optional, meaning they don't have to be present.
+#[derive(Debug, Deserialize)]
+struct ServerConfig {
+ ip: Option<String>,
+ port: Option<u64>,
+}
+
+#[derive(Debug, Deserialize)]
+struct PeerConfig {
+ ip: Option<String>,
+ port: Option<u64>,
+}
+
+fn main() {
+ let toml_str = r#"
+ global_string = "test"
+ global_integer = 5
+
+ [server]
+ ip = "127.0.0.1"
+ port = 80
+
+ [[peers]]
+ ip = "127.0.0.1"
+ port = 8080
+
+ [[peers]]
+ ip = "127.0.0.1"
+ "#;
+
+ let decoded: Config = basic_toml::from_str(toml_str).unwrap();
+ println!("{:#?}", decoded);
+}
diff --git a/vendor/basic-toml/src/de.rs b/vendor/basic-toml/src/de.rs
new file mode 100644
index 000000000..21531f565
--- /dev/null
+++ b/vendor/basic-toml/src/de.rs
@@ -0,0 +1,1723 @@
+use crate::tokens::{Error as TokenError, Span, Token, Tokenizer};
+use serde::de;
+use serde::de::IntoDeserializer;
+use std::borrow::Cow;
+use std::collections::{HashMap, HashSet};
+use std::error;
+use std::f64;
+use std::fmt::{self, Display};
+use std::iter;
+use std::str;
+use std::vec;
+
+type TablePair<'a> = ((Span, Cow<'a, str>), Value<'a>);
+
+/// Deserializes a byte slice into a type.
+///
+/// This function will attempt to interpret `bytes` as UTF-8 data and then
+/// deserialize `T` from the TOML document provided.
+pub fn from_slice<'de, T>(bytes: &'de [u8]) -> Result<T, crate::Error>
+where
+ T: de::Deserialize<'de>,
+{
+ match str::from_utf8(bytes) {
+ Ok(s) => from_str(s),
+ Err(e) => Err(crate::Error::from(*Error::custom(None, e.to_string()))),
+ }
+}
+
+/// Deserializes a string into a type.
+///
+/// This function will attempt to interpret `s` as a TOML document and
+/// deserialize `T` from the document.
+pub fn from_str<'de, T>(s: &'de str) -> Result<T, crate::Error>
+where
+ T: de::Deserialize<'de>,
+{
+ let mut d = Deserializer::new(s);
+ T::deserialize(&mut d).map_err(|e| crate::Error::from(*e))
+}
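
Editor's note: a small sketch of the two entry points above, using `serde_json::Value` as a convenient self-describing target (serde_json is a dev-dependency of the vendored crate; this snippet is illustrative only). Invalid UTF-8 is reported through the normal error type rather than panicking.

```rust
use serde_json::Value;

fn main() {
    // `from_slice` rejects non-UTF-8 input before any TOML parsing happens.
    let err = basic_toml::from_slice::<Value>(b"key = \xff").unwrap_err();
    println!("invalid utf-8: {}", err);

    // Valid UTF-8 is handed on to `from_str` and parsed as TOML.
    let value: Value = basic_toml::from_slice(b"answer = 42").unwrap();
    assert_eq!(value["answer"], 42);
}
```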
+
+#[derive(Debug)]
+pub(crate) struct Error {
+ kind: ErrorKind,
+ line: Option<usize>,
+ col: usize,
+ at: Option<usize>,
+ message: String,
+ key: Vec<String>,
+}
+
+/// Errors that can occur when deserializing a type.
+#[derive(Debug)]
+enum ErrorKind {
+ /// EOF was reached when looking for a value.
+ UnexpectedEof,
+
+ /// An invalid character not allowed in a string was found.
+ InvalidCharInString(char),
+
+ /// An invalid character was found as an escape.
+ InvalidEscape(char),
+
+ /// An invalid character was found in a hex escape.
+ InvalidHexEscape(char),
+
+ /// An invalid escape value was specified in a hex escape in a string.
+ ///
+ /// Valid values are in the plane of unicode codepoints.
+ InvalidEscapeValue(u32),
+
+ /// A newline in a string was encountered when one was not allowed.
+ NewlineInString,
+
+ /// An unexpected character was encountered, typically when looking for a
+ /// value.
+ Unexpected(char),
+
+ /// An unterminated string was found; EOF was reached before the closing
+ /// delimiter.
+ UnterminatedString,
+
+ /// A newline was found in a table key.
+ NewlineInTableKey,
+
+ /// A number failed to parse.
+ NumberInvalid,
+
+ /// Wanted one sort of token, but found another.
+ Wanted {
+ /// Expected token type.
+ expected: &'static str,
+ /// Actually found token type.
+ found: &'static str,
+ },
+
+ /// A duplicate table definition was found.
+ DuplicateTable(String),
+
+ /// Duplicate key in table.
+ DuplicateKey(String),
+
+ /// A previously defined table was redefined as an array.
+ RedefineAsArray,
+
+ /// Multiline strings are not allowed as keys.
+ MultilineStringKey,
+
+ /// A custom error which could be generated when deserializing a particular
+ /// type.
+ Custom,
+
+ /// A tuple with a certain number of elements was expected but something
+ /// else was found.
+ ExpectedTuple(usize),
+
+ /// Expected table keys to be in increasing tuple index order, but something
+ /// else was found.
+ ExpectedTupleIndex {
+ /// Expected index.
+ expected: usize,
+ /// Key that was specified.
+ found: String,
+ },
+
+ /// An empty table was expected but entries were found.
+ ExpectedEmptyTable,
+
+ /// Dotted key attempted to extend something that is not a table.
+ DottedKeyInvalidType,
+
+ /// An unexpected key was encountered.
+ ///
+ /// Used when deserializing a struct with a limited set of fields.
+ UnexpectedKeys {
+ /// The unexpected keys.
+ keys: Vec<String>,
+ /// Keys that may be specified.
+ available: &'static [&'static str],
+ },
+
+ /// Unquoted string was found when quoted one was expected.
+ UnquotedString,
+}
+
+struct Deserializer<'a> {
+ input: &'a str,
+ tokens: Tokenizer<'a>,
+}
+
+impl<'de, 'b> de::Deserializer<'de> for &'b mut Deserializer<'de> {
+ type Error = Box<Error>;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Box<Error>>
+ where
+ V: de::Visitor<'de>,
+ {
+ let mut tables = self.tables()?;
+ let table_indices = build_table_indices(&tables);
+ let table_pindices = build_table_pindices(&tables);
+
+ let res = visitor.visit_map(MapVisitor {
+ values: Vec::new().into_iter().peekable(),
+ next_value: None,
+ depth: 0,
+ cur: 0,
+ cur_parent: 0,
+ max: tables.len(),
+ table_indices: &table_indices,
+ table_pindices: &table_pindices,
+ tables: &mut tables,
+ array: false,
+ de: self,
+ keys: HashSet::new(),
+ });
+ res.map_err(|mut err| {
+ // Errors originating from this library (toml), have an offset
+ // attached to them already. Other errors, like those originating
+ // from serde (like "missing field") or from a custom deserializer,
+ // do not have offsets on them. Here, we do a best guess at their
+ // location, by attributing them to the "current table" (the last
+ // item in `tables`).
+ err.fix_offset(|| tables.last().map(|table| table.at));
+ err.fix_linecol(|at| self.to_linecol(at));
+ err
+ })
+ }
+
+ serde::forward_to_deserialize_any! {
+ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
+ bytes byte_buf map unit newtype_struct
+ ignored_any unit_struct tuple_struct tuple option identifier struct enum
+ }
+}
+
+// Builds a datastructure that allows for efficient sublinear lookups. The
+// returned HashMap contains a mapping from table header (like [a.b.c]) to list
+// of tables with that precise name. The tables are being identified by their
+// index in the passed slice. We use a list as the implementation uses this data
+// structure for arrays as well as tables, so if any top level [[name]] array
+// contains multiple entries, there are multiple entries in the list. The lookup
+// is performed in the `SeqAccess` implementation of `MapVisitor`. The lists are
+// ordered, which we exploit in the search code by using bisection.
+fn build_table_indices<'de>(tables: &[Table<'de>]) -> HashMap<Vec<Cow<'de, str>>, Vec<usize>> {
+ let mut res = HashMap::new();
+ for (i, table) in tables.iter().enumerate() {
+ let header = table.header.iter().map(|v| v.1.clone()).collect::<Vec<_>>();
+ res.entry(header).or_insert_with(Vec::new).push(i);
+ }
+ res
+}
+
+// Builds a datastructure that allows for efficient sublinear lookups. The
+// returned HashMap contains a mapping from table header (like [a.b.c]) to list
+// of tables whose name at least starts with the specified name. So searching
+// for [a.b] would give both [a.b.c.d] as well as [a.b.e]. The tables are being
+// identified by their index in the passed slice.
+//
+// A list is used for two reasons: First, the implementation also stores arrays
+// in the same data structure and any top level array of size 2 or greater
+// creates multiple entries in the list with the same shared name. Second, there
+// can be multiple tables sharing the same prefix.
+//
+// The lookup is performed in the `MapAccess` implementation of `MapVisitor`.
+// The lists are ordered, which we exploit in the search code by using
+// bisection.
+fn build_table_pindices<'de>(tables: &[Table<'de>]) -> HashMap<Vec<Cow<'de, str>>, Vec<usize>> {
+ let mut res = HashMap::new();
+ for (i, table) in tables.iter().enumerate() {
+ let header = table.header.iter().map(|v| v.1.clone()).collect::<Vec<_>>();
+ for len in 0..=header.len() {
+ res.entry(header[..len].to_owned())
+ .or_insert_with(Vec::new)
+ .push(i);
+ }
+ }
+ res
+}
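
Editor's note: the two builders above are private and operate on the crate's `Table`/`Span` types, so they cannot be called from outside. The following standalone sketch reproduces the prefix-index idea on plain string headers, showing why a lookup for `[a.b]` finds both `[a.b.c.d]` and `[a.b.e]`; the names and types here are illustrative, not the crate's.

```rust
use std::collections::HashMap;

// Map every prefix of every header to the indices of tables whose header
// starts with that prefix. Indices are pushed in document order, so each
// list is sorted and can be bisected, as described in the comments above.
fn prefix_index<'a>(headers: &[Vec<&'a str>]) -> HashMap<Vec<&'a str>, Vec<usize>> {
    let mut res: HashMap<Vec<&'a str>, Vec<usize>> = HashMap::new();
    for (i, header) in headers.iter().enumerate() {
        for len in 0..=header.len() {
            res.entry(header[..len].to_vec()).or_default().push(i);
        }
    }
    res
}

fn main() {
    // Headers as they would appear for `[a.b.c.d]` and `[a.b.e]`.
    let headers = vec![vec!["a", "b", "c", "d"], vec!["a", "b", "e"]];
    let idx = prefix_index(&headers);

    // The prefix [a.b] maps to both tables, in document order.
    assert_eq!(idx[&vec!["a", "b"]], vec![0, 1]);
    assert_eq!(idx[&vec!["a", "b", "e"]], vec![1]);
}
```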
+
+fn headers_equal(hdr_a: &[(Span, Cow<str>)], hdr_b: &[(Span, Cow<str>)]) -> bool {
+ if hdr_a.len() != hdr_b.len() {
+ return false;
+ }
+ hdr_a.iter().zip(hdr_b.iter()).all(|(h1, h2)| h1.1 == h2.1)
+}
+
+struct Table<'a> {
+ at: usize,
+ header: Vec<(Span, Cow<'a, str>)>,
+ values: Option<Vec<TablePair<'a>>>,
+ array: bool,
+}
+
+struct MapVisitor<'de, 'b> {
+ values: iter::Peekable<vec::IntoIter<TablePair<'de>>>,
+ next_value: Option<TablePair<'de>>,
+ depth: usize,
+ cur: usize,
+ cur_parent: usize,
+ max: usize,
+ table_indices: &'b HashMap<Vec<Cow<'de, str>>, Vec<usize>>,
+ table_pindices: &'b HashMap<Vec<Cow<'de, str>>, Vec<usize>>,
+ tables: &'b mut [Table<'de>],
+ array: bool,
+ de: &'b mut Deserializer<'de>,
+ keys: HashSet<Cow<'de, str>>,
+}
+
+impl<'de, 'b> de::MapAccess<'de> for MapVisitor<'de, 'b> {
+ type Error = Box<Error>;
+
+ fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Box<Error>>
+ where
+ K: de::DeserializeSeed<'de>,
+ {
+ if self.cur_parent == self.max || self.cur == self.max {
+ return Ok(None);
+ }
+
+ loop {
+ assert!(self.next_value.is_none());
+ if let Some(((span, key), value)) = self.values.next() {
+ if !self.keys.insert(key.clone()) {
+ return Err(Error::from_kind(
+ Some(span.start),
+ ErrorKind::DuplicateKey(key.into_owned()),
+ ));
+ }
+ let ret = seed.deserialize(StrDeserializer::new(key.clone()))?;
+ self.next_value = Some(((span, key), value));
+ return Ok(Some(ret));
+ }
+
+ let next_table = {
+ let prefix_stripped = self.tables[self.cur_parent].header[..self.depth]
+ .iter()
+ .map(|v| v.1.clone())
+ .collect::<Vec<_>>();
+ self.table_pindices
+ .get(&prefix_stripped)
+ .and_then(|entries| {
+ let start = entries.binary_search(&self.cur).unwrap_or_else(|v| v);
+ if start == entries.len() || entries[start] < self.cur {
+ return None;
+ }
+ entries[start..]
+ .iter()
+ .filter_map(|i| if *i < self.max { Some(*i) } else { None })
+ .map(|i| (i, &self.tables[i]))
+ .find(|(_, table)| table.values.is_some())
+ .map(|p| p.0)
+ })
+ };
+
+ let pos = match next_table {
+ Some(pos) => pos,
+ None => return Ok(None),
+ };
+ self.cur = pos;
+
+ // Test to see if we're duplicating our parent's table, and if so
+ // then this is an error in the toml format
+ if self.cur_parent != pos {
+ if headers_equal(
+ &self.tables[self.cur_parent].header,
+ &self.tables[pos].header,
+ ) {
+ let at = self.tables[pos].at;
+ let name = self.tables[pos]
+ .header
+ .iter()
+ .map(|k| k.1.clone())
+ .collect::<Vec<_>>()
+ .join(".");
+ return Err(self.de.error(at, ErrorKind::DuplicateTable(name)));
+ }
+
+ // If we're here we know we should share the same prefix, and if
+ // the longer table was defined first then we want to narrow
+ // down our parent's length if possible to ensure that we catch
+ // duplicate tables defined afterwards.
+ let parent_len = self.tables[self.cur_parent].header.len();
+ let cur_len = self.tables[pos].header.len();
+ if cur_len < parent_len {
+ self.cur_parent = pos;
+ }
+ }
+
+ let table = &mut self.tables[pos];
+
+ // If we're not yet at the appropriate depth for this table then we
+ // just emit the next portion of its header and then continue
+ // decoding.
+ if self.depth != table.header.len() {
+ let (span, key) = &table.header[self.depth];
+ if !self.keys.insert(key.clone()) {
+ return Err(Error::from_kind(
+ Some(span.start),
+ ErrorKind::DuplicateKey(key.clone().into_owned()),
+ ));
+ }
+ let key = seed.deserialize(StrDeserializer::new(key.clone()))?;
+ return Ok(Some(key));
+ }
+
+ // Rule out cases like:
+ //
+ // [[foo.bar]]
+ // [[foo]]
+ if table.array {
+ let kind = ErrorKind::RedefineAsArray;
+ return Err(self.de.error(table.at, kind));
+ }
+
+ self.values = table
+ .values
+ .take()
+ .expect("Unable to read table values")
+ .into_iter()
+ .peekable();
+ }
+ }
+
+ fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Box<Error>>
+ where
+ V: de::DeserializeSeed<'de>,
+ {
+ if let Some((k, v)) = self.next_value.take() {
+ match seed.deserialize(ValueDeserializer::new(v)) {
+ Ok(v) => return Ok(v),
+ Err(mut e) => {
+ e.add_key_context(&k.1);
+ return Err(e);
+ }
+ }
+ }
+
+ let array =
+ self.tables[self.cur].array && self.depth == self.tables[self.cur].header.len() - 1;
+ self.cur += 1;
+ let res = seed.deserialize(MapVisitor {
+ values: Vec::new().into_iter().peekable(),
+ next_value: None,
+ depth: self.depth + if array { 0 } else { 1 },
+ cur_parent: self.cur - 1,
+ cur: 0,
+ max: self.max,
+ array,
+ table_indices: self.table_indices,
+ table_pindices: self.table_pindices,
+ tables: &mut *self.tables,
+ de: &mut *self.de,
+ keys: HashSet::new(),
+ });
+ res.map_err(|mut e| {
+ e.add_key_context(&self.tables[self.cur - 1].header[self.depth].1);
+ e
+ })
+ }
+}
+
+impl<'de, 'b> de::SeqAccess<'de> for MapVisitor<'de, 'b> {
+ type Error = Box<Error>;
+
+ fn next_element_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Box<Error>>
+ where
+ K: de::DeserializeSeed<'de>,
+ {
+ assert!(self.next_value.is_none());
+ assert!(self.values.next().is_none());
+
+ if self.cur_parent == self.max {
+ return Ok(None);
+ }
+
+ let header_stripped = self.tables[self.cur_parent]
+ .header
+ .iter()
+ .map(|v| v.1.clone())
+ .collect::<Vec<_>>();
+ let start_idx = self.cur_parent + 1;
+ let next = self
+ .table_indices
+ .get(&header_stripped)
+ .and_then(|entries| {
+ let start = entries.binary_search(&start_idx).unwrap_or_else(|v| v);
+ if start == entries.len() || entries[start] < start_idx {
+ return None;
+ }
+ entries[start..]
+ .iter()
+ .filter_map(|i| if *i < self.max { Some(*i) } else { None })
+ .map(|i| (i, &self.tables[i]))
+ .find(|(_, table)| table.array)
+ .map(|p| p.0)
+ })
+ .unwrap_or(self.max);
+
+ let ret = seed.deserialize(MapVisitor {
+ values: self.tables[self.cur_parent]
+ .values
+ .take()
+ .expect("Unable to read table values")
+ .into_iter()
+ .peekable(),
+ next_value: None,
+ depth: self.depth + 1,
+ cur_parent: self.cur_parent,
+ max: next,
+ cur: 0,
+ array: false,
+ table_indices: self.table_indices,
+ table_pindices: self.table_pindices,
+ tables: self.tables,
+ de: self.de,
+ keys: HashSet::new(),
+ })?;
+ self.cur_parent = next;
+ Ok(Some(ret))
+ }
+}
+
+impl<'de, 'b> de::Deserializer<'de> for MapVisitor<'de, 'b> {
+ type Error = Box<Error>;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Box<Error>>
+ where
+ V: de::Visitor<'de>,
+ {
+ if self.array {
+ visitor.visit_seq(self)
+ } else {
+ visitor.visit_map(self)
+ }
+ }
+
+ // `None` is interpreted as a missing field so be sure to implement `Some`
+ // as a present field.
+ fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Box<Error>>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_some(self)
+ }
+
+ fn deserialize_newtype_struct<V>(
+ self,
+ _name: &'static str,
+ visitor: V,
+ ) -> Result<V::Value, Box<Error>>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_newtype_struct(self)
+ }
+
+ serde::forward_to_deserialize_any! {
+ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
+ bytes byte_buf map unit identifier
+ ignored_any unit_struct tuple_struct tuple struct enum
+ }
+}
+
+struct StrDeserializer<'a> {
+ key: Cow<'a, str>,
+}
+
+impl<'a> StrDeserializer<'a> {
+ fn new(key: Cow<'a, str>) -> StrDeserializer<'a> {
+ StrDeserializer { key }
+ }
+}
+
+impl<'a> de::IntoDeserializer<'a, Box<Error>> for StrDeserializer<'a> {
+ type Deserializer = Self;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ self
+ }
+}
+
+impl<'de> de::Deserializer<'de> for StrDeserializer<'de> {
+ type Error = Box<Error>;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Box<Error>>
+ where
+ V: de::Visitor<'de>,
+ {
+ match self.key {
+ Cow::Borrowed(s) => visitor.visit_borrowed_str(s),
+ Cow::Owned(s) => visitor.visit_string(s),
+ }
+ }
+
+ serde::forward_to_deserialize_any! {
+ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
+ bytes byte_buf map option unit newtype_struct
+ ignored_any unit_struct tuple_struct tuple enum identifier struct
+ }
+}
+
+struct ValueDeserializer<'a> {
+ value: Value<'a>,
+ validate_struct_keys: bool,
+}
+
+impl<'a> ValueDeserializer<'a> {
+ fn new(value: Value<'a>) -> ValueDeserializer<'a> {
+ ValueDeserializer {
+ value,
+ validate_struct_keys: false,
+ }
+ }
+
+ fn with_struct_key_validation(mut self) -> Self {
+ self.validate_struct_keys = true;
+ self
+ }
+}
+
+impl<'de> de::Deserializer<'de> for ValueDeserializer<'de> {
+ type Error = Box<Error>;
+
+ fn deserialize_any<V>(self, visitor: V) -> Result<V::Value, Box<Error>>
+ where
+ V: de::Visitor<'de>,
+ {
+ let start = self.value.start;
+ let res = match self.value.e {
+ E::Integer(i) => visitor.visit_i64(i),
+ E::Boolean(b) => visitor.visit_bool(b),
+ E::Float(f) => visitor.visit_f64(f),
+ E::String(Cow::Borrowed(s)) => visitor.visit_borrowed_str(s),
+ E::String(Cow::Owned(s)) => visitor.visit_string(s),
+ E::Array(values) => {
+ let mut s = de::value::SeqDeserializer::new(values.into_iter());
+ let ret = visitor.visit_seq(&mut s)?;
+ s.end()?;
+ Ok(ret)
+ }
+ E::InlineTable(values) | E::DottedTable(values) => {
+ visitor.visit_map(InlineTableDeserializer {
+ values: values.into_iter(),
+ next_value: None,
+ keys: HashSet::new(),
+ })
+ }
+ };
+ res.map_err(|mut err| {
+ // Attribute the error to whatever value returned the error.
+ err.fix_offset(|| Some(start));
+ err
+ })
+ }
+
+ fn deserialize_struct<V>(
+ self,
+ _name: &'static str,
+ fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Box<Error>>
+ where
+ V: de::Visitor<'de>,
+ {
+ if self.validate_struct_keys {
+ match self.value.e {
+ E::InlineTable(ref values) | E::DottedTable(ref values) => {
+ let extra_fields = values
+ .iter()
+ .filter_map(|key_value| {
+ let (ref key, ref _val) = *key_value;
+ if fields.contains(&&*(key.1)) {
+ None
+ } else {
+ Some(key.clone())
+ }
+ })
+ .collect::<Vec<_>>();
+
+ if !extra_fields.is_empty() {
+ return Err(Error::from_kind(
+ Some(self.value.start),
+ ErrorKind::UnexpectedKeys {
+ keys: extra_fields
+ .iter()
+ .map(|k| k.1.to_string())
+ .collect::<Vec<_>>(),
+ available: fields,
+ },
+ ));
+ }
+ }
+ _ => {}
+ }
+ }
+
+ self.deserialize_any(visitor)
+ }
+
+ // `None` is interpreted as a missing field so be sure to implement `Some`
+ // as a present field.
+ fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Box<Error>>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_some(self)
+ }
+
+ fn deserialize_enum<V>(
+ self,
+ _name: &'static str,
+ _variants: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Box<Error>>
+ where
+ V: de::Visitor<'de>,
+ {
+ match self.value.e {
+ E::String(val) => visitor.visit_enum(val.into_deserializer()),
+ e => Err(Error::from_kind(
+ Some(self.value.start),
+ ErrorKind::Wanted {
+ expected: "string",
+ found: e.type_name(),
+ },
+ )),
+ }
+ }
+
+ fn deserialize_newtype_struct<V>(
+ self,
+ _name: &'static str,
+ visitor: V,
+ ) -> Result<V::Value, Box<Error>>
+ where
+ V: de::Visitor<'de>,
+ {
+ visitor.visit_newtype_struct(self)
+ }
+
+ serde::forward_to_deserialize_any! {
+ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string seq
+ bytes byte_buf map unit identifier
+ ignored_any unit_struct tuple_struct tuple
+ }
+}
+
+impl<'de, 'b> de::IntoDeserializer<'de, Box<Error>> for MapVisitor<'de, 'b> {
+ type Deserializer = MapVisitor<'de, 'b>;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ self
+ }
+}
+
+impl<'de, 'b> de::IntoDeserializer<'de, Box<Error>> for &'b mut Deserializer<'de> {
+ type Deserializer = Self;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ self
+ }
+}
+
+impl<'de> de::IntoDeserializer<'de, Box<Error>> for Value<'de> {
+ type Deserializer = ValueDeserializer<'de>;
+
+ fn into_deserializer(self) -> Self::Deserializer {
+ ValueDeserializer::new(self)
+ }
+}
+
+struct DottedTableDeserializer<'a> {
+ name: Cow<'a, str>,
+ value: Value<'a>,
+}
+
+impl<'de> de::EnumAccess<'de> for DottedTableDeserializer<'de> {
+ type Error = Box<Error>;
+ type Variant = TableEnumDeserializer<'de>;
+
+ fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
+ where
+ V: de::DeserializeSeed<'de>,
+ {
+ let (name, value) = (self.name, self.value);
+ seed.deserialize(StrDeserializer::new(name))
+ .map(|val| (val, TableEnumDeserializer { value }))
+ }
+}
+
+struct InlineTableDeserializer<'de> {
+ values: vec::IntoIter<TablePair<'de>>,
+ next_value: Option<Value<'de>>,
+ keys: HashSet<Cow<'de, str>>,
+}
+
+impl<'de> de::MapAccess<'de> for InlineTableDeserializer<'de> {
+ type Error = Box<Error>;
+
+ fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Box<Error>>
+ where
+ K: de::DeserializeSeed<'de>,
+ {
+ let ((span, key), value) = match self.values.next() {
+ Some(pair) => pair,
+ None => return Ok(None),
+ };
+ self.next_value = Some(value);
+ if !self.keys.insert(key.clone()) {
+ return Err(Error::from_kind(
+ Some(span.start),
+ ErrorKind::DuplicateKey(key.into_owned()),
+ ));
+ }
+ seed.deserialize(StrDeserializer::new(key)).map(Some)
+ }
+
+ fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Box<Error>>
+ where
+ V: de::DeserializeSeed<'de>,
+ {
+ let value = self.next_value.take().expect("Unable to read table values");
+ seed.deserialize(ValueDeserializer::new(value))
+ }
+}
+
+impl<'de> de::EnumAccess<'de> for InlineTableDeserializer<'de> {
+ type Error = Box<Error>;
+ type Variant = TableEnumDeserializer<'de>;
+
+ fn variant_seed<V>(mut self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
+ where
+ V: de::DeserializeSeed<'de>,
+ {
+ let (key, value) = match self.values.next() {
+ Some(pair) => pair,
+ None => {
+ return Err(Error::from_kind(
+ None, // FIXME: How do we get an offset here?
+ ErrorKind::Wanted {
+ expected: "table with exactly 1 entry",
+ found: "empty table",
+ },
+ ));
+ }
+ };
+
+ seed.deserialize(StrDeserializer::new(key.1))
+ .map(|val| (val, TableEnumDeserializer { value }))
+ }
+}
+
+/// Deserializes table values into enum variants.
+struct TableEnumDeserializer<'a> {
+ value: Value<'a>,
+}
+
+impl<'de> de::VariantAccess<'de> for TableEnumDeserializer<'de> {
+ type Error = Box<Error>;
+
+ fn unit_variant(self) -> Result<(), Self::Error> {
+ match self.value.e {
+ E::InlineTable(values) | E::DottedTable(values) => {
+ if values.is_empty() {
+ Ok(())
+ } else {
+ Err(Error::from_kind(
+ Some(self.value.start),
+ ErrorKind::ExpectedEmptyTable,
+ ))
+ }
+ }
+ e => Err(Error::from_kind(
+ Some(self.value.start),
+ ErrorKind::Wanted {
+ expected: "table",
+ found: e.type_name(),
+ },
+ )),
+ }
+ }
+
+ fn newtype_variant_seed<T>(self, seed: T) -> Result<T::Value, Self::Error>
+ where
+ T: de::DeserializeSeed<'de>,
+ {
+ seed.deserialize(ValueDeserializer::new(self.value))
+ }
+
+ fn tuple_variant<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ match self.value.e {
+ E::InlineTable(values) | E::DottedTable(values) => {
+ let tuple_values = values
+ .into_iter()
+ .enumerate()
+ .map(|(index, (key, value))| match key.1.parse::<usize>() {
+ Ok(key_index) if key_index == index => Ok(value),
+ Ok(_) | Err(_) => Err(Error::from_kind(
+ Some(key.0.start),
+ ErrorKind::ExpectedTupleIndex {
+ expected: index,
+ found: key.1.to_string(),
+ },
+ )),
+ })
+ // Fold all values into a `Vec`, or return the first error.
+ .fold(Ok(Vec::with_capacity(len)), |result, value_result| {
+ result.and_then(move |mut tuple_values| match value_result {
+ Ok(value) => {
+ tuple_values.push(value);
+ Ok(tuple_values)
+ }
+ // `Result<de::Value, Self::Error>` to `Result<Vec<_>, Self::Error>`
+ Err(e) => Err(e),
+ })
+ })?;
+
+ if tuple_values.len() == len {
+ de::Deserializer::deserialize_seq(
+ ValueDeserializer::new(Value {
+ e: E::Array(tuple_values),
+ start: self.value.start,
+ end: self.value.end,
+ }),
+ visitor,
+ )
+ } else {
+ Err(Error::from_kind(
+ Some(self.value.start),
+ ErrorKind::ExpectedTuple(len),
+ ))
+ }
+ }
+ e => Err(Error::from_kind(
+ Some(self.value.start),
+ ErrorKind::Wanted {
+ expected: "table",
+ found: e.type_name(),
+ },
+ )),
+ }
+ }
+
+ fn struct_variant<V>(
+ self,
+ fields: &'static [&'static str],
+ visitor: V,
+ ) -> Result<V::Value, Self::Error>
+ where
+ V: de::Visitor<'de>,
+ {
+ de::Deserializer::deserialize_struct(
+ ValueDeserializer::new(self.value).with_struct_key_validation(),
+ "", // TODO: this should be the variant name
+ fields,
+ visitor,
+ )
+ }
+}
+
+impl<'a> Deserializer<'a> {
+ fn new(input: &'a str) -> Deserializer<'a> {
+ Deserializer {
+ tokens: Tokenizer::new(input),
+ input,
+ }
+ }
+
+ fn tables(&mut self) -> Result<Vec<Table<'a>>, Box<Error>> {
+ let mut tables = Vec::new();
+ let mut cur_table = Table {
+ at: 0,
+ header: Vec::new(),
+ values: None,
+ array: false,
+ };
+
+ while let Some(line) = self.line()? {
+ match line {
+ Line::Table {
+ at,
+ mut header,
+ array,
+ } => {
+ if !cur_table.header.is_empty() || cur_table.values.is_some() {
+ tables.push(cur_table);
+ }
+ cur_table = Table {
+ at,
+ header: Vec::new(),
+ values: Some(Vec::new()),
+ array,
+ };
+ loop {
+ let part = header.next().map_err(|e| self.token_error(e));
+ match part? {
+ Some(part) => cur_table.header.push(part),
+ None => break,
+ }
+ }
+ }
+ Line::KeyValue(key, value) => {
+ if cur_table.values.is_none() {
+ cur_table.values = Some(Vec::new());
+ }
+ self.add_dotted_key(key, value, cur_table.values.as_mut().unwrap())?;
+ }
+ }
+ }
+ if !cur_table.header.is_empty() || cur_table.values.is_some() {
+ tables.push(cur_table);
+ }
+ Ok(tables)
+ }
+
+ fn line(&mut self) -> Result<Option<Line<'a>>, Box<Error>> {
+ loop {
+ self.eat_whitespace();
+ if self.eat_comment()? {
+ continue;
+ }
+ if self.eat(Token::Newline)? {
+ continue;
+ }
+ break;
+ }
+
+ match self.peek()? {
+ Some((_, Token::LeftBracket)) => self.table_header().map(Some),
+ Some(_) => self.key_value().map(Some),
+ None => Ok(None),
+ }
+ }
+
+ fn table_header(&mut self) -> Result<Line<'a>, Box<Error>> {
+ let start = self.tokens.current();
+ self.expect(Token::LeftBracket)?;
+ let array = self.eat(Token::LeftBracket)?;
+ let ret = Header::new(self.tokens.clone(), array);
+ self.tokens.skip_to_newline();
+ Ok(Line::Table {
+ at: start,
+ header: ret,
+ array,
+ })
+ }
+
+ fn key_value(&mut self) -> Result<Line<'a>, Box<Error>> {
+ let key = self.dotted_key()?;
+ self.eat_whitespace();
+ self.expect(Token::Equals)?;
+ self.eat_whitespace();
+
+ let value = self.value()?;
+ self.eat_whitespace();
+ if !self.eat_comment()? {
+ self.eat_newline_or_eof()?;
+ }
+
+ Ok(Line::KeyValue(key, value))
+ }
+
+ fn value(&mut self) -> Result<Value<'a>, Box<Error>> {
+ let at = self.tokens.current();
+ let value = match self.next()? {
+ Some((Span { start, end }, Token::String { val, .. })) => Value {
+ e: E::String(val),
+ start,
+ end,
+ },
+ Some((Span { start, end }, Token::Keylike("true"))) => Value {
+ e: E::Boolean(true),
+ start,
+ end,
+ },
+ Some((Span { start, end }, Token::Keylike("false"))) => Value {
+ e: E::Boolean(false),
+ start,
+ end,
+ },
+ Some((span, Token::Keylike(key))) => self.parse_keylike(at, span, key)?,
+ Some((span, Token::Plus)) => self.number_leading_plus(span)?,
+ Some((Span { start, .. }, Token::LeftBrace)) => {
+ self.inline_table().map(|(Span { end, .. }, table)| Value {
+ e: E::InlineTable(table),
+ start,
+ end,
+ })?
+ }
+ Some((Span { start, .. }, Token::LeftBracket)) => {
+ self.array().map(|(Span { end, .. }, array)| Value {
+ e: E::Array(array),
+ start,
+ end,
+ })?
+ }
+ Some(token) => {
+ return Err(self.error(
+ at,
+ ErrorKind::Wanted {
+ expected: "a value",
+ found: token.1.describe(),
+ },
+ ));
+ }
+ None => return Err(self.eof()),
+ };
+ Ok(value)
+ }
+
+ fn parse_keylike(
+ &mut self,
+ at: usize,
+ span: Span,
+ key: &'a str,
+ ) -> Result<Value<'a>, Box<Error>> {
+ if key == "inf" || key == "nan" {
+ return self.number(span, key);
+ }
+
+ let first_char = key.chars().next().expect("key should not be empty here");
+ match first_char {
+ '-' | '0'..='9' => self.number(span, key),
+ _ => Err(self.error(at, ErrorKind::UnquotedString)),
+ }
+ }
+
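+ // Classifies and parses a keylike token as a number. Illustrative examples
+ // (assuming the surrounding syntax is valid): "0xff", "0o17" and "0b101" go
+ // through `integer` with the matching radix; "1e3", or a "3" followed by a
+ // `.` and "14", goes through `float`; "inf", "-inf", "nan" and "-nan" map to
+ // the corresponding f64 constants; anything else is attempted as a base-10
+ // integer.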
+ fn number(&mut self, Span { start, end }: Span, s: &'a str) -> Result<Value<'a>, Box<Error>> {
+ let to_integer = |f| Value {
+ e: E::Integer(f),
+ start,
+ end,
+ };
+ if let Some(s) = s.strip_prefix("0x") {
+ self.integer(s, 16).map(to_integer)
+ } else if let Some(s) = s.strip_prefix("0o") {
+ self.integer(s, 8).map(to_integer)
+ } else if let Some(s) = s.strip_prefix("0b") {
+ self.integer(s, 2).map(to_integer)
+ } else if s.contains('e') || s.contains('E') {
+ self.float(s, None).map(|f| Value {
+ e: E::Float(f),
+ start,
+ end,
+ })
+ } else if self.eat(Token::Period)? {
+ let at = self.tokens.current();
+ match self.next()? {
+ Some((Span { start, end }, Token::Keylike(after))) => {
+ self.float(s, Some(after)).map(|f| Value {
+ e: E::Float(f),
+ start,
+ end,
+ })
+ }
+ _ => Err(self.error(at, ErrorKind::NumberInvalid)),
+ }
+ } else if s == "inf" {
+ Ok(Value {
+ e: E::Float(f64::INFINITY),
+ start,
+ end,
+ })
+ } else if s == "-inf" {
+ Ok(Value {
+ e: E::Float(f64::NEG_INFINITY),
+ start,
+ end,
+ })
+ } else if s == "nan" {
+ Ok(Value {
+ e: E::Float(f64::NAN),
+ start,
+ end,
+ })
+ } else if s == "-nan" {
+ Ok(Value {
+ e: E::Float(-f64::NAN),
+ start,
+ end,
+ })
+ } else {
+ self.integer(s, 10).map(to_integer)
+ }
+ }
+
+ fn number_leading_plus(&mut self, Span { start, .. }: Span) -> Result<Value<'a>, Box<Error>> {
+ let start_token = self.tokens.current();
+ match self.next()? {
+ Some((Span { end, .. }, Token::Keylike(s))) => self.number(Span { start, end }, s),
+ _ => Err(self.error(start_token, ErrorKind::NumberInvalid)),
+ }
+ }
+
+ fn integer(&self, s: &'a str, radix: u32) -> Result<i64, Box<Error>> {
+ let allow_sign = radix == 10;
+ let allow_leading_zeros = radix != 10;
+ let (prefix, suffix) = self.parse_integer(s, allow_sign, allow_leading_zeros, radix)?;
+ let start = self.tokens.substr_offset(s);
+ if !suffix.is_empty() {
+ return Err(self.error(start, ErrorKind::NumberInvalid));
+ }
+ i64::from_str_radix(prefix.replace('_', "").trim_start_matches('+'), radix)
+ .map_err(|_e| self.error(start, ErrorKind::NumberInvalid))
+ }
+
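+ // Splits `s` into the longest leading run that looks like an integer in the
+ // given radix and the remaining suffix, rejecting leading zeros (in base 10),
+ // leading or doubled underscores, and empty input. Illustrative examples:
+ // ("1_000rest", radix 10) yields ("1_000", "rest"), while "007", "_1" and
+ // "1__0" all produce `NumberInvalid`.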
+ fn parse_integer(
+ &self,
+ s: &'a str,
+ allow_sign: bool,
+ allow_leading_zeros: bool,
+ radix: u32,
+ ) -> Result<(&'a str, &'a str), Box<Error>> {
+ let start = self.tokens.substr_offset(s);
+
+ let mut first = true;
+ let mut first_zero = false;
+ let mut underscore = false;
+ let mut end = s.len();
+ for (i, c) in s.char_indices() {
+ let at = i + start;
+ if i == 0 && (c == '+' || c == '-') && allow_sign {
+ continue;
+ }
+
+ if c == '0' && first {
+ first_zero = true;
+ } else if c.is_digit(radix) {
+ if !first && first_zero && !allow_leading_zeros {
+ return Err(self.error(at, ErrorKind::NumberInvalid));
+ }
+ underscore = false;
+ } else if c == '_' && first {
+ return Err(self.error(at, ErrorKind::NumberInvalid));
+ } else if c == '_' && !underscore {
+ underscore = true;
+ } else {
+ end = i;
+ break;
+ }
+ first = false;
+ }
+ if first || underscore {
+ return Err(self.error(start, ErrorKind::NumberInvalid));
+ }
+ Ok((&s[..end], &s[end..]))
+ }
+
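+ // Assembles a float from its already-tokenized pieces. Illustrative trace
+ // (assuming the tokenizer produced it from the text `3.14e2`): `s` is "3"
+ // and `after_decimal` is Some("14e2"), which splits into fraction "14" and
+ // exponent "2", so the string parsed as an f64 is "3.14E2". Underscores are
+ // stripped and non-finite results are rejected.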
+ fn float(&mut self, s: &'a str, after_decimal: Option<&'a str>) -> Result<f64, Box<Error>> {
+ let (integral, mut suffix) = self.parse_integer(s, true, false, 10)?;
+ let start = self.tokens.substr_offset(integral);
+
+ let mut fraction = None;
+ if let Some(after) = after_decimal {
+ if !suffix.is_empty() {
+ return Err(self.error(start, ErrorKind::NumberInvalid));
+ }
+ let (a, b) = self.parse_integer(after, false, true, 10)?;
+ fraction = Some(a);
+ suffix = b;
+ }
+
+ let mut exponent = None;
+ if suffix.starts_with('e') || suffix.starts_with('E') {
+ let (a, b) = if suffix.len() == 1 {
+ self.eat(Token::Plus)?;
+ match self.next()? {
+ Some((_, Token::Keylike(s))) => self.parse_integer(s, false, true, 10)?,
+ _ => return Err(self.error(start, ErrorKind::NumberInvalid)),
+ }
+ } else {
+ self.parse_integer(&suffix[1..], true, true, 10)?
+ };
+ if !b.is_empty() {
+ return Err(self.error(start, ErrorKind::NumberInvalid));
+ }
+ exponent = Some(a);
+ } else if !suffix.is_empty() {
+ return Err(self.error(start, ErrorKind::NumberInvalid));
+ }
+
+ let mut number = integral
+ .trim_start_matches('+')
+ .chars()
+ .filter(|c| *c != '_')
+ .collect::<String>();
+ if let Some(fraction) = fraction {
+ number.push('.');
+ number.extend(fraction.chars().filter(|c| *c != '_'));
+ }
+ if let Some(exponent) = exponent {
+ number.push('E');
+ number.extend(exponent.chars().filter(|c| *c != '_'));
+ }
+ number
+ .parse()
+ .map_err(|_e| self.error(start, ErrorKind::NumberInvalid))
+ .and_then(|n: f64| {
+ if n.is_finite() {
+ Ok(n)
+ } else {
+ Err(self.error(start, ErrorKind::NumberInvalid))
+ }
+ })
+ }
+
+ // TODO(#140): shouldn't buffer up this entire table in memory, it'd be
+ // great to defer parsing everything until later.
+ fn inline_table(&mut self) -> Result<(Span, Vec<TablePair<'a>>), Box<Error>> {
+ let mut ret = Vec::new();
+ self.eat_whitespace();
+ if let Some(span) = self.eat_spanned(Token::RightBrace)? {
+ return Ok((span, ret));
+ }
+ loop {
+ let key = self.dotted_key()?;
+ self.eat_whitespace();
+ self.expect(Token::Equals)?;
+ self.eat_whitespace();
+ let value = self.value()?;
+ self.add_dotted_key(key, value, &mut ret)?;
+
+ self.eat_whitespace();
+ if let Some(span) = self.eat_spanned(Token::RightBrace)? {
+ return Ok((span, ret));
+ }
+ self.expect(Token::Comma)?;
+ self.eat_whitespace();
+ }
+ }
+
+ // TODO(#140): shouldn't buffer up this entire array in memory, it'd be
+ // great to defer parsing everything until later.
+ fn array(&mut self) -> Result<(Span, Vec<Value<'a>>), Box<Error>> {
+ let mut ret = Vec::new();
+
+ let intermediate = |me: &mut Deserializer| -> Result<(), Box<Error>> {
+ loop {
+ me.eat_whitespace();
+ if !me.eat(Token::Newline)? && !me.eat_comment()? {
+ break;
+ }
+ }
+ Ok(())
+ };
+
+ loop {
+ intermediate(self)?;
+ if let Some(span) = self.eat_spanned(Token::RightBracket)? {
+ return Ok((span, ret));
+ }
+ let value = self.value()?;
+ ret.push(value);
+ intermediate(self)?;
+ if !self.eat(Token::Comma)? {
+ break;
+ }
+ }
+ intermediate(self)?;
+ let span = self.expect_spanned(Token::RightBracket)?;
+ Ok((span, ret))
+ }
+
+ fn table_key(&mut self) -> Result<(Span, Cow<'a, str>), Box<Error>> {
+ self.tokens.table_key().map_err(|e| self.token_error(e))
+ }
+
+ fn dotted_key(&mut self) -> Result<Vec<(Span, Cow<'a, str>)>, Box<Error>> {
+ let mut result = Vec::new();
+ result.push(self.table_key()?);
+ self.eat_whitespace();
+ while self.eat(Token::Period)? {
+ self.eat_whitespace();
+ result.push(self.table_key()?);
+ self.eat_whitespace();
+ }
+ Ok(result)
+ }
+
+ /// Stores a value in the appropriate hierarchical structure, positioned according to the dotted key.
+ ///
+ /// Given the following definition: `multi.part.key = "value"`, `multi` and `part` are
+ /// intermediate parts which are mapped to the relevant fields in the deserialized type's data
+ /// hierarchy.
+ ///
+ /// # Parameters
+ ///
+ /// * `key_parts`: Each segment of the dotted key, e.g. `part.one` maps to
+ /// `vec![Cow::Borrowed("part"), Cow::Borrowed("one")]`.
+ /// * `value`: The parsed value.
+ /// * `values`: The `Vec` to store the value in.
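+ ///
+ /// Illustrative example: for `multi.part.key = "value"`, `values` ends up
+ /// holding a `multi` entry whose value is an `E::DottedTable` containing a
+ /// `part` entry, which in turn is an `E::DottedTable` containing the final
+ /// `key = "value"` pair.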
+ fn add_dotted_key(
+ &self,
+ mut key_parts: Vec<(Span, Cow<'a, str>)>,
+ value: Value<'a>,
+ values: &mut Vec<TablePair<'a>>,
+ ) -> Result<(), Box<Error>> {
+ let key = key_parts.remove(0);
+ if key_parts.is_empty() {
+ values.push((key, value));
+ return Ok(());
+ }
+ match values.iter_mut().find(|&&mut (ref k, _)| *k.1 == key.1) {
+ Some(&mut (
+ _,
+ Value {
+ e: E::DottedTable(ref mut v),
+ ..
+ },
+ )) => {
+ return self.add_dotted_key(key_parts, value, v);
+ }
+ Some(&mut (_, Value { start, .. })) => {
+ return Err(self.error(start, ErrorKind::DottedKeyInvalidType));
+ }
+ None => {}
+ }
+ // The start/end value is somewhat misleading here.
+ let table_values = Value {
+ e: E::DottedTable(Vec::new()),
+ start: value.start,
+ end: value.end,
+ };
+ values.push((key, table_values));
+ let last_i = values.len() - 1;
+ if let (
+ _,
+ Value {
+ e: E::DottedTable(ref mut v),
+ ..
+ },
+ ) = values[last_i]
+ {
+ self.add_dotted_key(key_parts, value, v)?;
+ }
+ Ok(())
+ }
+
+ fn eat_whitespace(&mut self) {
+ self.tokens.eat_whitespace();
+ }
+
+ fn eat_comment(&mut self) -> Result<bool, Box<Error>> {
+ self.tokens.eat_comment().map_err(|e| self.token_error(e))
+ }
+
+ fn eat_newline_or_eof(&mut self) -> Result<(), Box<Error>> {
+ self.tokens
+ .eat_newline_or_eof()
+ .map_err(|e| self.token_error(e))
+ }
+
+ fn eat(&mut self, expected: Token<'a>) -> Result<bool, Box<Error>> {
+ self.tokens.eat(expected).map_err(|e| self.token_error(e))
+ }
+
+ fn eat_spanned(&mut self, expected: Token<'a>) -> Result<Option<Span>, Box<Error>> {
+ self.tokens
+ .eat_spanned(expected)
+ .map_err(|e| self.token_error(e))
+ }
+
+ fn expect(&mut self, expected: Token<'a>) -> Result<(), Box<Error>> {
+ self.tokens
+ .expect(expected)
+ .map_err(|e| self.token_error(e))
+ }
+
+ fn expect_spanned(&mut self, expected: Token<'a>) -> Result<Span, Box<Error>> {
+ self.tokens
+ .expect_spanned(expected)
+ .map_err(|e| self.token_error(e))
+ }
+
+ fn next(&mut self) -> Result<Option<(Span, Token<'a>)>, Box<Error>> {
+ self.tokens.next().map_err(|e| self.token_error(e))
+ }
+
+ fn peek(&mut self) -> Result<Option<(Span, Token<'a>)>, Box<Error>> {
+ self.tokens.peek().map_err(|e| self.token_error(e))
+ }
+
+ fn eof(&self) -> Box<Error> {
+ self.error(self.input.len(), ErrorKind::UnexpectedEof)
+ }
+
+ fn token_error(&self, error: TokenError) -> Box<Error> {
+ match error {
+ TokenError::InvalidCharInString(at, ch) => {
+ self.error(at, ErrorKind::InvalidCharInString(ch))
+ }
+ TokenError::InvalidEscape(at, ch) => self.error(at, ErrorKind::InvalidEscape(ch)),
+ TokenError::InvalidEscapeValue(at, v) => {
+ self.error(at, ErrorKind::InvalidEscapeValue(v))
+ }
+ TokenError::InvalidHexEscape(at, ch) => self.error(at, ErrorKind::InvalidHexEscape(ch)),
+ TokenError::NewlineInString(at) => self.error(at, ErrorKind::NewlineInString),
+ TokenError::Unexpected(at, ch) => self.error(at, ErrorKind::Unexpected(ch)),
+ TokenError::UnterminatedString(at) => self.error(at, ErrorKind::UnterminatedString),
+ TokenError::NewlineInTableKey(at) => self.error(at, ErrorKind::NewlineInTableKey),
+ TokenError::Wanted {
+ at,
+ expected,
+ found,
+ } => self.error(at, ErrorKind::Wanted { expected, found }),
+ TokenError::MultilineStringKey(at) => self.error(at, ErrorKind::MultilineStringKey),
+ }
+ }
+
+ fn error(&self, at: usize, kind: ErrorKind) -> Box<Error> {
+ let mut err = Error::from_kind(Some(at), kind);
+ err.fix_linecol(|at| self.to_linecol(at));
+ err
+ }
+
+ /// Converts a byte offset from an error message to a (line, column) pair
+ ///
+ /// All indexes are 0-based.
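+ ///
+ /// Illustrative example: with the input `"a = 1\nb = 2\n"`, an offset of 8
+ /// points at the `=` on the second line and maps to `(1, 2)`.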
+ fn to_linecol(&self, offset: usize) -> (usize, usize) {
+ let mut cur = 0;
+ // Use split_terminator instead of lines so that if there is a `\r`, it
+ // is included in the offset calculation. The `+1` values below account
+ // for the `\n`.
+ for (i, line) in self.input.split_terminator('\n').enumerate() {
+ if cur + line.len() + 1 > offset {
+ return (i, offset - cur);
+ }
+ cur += line.len() + 1;
+ }
+ (self.input.lines().count(), 0)
+ }
+}
+
+impl Error {
+ pub(crate) fn line_col(&self) -> Option<(usize, usize)> {
+ self.line.map(|line| (line, self.col))
+ }
+
+ fn from_kind(at: Option<usize>, kind: ErrorKind) -> Box<Self> {
+ Box::new(Error {
+ kind,
+ line: None,
+ col: 0,
+ at,
+ message: String::new(),
+ key: Vec::new(),
+ })
+ }
+
+ fn custom(at: Option<usize>, s: String) -> Box<Self> {
+ Box::new(Error {
+ kind: ErrorKind::Custom,
+ line: None,
+ col: 0,
+ at,
+ message: s,
+ key: Vec::new(),
+ })
+ }
+
+ pub(crate) fn add_key_context(&mut self, key: &str) {
+ self.key.insert(0, key.to_string());
+ }
+
+ fn fix_offset<F>(&mut self, f: F)
+ where
+ F: FnOnce() -> Option<usize>,
+ {
+ // An existing offset is always better positioned than anything we might
+ // want to add later.
+ if self.at.is_none() {
+ self.at = f();
+ }
+ }
+
+ fn fix_linecol<F>(&mut self, f: F)
+ where
+ F: FnOnce(usize) -> (usize, usize),
+ {
+ if let Some(at) = self.at {
+ let (line, col) = f(at);
+ self.line = Some(line);
+ self.col = col;
+ }
+ }
+}
+
+impl std::convert::From<Error> for std::io::Error {
+ fn from(e: Error) -> Self {
+ std::io::Error::new(std::io::ErrorKind::InvalidData, e.to_string())
+ }
+}
+
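+// The rendered message is the kind-specific description, followed by the
+// dotted key path when one was recorded and by a 1-based line/column when the
+// byte offset is known. An illustrative rendering: duplicate key: `name` for
+// key `package` at line 3 column 1.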
+impl Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match &self.kind {
+ ErrorKind::UnexpectedEof => "unexpected eof encountered".fmt(f)?,
+ ErrorKind::InvalidCharInString(c) => write!(
+ f,
+ "invalid character in string: `{}`",
+ c.escape_default().collect::<String>()
+ )?,
+ ErrorKind::InvalidEscape(c) => write!(
+ f,
+ "invalid escape character in string: `{}`",
+ c.escape_default().collect::<String>()
+ )?,
+ ErrorKind::InvalidHexEscape(c) => write!(
+ f,
+ "invalid hex escape character in string: `{}`",
+ c.escape_default().collect::<String>()
+ )?,
+ ErrorKind::InvalidEscapeValue(c) => write!(f, "invalid escape value: `{}`", c)?,
+ ErrorKind::NewlineInString => "newline in string found".fmt(f)?,
+ ErrorKind::Unexpected(ch) => write!(
+ f,
+ "unexpected character found: `{}`",
+ ch.escape_default().collect::<String>()
+ )?,
+ ErrorKind::UnterminatedString => "unterminated string".fmt(f)?,
+ ErrorKind::NewlineInTableKey => "found newline in table key".fmt(f)?,
+ ErrorKind::Wanted { expected, found } => {
+ write!(f, "expected {}, found {}", expected, found)?;
+ }
+ ErrorKind::NumberInvalid => "invalid number".fmt(f)?,
+ ErrorKind::DuplicateTable(ref s) => {
+ write!(f, "redefinition of table `{}`", s)?;
+ }
+ ErrorKind::DuplicateKey(ref s) => {
+ write!(f, "duplicate key: `{}`", s)?;
+ }
+ ErrorKind::RedefineAsArray => "table redefined as array".fmt(f)?,
+ ErrorKind::MultilineStringKey => "multiline strings are not allowed for key".fmt(f)?,
+ ErrorKind::Custom => self.message.fmt(f)?,
+ ErrorKind::ExpectedTuple(l) => write!(f, "expected table with length {}", l)?,
+ ErrorKind::ExpectedTupleIndex {
+ expected,
+ ref found,
+ } => write!(f, "expected table key `{}`, but was `{}`", expected, found)?,
+ ErrorKind::ExpectedEmptyTable => "expected empty table".fmt(f)?,
+ ErrorKind::DottedKeyInvalidType => {
+ "dotted key attempted to extend non-table type".fmt(f)?;
+ }
+ ErrorKind::UnexpectedKeys {
+ ref keys,
+ available,
+ } => write!(
+ f,
+ "unexpected keys in table: `{:?}`, available keys: `{:?}`",
+ keys, available
+ )?,
+ ErrorKind::UnquotedString => write!(
+ f,
+ "invalid TOML value, did you mean to use a quoted string?"
+ )?,
+ }
+
+ if !self.key.is_empty() {
+ write!(f, " for key `")?;
+ for (i, k) in self.key.iter().enumerate() {
+ if i > 0 {
+ write!(f, ".")?;
+ }
+ write!(f, "{}", k)?;
+ }
+ write!(f, "`")?;
+ }
+
+ if let Some(line) = self.line {
+ write!(f, " at line {} column {}", line + 1, self.col + 1)?;
+ }
+
+ Ok(())
+ }
+}
+
+impl error::Error for Error {}
+
+impl de::Error for Box<Error> {
+ fn custom<T: Display>(msg: T) -> Self {
+ Error::custom(None, msg.to_string())
+ }
+}
+
+enum Line<'a> {
+ Table {
+ at: usize,
+ header: Header<'a>,
+ array: bool,
+ },
+ KeyValue(Vec<(Span, Cow<'a, str>)>, Value<'a>),
+}
+
+struct Header<'a> {
+ first: bool,
+ array: bool,
+ tokens: Tokenizer<'a>,
+}
+
+impl<'a> Header<'a> {
+ fn new(tokens: Tokenizer<'a>, array: bool) -> Header<'a> {
+ Header {
+ first: true,
+ array,
+ tokens,
+ }
+ }
+
+ fn next(&mut self) -> Result<Option<(Span, Cow<'a, str>)>, TokenError> {
+ self.tokens.eat_whitespace();
+
+ if self.first || self.tokens.eat(Token::Period)? {
+ self.first = false;
+ self.tokens.eat_whitespace();
+ self.tokens.table_key().map(Some)
+ } else {
+ self.tokens.expect(Token::RightBracket)?;
+ if self.array {
+ self.tokens.expect(Token::RightBracket)?;
+ }
+
+ self.tokens.eat_whitespace();
+ if !self.tokens.eat_comment()? {
+ self.tokens.eat_newline_or_eof()?;
+ }
+ Ok(None)
+ }
+ }
+}
+
+#[derive(Debug)]
+struct Value<'a> {
+ e: E<'a>,
+ start: usize,
+ end: usize,
+}
+
+#[derive(Debug)]
+enum E<'a> {
+ Integer(i64),
+ Float(f64),
+ Boolean(bool),
+ String(Cow<'a, str>),
+ Array(Vec<Value<'a>>),
+ InlineTable(Vec<TablePair<'a>>),
+ DottedTable(Vec<TablePair<'a>>),
+}
+
+impl<'a> E<'a> {
+ fn type_name(&self) -> &'static str {
+ match *self {
+ E::String(..) => "string",
+ E::Integer(..) => "integer",
+ E::Float(..) => "float",
+ E::Boolean(..) => "boolean",
+ E::Array(..) => "array",
+ E::InlineTable(..) => "inline table",
+ E::DottedTable(..) => "dotted table",
+ }
+ }
+}
diff --git a/vendor/basic-toml/src/error.rs b/vendor/basic-toml/src/error.rs
new file mode 100644
index 000000000..9abfd53e8
--- /dev/null
+++ b/vendor/basic-toml/src/error.rs
@@ -0,0 +1,54 @@
+use std::fmt::{self, Debug, Display};
+
+/// Errors that can occur when serializing or deserializing TOML.
+pub struct Error(Box<ErrorInner>);
+
+pub(crate) enum ErrorInner {
+ Ser(crate::ser::Error),
+ De(crate::de::Error),
+}
+
+impl Error {
+ /// Produces a (line, column) pair of the position of the error if
+ /// available.
+ ///
+ /// All indexes are 0-based.
+ pub fn line_col(&self) -> Option<(usize, usize)> {
+ match &*self.0 {
+ ErrorInner::Ser(_) => None,
+ ErrorInner::De(error) => error.line_col(),
+ }
+ }
+}
+
+impl From<crate::ser::Error> for Error {
+ fn from(error: crate::ser::Error) -> Self {
+ Error(Box::new(ErrorInner::Ser(error)))
+ }
+}
+
+impl From<crate::de::Error> for Error {
+ fn from(error: crate::de::Error) -> Self {
+ Error(Box::new(ErrorInner::De(error)))
+ }
+}
+
+impl Display for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &*self.0 {
+ ErrorInner::Ser(error) => Display::fmt(error, formatter),
+ ErrorInner::De(error) => Display::fmt(error, formatter),
+ }
+ }
+}
+
+impl Debug for Error {
+ fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ match &*self.0 {
+ ErrorInner::Ser(error) => Debug::fmt(error, formatter),
+ ErrorInner::De(error) => Debug::fmt(error, formatter),
+ }
+ }
+}
+
+impl std::error::Error for Error {}
diff --git a/vendor/basic-toml/src/lib.rs b/vendor/basic-toml/src/lib.rs
new file mode 100644
index 000000000..d060fd046
--- /dev/null
+++ b/vendor/basic-toml/src/lib.rs
@@ -0,0 +1,141 @@
+//! [![github]](https://github.com/dtolnay/basic-toml)&ensp;[![crates-io]](https://crates.io/crates/basic-toml)&ensp;[![docs-rs]](https://docs.rs/basic-toml)
+//!
+//! [github]: https://img.shields.io/badge/github-8da0cb?style=for-the-badge&labelColor=555555&logo=github
+//! [crates-io]: https://img.shields.io/badge/crates.io-fc8d62?style=for-the-badge&labelColor=555555&logo=rust
+//! [docs-rs]: https://img.shields.io/badge/docs.rs-66c2a5?style=for-the-badge&labelColor=555555&logo=docs.rs
+//!
+//! <br>
+//!
+//! A library for parsing and producing data in [TOML] format using [Serde].
+//!
+//! TOML is designed to be "a config file format for humans": minimal and easy
+//! to read due to obvious semantics.
+//!
+//! ```toml
+//! [package]
+//! name = "basic-toml"
+#![doc = concat!("version = \"", env!("CARGO_PKG_VERSION_MAJOR"), ".", env!("CARGO_PKG_VERSION_MINOR"), ".", env!("CARGO_PKG_VERSION_PATCH"), "\"")]
+//! authors = ["Alex Crichton <alex@alexcrichton.com>"]
+//!
+//! [dependencies]
+//! serde = "1.0"
+//! ```
+//!
+//! The TOML format is widely used throughout the Rust community for
+//! configuration, notably being used by [Cargo], Rust's package manager.
+//!
+//! [TOML]: https://toml.io
+//! [Serde]: https://serde.rs
+//! [Cargo]: https://crates.io
+//!
+//! # Deserialization
+//!
+//! ```
+//! use semver::{Version, VersionReq};
+//! use serde_derive::Deserialize;
+//! use std::collections::BTreeMap as Map;
+//!
+//! #[derive(Deserialize)]
+//! struct Manifest {
+//! package: Package,
+//! #[serde(default)]
+//! dependencies: Map<String, VersionReq>,
+//! }
+//!
+//! #[derive(Deserialize)]
+//! struct Package {
+//! name: String,
+//! version: Version,
+//! #[serde(default)]
+//! authors: Vec<String>,
+//! }
+//!
+//! fn main() {
+//! let manifest: Manifest = basic_toml::from_str(r#"
+//! [package]
+//! name = "basic-toml"
+#![doc = concat!(" version = \"", env!("CARGO_PKG_VERSION_MAJOR"), ".", env!("CARGO_PKG_VERSION_MINOR"), ".", env!("CARGO_PKG_VERSION_PATCH"), "\"")]
+//! authors = ["Alex Crichton <alex@alexcrichton.com>"]
+//!
+//! [dependencies]
+//! serde = "^1.0"
+//! "#).unwrap();
+//!
+//! assert_eq!(manifest.package.name, "basic-toml");
+#![doc = concat!(" assert_eq!(manifest.package.version, Version::new(", env!("CARGO_PKG_VERSION_MAJOR"), ", ", env!("CARGO_PKG_VERSION_MINOR"), ", ", env!("CARGO_PKG_VERSION_PATCH"), "));")]
+//! assert_eq!(manifest.package.authors, ["Alex Crichton <alex@alexcrichton.com>"]);
+//! assert_eq!(manifest.dependencies["serde"].to_string(), "^1.0");
+//! }
+//! ```
+//!
+//! # Serialization
+//!
+//! ```
+//! use semver::{Version, VersionReq};
+//! use serde_derive::Serialize;
+//! use std::collections::BTreeMap as Map;
+//!
+//! #[derive(Serialize)]
+//! struct Manifest {
+//! package: Package,
+//! dependencies: Map<String, VersionReq>,
+//! }
+//!
+//! #[derive(Serialize)]
+//! struct Package {
+//! name: String,
+//! version: Version,
+//! authors: Vec<String>,
+//! }
+//!
+//! fn main() {
+//! let manifest = Manifest {
+//! package: Package {
+//! name: "basic-toml".to_owned(),
+#![doc = concat!(" version: Version::new(", env!("CARGO_PKG_VERSION_MAJOR"), ", ", env!("CARGO_PKG_VERSION_MINOR"), ", ", env!("CARGO_PKG_VERSION_PATCH"), "),")]
+//! authors: vec!["Alex Crichton <alex@alexcrichton.com>".to_owned()],
+//! },
+//! dependencies: {
+//! let mut dependencies = Map::new();
+//! dependencies.insert("serde".to_owned(), "^1.0".parse().unwrap());
+//! dependencies
+//! },
+//! };
+//!
+//! let toml = basic_toml::to_string(&manifest).unwrap();
+//! print!("{}", toml);
+//! }
+//! ```
+//!
+//! # Spec compatibility
+//!
+//! TOML v0.5.0.
+//!
+//! TOML's date and time syntax is not supported.
+
+#![doc(html_root_url = "https://docs.rs/basic-toml/0.1.2")]
+#![deny(missing_docs)]
+#![allow(
+ clippy::bool_to_int_with_if,
+ clippy::let_underscore_untyped,
+ clippy::manual_let_else,
+ clippy::manual_range_contains,
+ clippy::match_like_matches_macro,
+ clippy::missing_errors_doc,
+ clippy::must_use_candidate,
+ clippy::needless_doctest_main,
+ clippy::needless_pass_by_value,
+ clippy::similar_names,
+ clippy::type_complexity,
+ clippy::uninlined_format_args,
+ clippy::unwrap_or_else_default
+)]
+
+mod de;
+mod error;
+mod ser;
+mod tokens;
+
+pub use crate::de::{from_slice, from_str};
+pub use crate::error::Error;
+pub use crate::ser::to_string;
diff --git a/vendor/basic-toml/src/ser.rs b/vendor/basic-toml/src/ser.rs
new file mode 100644
index 000000000..c7a600898
--- /dev/null
+++ b/vendor/basic-toml/src/ser.rs
@@ -0,0 +1,838 @@
+use serde::ser::{self, Serialize};
+use std::cell::Cell;
+use std::error;
+use std::fmt::{self, Display, Write};
+
+/// Serialize the given data structure as a String of TOML.
+///
+/// Serialization can fail if `T`'s implementation of `Serialize` decides to
+/// fail, if `T` contains a map with non-string keys, or if `T` attempts to
+/// serialize an unsupported datatype such as an enum, tuple, or tuple struct.
+pub fn to_string<T: ?Sized>(value: &T) -> Result<String, crate::Error>
+where
+ T: Serialize,
+{
+ let mut dst = String::with_capacity(128);
+ value.serialize(&mut Serializer::new(&mut dst))?;
+ Ok(dst)
+}
+
+#[derive(Debug)]
+pub(crate) enum Error {
+ /// Indicates that a Rust type was requested to be serialized but it was not
+ /// supported.
+ ///
+ /// Currently the TOML format does not support serializing types such as
+ /// enums, tuples and tuple structs.
+ UnsupportedType,
+
+ /// The keys of all TOML maps must be strings, but serialization was
+ /// attempted where the key of a map was not a string.
+ KeyNotString,
+
+ /// All values in a TOML table must be emitted before further tables are
+ /// emitted. If a value is emitted *after* a table then this error is
+ /// generated.
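+ ///
+ /// Illustrative example (assuming serde's declaration-order field
+ /// serialization): a struct whose plain field is declared after a nested
+ /// table field, such as `struct A { table: B, value: i32 }`, triggers this
+ /// error because `value` would have to be written after the `[table]`
+ /// header has already been emitted.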
+ ValueAfterTable,
+
+ /// A `None` value was attempted to be serialized, but it is not supported.
+ UnsupportedNone,
+
+ /// A custom error which could be generated when serializing a particular
+ /// type.
+ Custom(String),
+}
+
+struct Serializer<'a> {
+ dst: &'a mut String,
+ state: State<'a>,
+}
+
+#[derive(Debug, Copy, Clone)]
+enum ArrayState {
+ Started,
+ StartedAsATable,
+}
+
+#[derive(Debug, Clone)]
+enum State<'a> {
+ Table {
+ key: &'a str,
+ parent: &'a State<'a>,
+ first: &'a Cell<bool>,
+ table_emitted: &'a Cell<bool>,
+ },
+ Array {
+ parent: &'a State<'a>,
+ first: &'a Cell<bool>,
+ type_: &'a Cell<Option<ArrayState>>,
+ len: Option<usize>,
+ },
+ End,
+}
+
+struct SerializeSeq<'a, 'b> {
+ ser: &'b mut Serializer<'a>,
+ first: Cell<bool>,
+ type_: Cell<Option<ArrayState>>,
+ len: Option<usize>,
+}
+
+struct SerializeTable<'a, 'b> {
+ ser: &'b mut Serializer<'a>,
+ key: String,
+ first: Cell<bool>,
+ table_emitted: Cell<bool>,
+}
+
+impl<'a> Serializer<'a> {
+ fn new(dst: &'a mut String) -> Serializer<'a> {
+ Serializer {
+ dst,
+ state: State::End,
+ }
+ }
+
+ fn display<T: Display>(&mut self, t: T, type_: ArrayState) -> Result<(), Error> {
+ self.emit_key(type_)?;
+ write!(self.dst, "{}", t).map_err(ser::Error::custom)?;
+ if let State::Table { .. } = self.state {
+ self.dst.push('\n');
+ }
+ Ok(())
+ }
+
+ fn emit_key(&mut self, type_: ArrayState) -> Result<(), Error> {
+ self.array_type(type_);
+ let state = self.state.clone();
+ self._emit_key(&state)
+ }
+
+ // recursive implementation of `emit_key` above
+ fn _emit_key(&mut self, state: &State) -> Result<(), Error> {
+ match *state {
+ State::End => Ok(()),
+ State::Array {
+ parent,
+ first,
+ type_,
+ len,
+ } => {
+ assert!(type_.get().is_some());
+ if first.get() {
+ self._emit_key(parent)?;
+ }
+ self.emit_array(first, len);
+ Ok(())
+ }
+ State::Table {
+ parent,
+ first,
+ table_emitted,
+ key,
+ } => {
+ if table_emitted.get() {
+ return Err(Error::ValueAfterTable);
+ }
+ if first.get() {
+ self.emit_table_header(parent)?;
+ first.set(false);
+ }
+ self.escape_key(key)?;
+ self.dst.push_str(" = ");
+ Ok(())
+ }
+ }
+ }
+
+ fn emit_array(&mut self, first: &Cell<bool>, _len: Option<usize>) {
+ if first.get() {
+ self.dst.push('[');
+ } else {
+ self.dst.push_str(", ");
+ }
+ }
+
+ fn array_type(&mut self, type_: ArrayState) {
+ let prev = match self.state {
+ State::Array { type_, .. } => type_,
+ _ => return,
+ };
+ if prev.get().is_none() {
+ prev.set(Some(type_));
+ }
+ }
+
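+ // Writes a key, using the bare form when it only contains ASCII
+ // alphanumerics, `-` and `_`, and falling back to a quoted string via
+ // `emit_str` otherwise. Illustrative examples: `name` stays bare, while an
+ // empty key or `hello world` is emitted as a quoted, escaped string.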
+ fn escape_key(&mut self, key: &str) -> Result<(), Error> {
+ let ok = !key.is_empty()
+ && key.chars().all(|c| match c {
+ 'a'..='z' | 'A'..='Z' | '0'..='9' | '-' | '_' => true,
+ _ => false,
+ });
+ if ok {
+ write!(self.dst, "{}", key).map_err(ser::Error::custom)?;
+ } else {
+ self.emit_str(key)?;
+ }
+ Ok(())
+ }
+
+ fn emit_str(&mut self, value: &str) -> Result<(), Error> {
+ self.dst.push('"');
+ for ch in value.chars() {
+ match ch {
+ '\u{8}' => self.dst.push_str("\\b"),
+ '\u{9}' => self.dst.push_str("\\t"),
+ '\u{a}' => self.dst.push_str("\\n"),
+ '\u{c}' => self.dst.push_str("\\f"),
+ '\u{d}' => self.dst.push_str("\\r"),
+ '\u{22}' => self.dst.push_str("\\\""),
+ '\u{5c}' => self.dst.push_str("\\\\"),
+ c if c <= '\u{1f}' || c == '\u{7f}' => {
+ write!(self.dst, "\\u{:04X}", ch as u32).map_err(ser::Error::custom)?;
+ }
+ ch => self.dst.push(ch),
+ }
+ }
+ self.dst.push('"');
+ Ok(())
+ }
+
+ fn emit_table_header(&mut self, state: &State) -> Result<(), Error> {
+ let array_of_tables = match *state {
+ State::End => return Ok(()),
+ State::Array { .. } => true,
+ State::Table { .. } => false,
+ };
+
+ // Unlike [..]s, we can't omit [[..]] ancestors, so be sure to emit
+ // table headers for them.
+ let mut p = state;
+ if let State::Array { first, parent, .. } = *state {
+ if first.get() {
+ p = parent;
+ }
+ }
+ while let State::Table { first, parent, .. } = *p {
+ p = parent;
+ if !first.get() {
+ break;
+ }
+ if let State::Array {
+ parent: &State::Table { .. },
+ ..
+ } = *parent
+ {
+ self.emit_table_header(parent)?;
+ break;
+ }
+ }
+
+ match *state {
+ State::Table { first, .. } => {
+ if !first.get() {
+ // Newline if we are a table that is not the first table in
+ // the document.
+ self.dst.push('\n');
+ }
+ }
+ State::Array { parent, first, .. } => {
+ if !first.get() {
+ // Always newline if we are not the first item in the
+ // table-array
+ self.dst.push('\n');
+ } else if let State::Table { first, .. } = *parent {
+ if !first.get() {
+ // Newline if we are not the first item in the document
+ self.dst.push('\n');
+ }
+ }
+ }
+ State::End => {}
+ }
+ self.dst.push('[');
+ if array_of_tables {
+ self.dst.push('[');
+ }
+ self.emit_key_part(state)?;
+ if array_of_tables {
+ self.dst.push(']');
+ }
+ self.dst.push_str("]\n");
+ Ok(())
+ }
+
+ fn emit_key_part(&mut self, key: &State) -> Result<bool, Error> {
+ match *key {
+ State::Array { parent, .. } => self.emit_key_part(parent),
+ State::End => Ok(true),
+ State::Table {
+ key,
+ parent,
+ table_emitted,
+ ..
+ } => {
+ table_emitted.set(true);
+ let first = self.emit_key_part(parent)?;
+ if !first {
+ self.dst.push('.');
+ }
+ self.escape_key(key)?;
+ Ok(false)
+ }
+ }
+ }
+}
+
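+// Shared float-formatting helper used by `serialize_f32`/`serialize_f64`
+// below. Illustrative outputs: 1.0 becomes "1.0", 1.5 stays "1.5",
+// f64::INFINITY becomes "inf", f64::NAN becomes "nan" (or "-nan" when the
+// sign bit is set), and positive/negative zero become "0.0"/"-0.0".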
+macro_rules! serialize_float {
+ ($this:expr, $v:expr) => {{
+ $this.emit_key(ArrayState::Started)?;
+ match ($v.is_sign_negative(), $v.is_nan(), $v == 0.0) {
+ (true, true, _) => write!($this.dst, "-nan"),
+ (false, true, _) => write!($this.dst, "nan"),
+ (true, false, true) => write!($this.dst, "-0.0"),
+ (false, false, true) => write!($this.dst, "0.0"),
+ (_, false, false) => write!($this.dst, "{}", $v).and_then(|_| {
+ if $v % 1.0 == 0.0 {
+ write!($this.dst, ".0")
+ } else {
+ Ok(())
+ }
+ }),
+ }
+ .map_err(ser::Error::custom)?;
+
+ if let State::Table { .. } = $this.state {
+ $this.dst.push_str("\n");
+ }
+ return Ok(());
+ }};
+}
+
+impl<'a, 'b> ser::Serializer for &'b mut Serializer<'a> {
+ type Ok = ();
+ type Error = Error;
+ type SerializeSeq = SerializeSeq<'a, 'b>;
+ type SerializeTuple = SerializeSeq<'a, 'b>;
+ type SerializeTupleStruct = SerializeSeq<'a, 'b>;
+ type SerializeTupleVariant = ser::Impossible<(), Error>;
+ type SerializeMap = SerializeTable<'a, 'b>;
+ type SerializeStruct = SerializeTable<'a, 'b>;
+ type SerializeStructVariant = ser::Impossible<(), Error>;
+
+ fn serialize_bool(self, v: bool) -> Result<(), Self::Error> {
+ self.display(v, ArrayState::Started)
+ }
+
+ fn serialize_i8(self, v: i8) -> Result<(), Self::Error> {
+ self.display(v, ArrayState::Started)
+ }
+
+ fn serialize_i16(self, v: i16) -> Result<(), Self::Error> {
+ self.display(v, ArrayState::Started)
+ }
+
+ fn serialize_i32(self, v: i32) -> Result<(), Self::Error> {
+ self.display(v, ArrayState::Started)
+ }
+
+ fn serialize_i64(self, v: i64) -> Result<(), Self::Error> {
+ self.display(v, ArrayState::Started)
+ }
+
+ fn serialize_u8(self, v: u8) -> Result<(), Self::Error> {
+ self.display(v, ArrayState::Started)
+ }
+
+ fn serialize_u16(self, v: u16) -> Result<(), Self::Error> {
+ self.display(v, ArrayState::Started)
+ }
+
+ fn serialize_u32(self, v: u32) -> Result<(), Self::Error> {
+ self.display(v, ArrayState::Started)
+ }
+
+ fn serialize_u64(self, v: u64) -> Result<(), Self::Error> {
+ self.display(v, ArrayState::Started)
+ }
+
+ fn serialize_f32(self, v: f32) -> Result<(), Self::Error> {
+ serialize_float!(self, v)
+ }
+
+ fn serialize_f64(self, v: f64) -> Result<(), Self::Error> {
+ serialize_float!(self, v)
+ }
+
+ fn serialize_char(self, v: char) -> Result<(), Self::Error> {
+ let mut buf = [0; 4];
+ self.serialize_str(v.encode_utf8(&mut buf))
+ }
+
+ fn serialize_str(self, value: &str) -> Result<(), Self::Error> {
+ self.emit_key(ArrayState::Started)?;
+ self.emit_str(value)?;
+ if let State::Table { .. } = self.state {
+ self.dst.push('\n');
+ }
+ Ok(())
+ }
+
+ fn serialize_bytes(self, value: &[u8]) -> Result<(), Self::Error> {
+ value.serialize(self)
+ }
+
+ fn serialize_none(self) -> Result<(), Self::Error> {
+ Err(Error::UnsupportedNone)
+ }
+
+ fn serialize_some<T: ?Sized>(self, value: &T) -> Result<(), Self::Error>
+ where
+ T: Serialize,
+ {
+ value.serialize(self)
+ }
+
+ fn serialize_unit(self) -> Result<(), Self::Error> {
+ Err(Error::UnsupportedType)
+ }
+
+ fn serialize_unit_struct(self, _name: &'static str) -> Result<(), Self::Error> {
+ Err(Error::UnsupportedType)
+ }
+
+ fn serialize_unit_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ variant: &'static str,
+ ) -> Result<(), Self::Error> {
+ self.serialize_str(variant)
+ }
+
+ fn serialize_newtype_struct<T: ?Sized>(
+ self,
+ _name: &'static str,
+ value: &T,
+ ) -> Result<(), Self::Error>
+ where
+ T: Serialize,
+ {
+ value.serialize(self)
+ }
+
+ fn serialize_newtype_variant<T: ?Sized>(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _value: &T,
+ ) -> Result<(), Self::Error>
+ where
+ T: Serialize,
+ {
+ Err(Error::UnsupportedType)
+ }
+
+ fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
+ self.array_type(ArrayState::Started);
+ Ok(SerializeSeq {
+ ser: self,
+ first: Cell::new(true),
+ type_: Cell::new(None),
+ len,
+ })
+ }
+
+ fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
+ self.serialize_seq(Some(len))
+ }
+
+ fn serialize_tuple_struct(
+ self,
+ _name: &'static str,
+ len: usize,
+ ) -> Result<Self::SerializeTupleStruct, Self::Error> {
+ self.serialize_seq(Some(len))
+ }
+
+ fn serialize_tuple_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleVariant, Self::Error> {
+ Err(Error::UnsupportedType)
+ }
+
+ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
+ self.array_type(ArrayState::StartedAsATable);
+ Ok(SerializeTable {
+ ser: self,
+ key: String::new(),
+ first: Cell::new(true),
+ table_emitted: Cell::new(false),
+ })
+ }
+
+ fn serialize_struct(
+ self,
+ _name: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStruct, Self::Error> {
+ self.array_type(ArrayState::StartedAsATable);
+ Ok(SerializeTable {
+ ser: self,
+ key: String::new(),
+ first: Cell::new(true),
+ table_emitted: Cell::new(false),
+ })
+ }
+
+ fn serialize_struct_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStructVariant, Self::Error> {
+ Err(Error::UnsupportedType)
+ }
+}
+
+impl<'a, 'b> ser::SerializeSeq for SerializeSeq<'a, 'b> {
+ type Ok = ();
+ type Error = Error;
+
+ fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: Serialize,
+ {
+ value.serialize(&mut Serializer {
+ dst: &mut *self.ser.dst,
+ state: State::Array {
+ parent: &self.ser.state,
+ first: &self.first,
+ type_: &self.type_,
+ len: self.len,
+ },
+ })?;
+ self.first.set(false);
+ Ok(())
+ }
+
+ fn end(self) -> Result<(), Error> {
+ match self.type_.get() {
+ Some(ArrayState::StartedAsATable) => return Ok(()),
+ Some(ArrayState::Started) => self.ser.dst.push(']'),
+ None => {
+ assert!(self.first.get());
+ self.ser.emit_key(ArrayState::Started)?;
+ self.ser.dst.push_str("[]");
+ }
+ }
+ if let State::Table { .. } = self.ser.state {
+ self.ser.dst.push('\n');
+ }
+ Ok(())
+ }
+}
+
+impl<'a, 'b> ser::SerializeTuple for SerializeSeq<'a, 'b> {
+ type Ok = ();
+ type Error = Error;
+
+ fn serialize_element<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: Serialize,
+ {
+ ser::SerializeSeq::serialize_element(self, value)
+ }
+
+ fn end(self) -> Result<(), Error> {
+ ser::SerializeSeq::end(self)
+ }
+}
+
+impl<'a, 'b> ser::SerializeTupleStruct for SerializeSeq<'a, 'b> {
+ type Ok = ();
+ type Error = Error;
+
+ fn serialize_field<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: Serialize,
+ {
+ ser::SerializeSeq::serialize_element(self, value)
+ }
+
+ fn end(self) -> Result<(), Error> {
+ ser::SerializeSeq::end(self)
+ }
+}
+
+impl<'a, 'b> ser::SerializeMap for SerializeTable<'a, 'b> {
+ type Ok = ();
+ type Error = Error;
+
+ fn serialize_key<T: ?Sized>(&mut self, input: &T) -> Result<(), Error>
+ where
+ T: Serialize,
+ {
+ self.key = input.serialize(StringExtractor)?;
+ Ok(())
+ }
+
+ fn serialize_value<T: ?Sized>(&mut self, value: &T) -> Result<(), Error>
+ where
+ T: Serialize,
+ {
+ let res = value.serialize(&mut Serializer {
+ dst: &mut *self.ser.dst,
+ state: State::Table {
+ key: &self.key,
+ parent: &self.ser.state,
+ first: &self.first,
+ table_emitted: &self.table_emitted,
+ },
+ });
+ match res {
+ Ok(()) => self.first.set(false),
+ Err(Error::UnsupportedNone) => {}
+ Err(e) => return Err(e),
+ }
+ Ok(())
+ }
+
+ fn end(self) -> Result<(), Error> {
+ if self.first.get() {
+ let state = self.ser.state.clone();
+ self.ser.emit_table_header(&state)?;
+ }
+ Ok(())
+ }
+}
+
+impl<'a, 'b> ser::SerializeStruct for SerializeTable<'a, 'b> {
+ type Ok = ();
+ type Error = Error;
+
+ fn serialize_field<T: ?Sized>(&mut self, key: &'static str, value: &T) -> Result<(), Error>
+ where
+ T: Serialize,
+ {
+ let res = value.serialize(&mut Serializer {
+ dst: &mut *self.ser.dst,
+ state: State::Table {
+ key,
+ parent: &self.ser.state,
+ first: &self.first,
+ table_emitted: &self.table_emitted,
+ },
+ });
+ match res {
+ Ok(()) => self.first.set(false),
+ Err(Error::UnsupportedNone) => {}
+ Err(e) => return Err(e),
+ }
+ Ok(())
+ }
+
+ fn end(self) -> Result<(), Error> {
+ if self.first.get() {
+ let state = self.ser.state.clone();
+ self.ser.emit_table_header(&state)?;
+ }
+ Ok(())
+ }
+}
+
+struct StringExtractor;
+
+impl ser::Serializer for StringExtractor {
+ type Ok = String;
+ type Error = Error;
+ type SerializeSeq = ser::Impossible<String, Error>;
+ type SerializeTuple = ser::Impossible<String, Error>;
+ type SerializeTupleStruct = ser::Impossible<String, Error>;
+ type SerializeTupleVariant = ser::Impossible<String, Error>;
+ type SerializeMap = ser::Impossible<String, Error>;
+ type SerializeStruct = ser::Impossible<String, Error>;
+ type SerializeStructVariant = ser::Impossible<String, Error>;
+
+ fn serialize_bool(self, _v: bool) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_i8(self, _v: i8) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_i16(self, _v: i16) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_i32(self, _v: i32) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_i64(self, _v: i64) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_u8(self, _v: u8) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_u16(self, _v: u16) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_u32(self, _v: u32) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_u64(self, _v: u64) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_f32(self, _v: f32) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_f64(self, _v: f64) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_char(self, _v: char) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_str(self, value: &str) -> Result<String, Self::Error> {
+ Ok(value.to_string())
+ }
+
+ fn serialize_bytes(self, _value: &[u8]) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_none(self) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_some<T: ?Sized>(self, _value: &T) -> Result<String, Self::Error>
+ where
+ T: Serialize,
+ {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_unit(self) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_unit_struct(self, _name: &'static str) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_unit_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ ) -> Result<String, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_newtype_struct<T: ?Sized>(
+ self,
+ _name: &'static str,
+ value: &T,
+ ) -> Result<String, Self::Error>
+ where
+ T: Serialize,
+ {
+ value.serialize(self)
+ }
+
+ fn serialize_newtype_variant<T: ?Sized>(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _value: &T,
+ ) -> Result<String, Self::Error>
+ where
+ T: Serialize,
+ {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_seq(self, _len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_tuple(self, _len: usize) -> Result<Self::SerializeTuple, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_tuple_struct(
+ self,
+ _name: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleStruct, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_tuple_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeTupleVariant, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_map(self, _len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_struct(
+ self,
+ _name: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStruct, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+
+ fn serialize_struct_variant(
+ self,
+ _name: &'static str,
+ _variant_index: u32,
+ _variant: &'static str,
+ _len: usize,
+ ) -> Result<Self::SerializeStructVariant, Self::Error> {
+ Err(Error::KeyNotString)
+ }
+}
+
+impl Display for Error {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match *self {
+ Error::UnsupportedType => "unsupported Rust type".fmt(f),
+ Error::KeyNotString => "map key was not a string".fmt(f),
+ Error::ValueAfterTable => "values must be emitted before tables".fmt(f),
+ Error::UnsupportedNone => "unsupported None value".fmt(f),
+ Error::Custom(ref s) => s.fmt(f),
+ }
+ }
+}
+
+impl error::Error for Error {}
+
+impl ser::Error for Error {
+ fn custom<T: Display>(msg: T) -> Error {
+ Error::Custom(msg.to_string())
+ }
+}
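
A brief, hedged sketch of how the rules above surface through `basic_toml::to_string` (the `Config` struct and its fields are hypothetical, and the behavior is inferred from the match arms and `StringExtractor` in this file): `None` fields appear to be skipped silently because `Error::UnsupportedNone` is swallowed in the field arms, while non-string map keys are rejected via `Error::KeyNotString`.

use serde::Serialize;
use std::collections::BTreeMap;

#[derive(Serialize)]
struct Config {
    name: String,
    // `None` fields appear to be skipped: the `Error::UnsupportedNone`
    // arms in `serialize_value`/`serialize_field` above swallow the error.
    nickname: Option<String>,
}

fn main() {
    let toml = basic_toml::to_string(&Config {
        name: "demo".to_owned(),
        nickname: None,
    })
    .unwrap();
    assert!(toml.contains("name = \"demo\""));
    assert!(!toml.contains("nickname"));

    // Non-string map keys go through `StringExtractor`, which rejects them
    // with `Error::KeyNotString` ("map key was not a string").
    let mut map = BTreeMap::new();
    map.insert(1_u32, "one");
    assert!(basic_toml::to_string(&map).is_err());
}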
diff --git a/vendor/basic-toml/src/tokens.rs b/vendor/basic-toml/src/tokens.rs
new file mode 100644
index 000000000..22aebc65b
--- /dev/null
+++ b/vendor/basic-toml/src/tokens.rs
@@ -0,0 +1,546 @@
+use std::borrow::Cow;
+use std::char;
+use std::str;
+
+/// A span, designating a range of bytes where a token is located.
+#[derive(Eq, PartialEq, Debug, Clone, Copy)]
+pub struct Span {
+ /// The start of the range.
+ pub start: usize,
+ /// The end of the range (exclusive).
+ pub end: usize,
+}
+
+impl From<Span> for (usize, usize) {
+ fn from(Span { start, end }: Span) -> (usize, usize) {
+ (start, end)
+ }
+}
+
+#[derive(Eq, PartialEq, Debug)]
+pub enum Token<'a> {
+ Whitespace(&'a str),
+ Newline,
+ Comment(&'a str),
+
+ Equals,
+ Period,
+ Comma,
+ Colon,
+ Plus,
+ LeftBrace,
+ RightBrace,
+ LeftBracket,
+ RightBracket,
+
+ Keylike(&'a str),
+ String {
+ src: &'a str,
+ val: Cow<'a, str>,
+ multiline: bool,
+ },
+}
+
+#[derive(Eq, PartialEq, Debug)]
+pub enum Error {
+ InvalidCharInString(usize, char),
+ InvalidEscape(usize, char),
+ InvalidHexEscape(usize, char),
+ InvalidEscapeValue(usize, u32),
+ NewlineInString(usize),
+ Unexpected(usize, char),
+ UnterminatedString(usize),
+ NewlineInTableKey(usize),
+ MultilineStringKey(usize),
+ Wanted {
+ at: usize,
+ expected: &'static str,
+ found: &'static str,
+ },
+}
+
+#[derive(Clone)]
+pub struct Tokenizer<'a> {
+ input: &'a str,
+ chars: CrlfFold<'a>,
+}
+
+#[derive(Clone)]
+struct CrlfFold<'a> {
+ chars: str::CharIndices<'a>,
+}
+
+#[derive(Debug)]
+enum MaybeString {
+ NotEscaped(usize),
+ Owned(String),
+}
+
+impl<'a> Tokenizer<'a> {
+ pub fn new(input: &'a str) -> Tokenizer<'a> {
+ let mut t = Tokenizer {
+ input,
+ chars: CrlfFold {
+ chars: input.char_indices(),
+ },
+ };
+        // Eat the UTF-8 BOM
+ t.eatc('\u{feff}');
+ t
+ }
+
+ pub fn next(&mut self) -> Result<Option<(Span, Token<'a>)>, Error> {
+ let (start, token) = match self.one() {
+ Some((start, '\n')) => (start, Token::Newline),
+ Some((start, ' ' | '\t')) => (start, self.whitespace_token(start)),
+ Some((start, '#')) => (start, self.comment_token(start)),
+ Some((start, '=')) => (start, Token::Equals),
+ Some((start, '.')) => (start, Token::Period),
+ Some((start, ',')) => (start, Token::Comma),
+ Some((start, ':')) => (start, Token::Colon),
+ Some((start, '+')) => (start, Token::Plus),
+ Some((start, '{')) => (start, Token::LeftBrace),
+ Some((start, '}')) => (start, Token::RightBrace),
+ Some((start, '[')) => (start, Token::LeftBracket),
+ Some((start, ']')) => (start, Token::RightBracket),
+ Some((start, '\'')) => {
+ return self
+ .literal_string(start)
+ .map(|t| Some((self.step_span(start), t)))
+ }
+ Some((start, '"')) => {
+ return self
+ .basic_string(start)
+ .map(|t| Some((self.step_span(start), t)))
+ }
+ Some((start, ch)) if is_keylike(ch) => (start, self.keylike(start)),
+
+ Some((start, ch)) => return Err(Error::Unexpected(start, ch)),
+ None => return Ok(None),
+ };
+
+ let span = self.step_span(start);
+ Ok(Some((span, token)))
+ }
+
+ pub fn peek(&mut self) -> Result<Option<(Span, Token<'a>)>, Error> {
+ self.clone().next()
+ }
+
+ pub fn eat(&mut self, expected: Token<'a>) -> Result<bool, Error> {
+ self.eat_spanned(expected).map(|s| s.is_some())
+ }
+
+    /// Eat a value, returning its span if it was consumed.
+ pub fn eat_spanned(&mut self, expected: Token<'a>) -> Result<Option<Span>, Error> {
+ let span = match self.peek()? {
+ Some((span, ref found)) if expected == *found => span,
+ Some(_) | None => return Ok(None),
+ };
+
+ drop(self.next());
+ Ok(Some(span))
+ }
+
+ pub fn expect(&mut self, expected: Token<'a>) -> Result<(), Error> {
+ // ignore span
+ let _ = self.expect_spanned(expected)?;
+ Ok(())
+ }
+
+ /// Expect the given token returning its span.
+ pub fn expect_spanned(&mut self, expected: Token<'a>) -> Result<Span, Error> {
+ let current = self.current();
+ match self.next()? {
+ Some((span, found)) => {
+ if expected == found {
+ Ok(span)
+ } else {
+ Err(Error::Wanted {
+ at: current,
+ expected: expected.describe(),
+ found: found.describe(),
+ })
+ }
+ }
+ None => Err(Error::Wanted {
+ at: self.input.len(),
+ expected: expected.describe(),
+ found: "eof",
+ }),
+ }
+ }
+
+ pub fn table_key(&mut self) -> Result<(Span, Cow<'a, str>), Error> {
+ let current = self.current();
+ match self.next()? {
+ Some((span, Token::Keylike(k))) => Ok((span, k.into())),
+ Some((
+ span,
+ Token::String {
+ src,
+ val,
+ multiline,
+ },
+ )) => {
+ let offset = self.substr_offset(src);
+ if multiline {
+ return Err(Error::MultilineStringKey(offset));
+ }
+ match src.find('\n') {
+ None => Ok((span, val)),
+ Some(i) => Err(Error::NewlineInTableKey(offset + i)),
+ }
+ }
+ Some((_, other)) => Err(Error::Wanted {
+ at: current,
+ expected: "a table key",
+ found: other.describe(),
+ }),
+ None => Err(Error::Wanted {
+ at: self.input.len(),
+ expected: "a table key",
+ found: "eof",
+ }),
+ }
+ }
+
+ pub fn eat_whitespace(&mut self) {
+ while self.eatc(' ') || self.eatc('\t') {
+ // ...
+ }
+ }
+
+ pub fn eat_comment(&mut self) -> Result<bool, Error> {
+ if !self.eatc('#') {
+ return Ok(false);
+ }
+ drop(self.comment_token(0));
+ self.eat_newline_or_eof().map(|()| true)
+ }
+
+ pub fn eat_newline_or_eof(&mut self) -> Result<(), Error> {
+ let current = self.current();
+ match self.next()? {
+ None | Some((_, Token::Newline)) => Ok(()),
+ Some((_, other)) => Err(Error::Wanted {
+ at: current,
+ expected: "newline",
+ found: other.describe(),
+ }),
+ }
+ }
+
+ pub fn skip_to_newline(&mut self) {
+ loop {
+ match self.one() {
+ Some((_, '\n')) | None => break,
+ _ => {}
+ }
+ }
+ }
+
+ fn eatc(&mut self, ch: char) -> bool {
+ match self.chars.clone().next() {
+ Some((_, ch2)) if ch == ch2 => {
+ self.one();
+ true
+ }
+ _ => false,
+ }
+ }
+
+ pub fn current(&mut self) -> usize {
+ match self.chars.clone().next() {
+ Some(i) => i.0,
+ None => self.input.len(),
+ }
+ }
+
+ fn whitespace_token(&mut self, start: usize) -> Token<'a> {
+ while self.eatc(' ') || self.eatc('\t') {
+ // ...
+ }
+ Token::Whitespace(&self.input[start..self.current()])
+ }
+
+ fn comment_token(&mut self, start: usize) -> Token<'a> {
+ while let Some((_, ch)) = self.chars.clone().next() {
+ if ch != '\t' && (ch < '\u{20}' || ch > '\u{10ffff}') {
+ break;
+ }
+ self.one();
+ }
+ Token::Comment(&self.input[start..self.current()])
+ }
+
+ fn read_string(
+ &mut self,
+ delim: char,
+ start: usize,
+ new_ch: &mut dyn FnMut(
+ &mut Tokenizer,
+ &mut MaybeString,
+ bool,
+ usize,
+ char,
+ ) -> Result<(), Error>,
+ ) -> Result<Token<'a>, Error> {
+ let mut multiline = false;
+ if self.eatc(delim) {
+ if self.eatc(delim) {
+ multiline = true;
+ } else {
+ return Ok(Token::String {
+ src: &self.input[start..start + 2],
+ val: Cow::Borrowed(""),
+ multiline: false,
+ });
+ }
+ }
+ let mut val = MaybeString::NotEscaped(self.current());
+ let mut n = 0;
+ loop {
+ n += 1;
+ match self.one() {
+ Some((i, '\n')) => {
+ if multiline {
+ if self.input.as_bytes()[i] == b'\r' {
+ val.make_owned(&self.input[..i]);
+ }
+ if n == 1 {
+ val = MaybeString::NotEscaped(self.current());
+ } else {
+ val.push('\n');
+ }
+ } else {
+ return Err(Error::NewlineInString(i));
+ }
+ }
+ Some((mut i, ch)) if ch == delim => {
+ if multiline {
+ if !self.eatc(delim) {
+ val.push(delim);
+ continue;
+ }
+ if !self.eatc(delim) {
+ val.push(delim);
+ val.push(delim);
+ continue;
+ }
+ if self.eatc(delim) {
+ val.push(delim);
+ i += 1;
+ }
+ if self.eatc(delim) {
+ val.push(delim);
+ i += 1;
+ }
+ }
+ return Ok(Token::String {
+ src: &self.input[start..self.current()],
+ val: val.into_cow(&self.input[..i]),
+ multiline,
+ });
+ }
+ Some((i, c)) => new_ch(self, &mut val, multiline, i, c)?,
+ None => return Err(Error::UnterminatedString(start)),
+ }
+ }
+ }
+
+ fn literal_string(&mut self, start: usize) -> Result<Token<'a>, Error> {
+ self.read_string('\'', start, &mut |_me, val, _multi, i, ch| {
+ if ch == '\u{09}' || ('\u{20}' <= ch && ch <= '\u{10ffff}' && ch != '\u{7f}') {
+ val.push(ch);
+ Ok(())
+ } else {
+ Err(Error::InvalidCharInString(i, ch))
+ }
+ })
+ }
+
+ fn basic_string(&mut self, start: usize) -> Result<Token<'a>, Error> {
+ self.read_string('"', start, &mut |me, val, multi, i, ch| match ch {
+ '\\' => {
+ val.make_owned(&me.input[..i]);
+ match me.chars.next() {
+ Some((_, '"')) => val.push('"'),
+ Some((_, '\\')) => val.push('\\'),
+ Some((_, 'b')) => val.push('\u{8}'),
+ Some((_, 'f')) => val.push('\u{c}'),
+ Some((_, 'n')) => val.push('\n'),
+ Some((_, 'r')) => val.push('\r'),
+ Some((_, 't')) => val.push('\t'),
+ Some((i, c @ ('u' | 'U'))) => {
+ let len = if c == 'u' { 4 } else { 8 };
+ val.push(me.hex(start, i, len)?);
+ }
+ Some((i, c @ (' ' | '\t' | '\n'))) if multi => {
+ if c != '\n' {
+ while let Some((_, ch)) = me.chars.clone().next() {
+ match ch {
+ ' ' | '\t' => {
+ me.chars.next();
+ continue;
+ }
+ '\n' => {
+ me.chars.next();
+ break;
+ }
+ _ => return Err(Error::InvalidEscape(i, c)),
+ }
+ }
+ }
+ while let Some((_, ch)) = me.chars.clone().next() {
+ match ch {
+ ' ' | '\t' | '\n' => {
+ me.chars.next();
+ }
+ _ => break,
+ }
+ }
+ }
+ Some((i, c)) => return Err(Error::InvalidEscape(i, c)),
+ None => return Err(Error::UnterminatedString(start)),
+ }
+ Ok(())
+ }
+ ch if ch == '\u{09}' || ('\u{20}' <= ch && ch <= '\u{10ffff}' && ch != '\u{7f}') => {
+ val.push(ch);
+ Ok(())
+ }
+ _ => Err(Error::InvalidCharInString(i, ch)),
+ })
+ }
+
+ fn hex(&mut self, start: usize, i: usize, len: usize) -> Result<char, Error> {
+ let mut buf = String::with_capacity(len);
+ for _ in 0..len {
+ match self.one() {
+ Some((_, ch)) if ch as u32 <= 0x7F && ch.is_ascii_hexdigit() => buf.push(ch),
+ Some((i, ch)) => return Err(Error::InvalidHexEscape(i, ch)),
+ None => return Err(Error::UnterminatedString(start)),
+ }
+ }
+ let val = u32::from_str_radix(&buf, 16).unwrap();
+ match char::from_u32(val) {
+ Some(ch) => Ok(ch),
+ None => Err(Error::InvalidEscapeValue(i, val)),
+ }
+ }
+
+ fn keylike(&mut self, start: usize) -> Token<'a> {
+ while let Some((_, ch)) = self.peek_one() {
+ if !is_keylike(ch) {
+ break;
+ }
+ self.one();
+ }
+ Token::Keylike(&self.input[start..self.current()])
+ }
+
+ pub fn substr_offset(&self, s: &'a str) -> usize {
+ assert!(s.len() <= self.input.len());
+ let a = self.input.as_ptr() as usize;
+ let b = s.as_ptr() as usize;
+ assert!(a <= b);
+ b - a
+ }
+
+    /// Calculate the span from `start` to the current position.
+ fn step_span(&mut self, start: usize) -> Span {
+ let end = match self.peek_one() {
+ Some(t) => t.0,
+ None => self.input.len(),
+ };
+ Span { start, end }
+ }
+
+ /// Peek one char without consuming it.
+ fn peek_one(&mut self) -> Option<(usize, char)> {
+ self.chars.clone().next()
+ }
+
+ /// Take one char.
+ pub fn one(&mut self) -> Option<(usize, char)> {
+ self.chars.next()
+ }
+}
+
+impl<'a> Iterator for CrlfFold<'a> {
+ type Item = (usize, char);
+
+ fn next(&mut self) -> Option<(usize, char)> {
+ self.chars.next().map(|(i, c)| {
+ if c == '\r' {
+ let mut attempt = self.chars.clone();
+ if let Some((_, '\n')) = attempt.next() {
+ self.chars = attempt;
+ return (i, '\n');
+ }
+ }
+ (i, c)
+ })
+ }
+}
+
+impl MaybeString {
+ fn push(&mut self, ch: char) {
+ match *self {
+ MaybeString::NotEscaped(..) => {}
+ MaybeString::Owned(ref mut s) => s.push(ch),
+ }
+ }
+
+ fn make_owned(&mut self, input: &str) {
+ match *self {
+ MaybeString::NotEscaped(start) => {
+ *self = MaybeString::Owned(input[start..].to_owned());
+ }
+ MaybeString::Owned(..) => {}
+ }
+ }
+
+ fn into_cow(self, input: &str) -> Cow<str> {
+ match self {
+ MaybeString::NotEscaped(start) => Cow::Borrowed(&input[start..]),
+ MaybeString::Owned(s) => Cow::Owned(s),
+ }
+ }
+}
+
+fn is_keylike(ch: char) -> bool {
+ ('A' <= ch && ch <= 'Z')
+ || ('a' <= ch && ch <= 'z')
+ || ('0' <= ch && ch <= '9')
+ || ch == '-'
+ || ch == '_'
+}
+
+impl<'a> Token<'a> {
+ pub fn describe(&self) -> &'static str {
+ match *self {
+ Token::Keylike(_) => "an identifier",
+ Token::Equals => "an equals",
+ Token::Period => "a period",
+ Token::Comment(_) => "a comment",
+ Token::Newline => "a newline",
+ Token::Whitespace(_) => "whitespace",
+ Token::Comma => "a comma",
+ Token::RightBrace => "a right brace",
+ Token::LeftBrace => "a left brace",
+ Token::RightBracket => "a right bracket",
+ Token::LeftBracket => "a left bracket",
+ Token::String { multiline, .. } => {
+ if multiline {
+ "a multiline string"
+ } else {
+ "a string"
+ }
+ }
+ Token::Colon => "a colon",
+ Token::Plus => "a plus",
+ }
+ }
+}
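
To make the tokenizer's behavior concrete, a hedged, test-style sketch follows. It assumes `tokens` is wired up as a private module of the crate (so it would only compile in-tree), and it exercises what the code above shows: the constructor eats a leading UTF-8 BOM, `CrlfFold` folds `\r\n` into a single newline, and bare keys and digits both come back as `Token::Keylike`.

// Hypothetical in-crate unit test; `Tokenizer` lives in the private
// src/tokens.rs module, so this is only a sketch of its behavior.
#[cfg(test)]
mod tokenizer_sketch {
    use crate::tokens::{Token, Tokenizer};

    #[test]
    fn folds_crlf_and_skips_bom() {
        // The constructor eats a leading BOM; "\r\n" is folded into Newline.
        let mut t = Tokenizer::new("\u{feff}key = 1\r\n");
        assert_eq!(t.next().unwrap().unwrap().1, Token::Keylike("key"));
        assert_eq!(t.next().unwrap().unwrap().1, Token::Whitespace(" "));
        assert_eq!(t.next().unwrap().unwrap().1, Token::Equals);
        assert_eq!(t.next().unwrap().unwrap().1, Token::Whitespace(" "));
        assert_eq!(t.next().unwrap().unwrap().1, Token::Keylike("1"));
        assert_eq!(t.next().unwrap().unwrap().1, Token::Newline);
        assert!(t.next().unwrap().is_none());
    }
}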
diff --git a/vendor/basic-toml/tests/README.md b/vendor/basic-toml/tests/README.md
new file mode 100644
index 000000000..ebbc01ccf
--- /dev/null
+++ b/vendor/basic-toml/tests/README.md
@@ -0,0 +1 @@
+Tests are from https://github.com/BurntSushi/toml-test
diff --git a/vendor/basic-toml/tests/datetime.rs b/vendor/basic-toml/tests/datetime.rs
new file mode 100644
index 000000000..0672a3ba7
--- /dev/null
+++ b/vendor/basic-toml/tests/datetime.rs
@@ -0,0 +1,142 @@
+use serde_json::Value;
+
+macro_rules! bad {
+ ($toml:expr, $msg:expr) => {
+ match basic_toml::from_str::<Value>($toml) {
+ Ok(s) => panic!("parsed to: {:#?}", s),
+ Err(e) => assert_eq!(e.to_string(), $msg),
+ }
+ };
+}
+
+#[test]
+fn times() {
+ fn multi_bad(s: &str, msg: &str) {
+ bad!(s, msg);
+ bad!(&s.replace('T', " "), msg);
+ bad!(&s.replace('T', "t"), msg);
+ bad!(&s.replace('Z', "z"), msg);
+ }
+
+ multi_bad(
+ "foo = 1997-09-09T09:09:09Z",
+ "invalid number at line 1 column 7",
+ );
+ multi_bad(
+ "foo = 1997-09-09T09:09:09+09:09",
+ "invalid number at line 1 column 7",
+ );
+ multi_bad(
+ "foo = 1997-09-09T09:09:09-09:09",
+ "invalid number at line 1 column 7",
+ );
+ multi_bad(
+ "foo = 1997-09-09T09:09:09",
+ "invalid number at line 1 column 7",
+ );
+ multi_bad("foo = 1997-09-09", "invalid number at line 1 column 7");
+ bad!("foo = 1997-09-09 ", "invalid number at line 1 column 7");
+ bad!(
+ "foo = 1997-09-09 # comment",
+ "invalid number at line 1 column 7"
+ );
+ multi_bad("foo = 09:09:09", "invalid number at line 1 column 8");
+ multi_bad(
+ "foo = 1997-09-09T09:09:09.09Z",
+ "invalid number at line 1 column 7",
+ );
+ multi_bad(
+ "foo = 1997-09-09T09:09:09.09+09:09",
+ "invalid number at line 1 column 7",
+ );
+ multi_bad(
+ "foo = 1997-09-09T09:09:09.09-09:09",
+ "invalid number at line 1 column 7",
+ );
+ multi_bad(
+ "foo = 1997-09-09T09:09:09.09",
+ "invalid number at line 1 column 7",
+ );
+ multi_bad("foo = 09:09:09.09", "invalid number at line 1 column 8");
+}
+
+#[test]
+fn bad_times() {
+ bad!("foo = 199-09-09", "invalid number at line 1 column 7");
+ bad!("foo = 199709-09", "invalid number at line 1 column 7");
+ bad!("foo = 1997-9-09", "invalid number at line 1 column 7");
+ bad!("foo = 1997-09-9", "invalid number at line 1 column 7");
+ bad!(
+ "foo = 1997-09-0909:09:09",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T09:09:09.",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = T",
+ "invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
+ );
+ bad!(
+ "foo = T.",
+ "invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
+ );
+ bad!(
+ "foo = TZ",
+ "invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T09:09:09.09+",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T09:09:09.09+09",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T09:09:09.09+09:9",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T09:09:09.09+0909",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T09:09:09.09-",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T09:09:09.09-09",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T09:09:09.09-09:9",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T09:09:09.09-0909",
+ "invalid number at line 1 column 7"
+ );
+
+ bad!(
+ "foo = 1997-00-09T09:09:09.09Z",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-00T09:09:09.09Z",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T30:09:09.09Z",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T12:69:09.09Z",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 1997-09-09T12:09:69.09Z",
+ "invalid number at line 1 column 7"
+ );
+}
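
These tests indicate that bare TOML datetimes are rejected as invalid numbers. A minimal workaround sketch (the `Entry` struct is invented purely for illustration) carries such values as quoted strings instead, which the parser accepts:

use serde::Deserialize;

// `Entry` is a hypothetical struct for illustration only.
#[derive(Deserialize)]
struct Entry {
    // Datetimes are kept as plain strings because the parser has no
    // datetime type of its own.
    timestamp: String,
}

fn main() {
    let entry: Entry =
        basic_toml::from_str(r#"timestamp = "1997-09-09T09:09:09Z""#).unwrap();
    assert_eq!(entry.timestamp, "1997-09-09T09:09:09Z");
}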
diff --git a/vendor/basic-toml/tests/de-errors.rs b/vendor/basic-toml/tests/de-errors.rs
new file mode 100644
index 000000000..aac0c432e
--- /dev/null
+++ b/vendor/basic-toml/tests/de-errors.rs
@@ -0,0 +1,350 @@
+#![allow(clippy::too_many_lines)]
+
+use serde::{de, Deserialize};
+use std::fmt;
+
+macro_rules! bad {
+ ($toml:expr, $ty:ty, $msg:expr) => {
+ match basic_toml::from_str::<$ty>($toml) {
+ Ok(s) => panic!("parsed to: {:#?}", s),
+ Err(e) => assert_eq!(e.to_string(), $msg),
+ }
+ };
+}
+
+#[derive(Debug, Deserialize, PartialEq)]
+struct Parent<T> {
+ p_a: T,
+ p_b: Vec<Child<T>>,
+}
+
+#[derive(Debug, Deserialize, PartialEq)]
+#[serde(deny_unknown_fields)]
+struct Child<T> {
+ c_a: T,
+ c_b: T,
+}
+
+#[derive(Debug, PartialEq)]
+enum CasedString {
+ Lowercase(String),
+ Uppercase(String),
+}
+
+impl<'de> de::Deserialize<'de> for CasedString {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: de::Deserializer<'de>,
+ {
+ struct CasedStringVisitor;
+
+ impl<'de> de::Visitor<'de> for CasedStringVisitor {
+ type Value = CasedString;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("a string")
+ }
+
+ fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
+ where
+ E: de::Error,
+ {
+ if s.is_empty() {
+ Err(de::Error::invalid_length(0, &"a non-empty string"))
+ } else if s.chars().all(|x| x.is_ascii_lowercase()) {
+ Ok(CasedString::Lowercase(s.to_string()))
+ } else if s.chars().all(|x| x.is_ascii_uppercase()) {
+ Ok(CasedString::Uppercase(s.to_string()))
+ } else {
+ Err(de::Error::invalid_value(
+ de::Unexpected::Str(s),
+ &"all lowercase or all uppercase",
+ ))
+ }
+ }
+ }
+
+ deserializer.deserialize_any(CasedStringVisitor)
+ }
+}
+
+#[test]
+fn custom_errors() {
+ basic_toml::from_str::<Parent<CasedString>>(
+ "
+ p_a = 'a'
+ p_b = [{c_a = 'a', c_b = 'c'}]
+ ",
+ )
+ .unwrap();
+
+    // Custom error at p_a value.
+ bad!(
+ "
+ p_a = ''
+ # ^
+ ",
+ Parent<CasedString>,
+ "invalid length 0, expected a non-empty string for key `p_a` at line 2 column 19"
+ );
+
+ // Missing field in table.
+ bad!(
+ "
+ p_a = 'a'
+ # ^
+ ",
+ Parent<CasedString>,
+ "missing field `p_b` at line 1 column 1"
+ );
+
+ // Invalid type in p_b.
+ bad!(
+ "
+ p_a = 'a'
+ p_b = 1
+ # ^
+ ",
+ Parent<CasedString>,
+ "invalid type: integer `1`, expected a sequence for key `p_b` at line 3 column 19"
+ );
+
+ // Sub-table in Vec is missing a field.
+ bad!(
+ "
+ p_a = 'a'
+ p_b = [
+ {c_a = 'a'}
+ # ^
+ ]
+ ",
+ Parent<CasedString>,
+ "missing field `c_b` for key `p_b` at line 4 column 17"
+ );
+
+ // Sub-table in Vec has a field with a bad value.
+ bad!(
+ "
+ p_a = 'a'
+ p_b = [
+ {c_a = 'a', c_b = '*'}
+ # ^
+ ]
+ ",
+ Parent<CasedString>,
+ "invalid value: string \"*\", expected all lowercase or all uppercase for key `p_b` at line 4 column 35"
+ );
+
+ // Sub-table in Vec is missing a field.
+ bad!(
+ "
+ p_a = 'a'
+ p_b = [
+ {c_a = 'a', c_b = 'b'},
+ {c_a = 'aa'}
+ # ^
+ ]
+ ",
+ Parent<CasedString>,
+ "missing field `c_b` for key `p_b` at line 5 column 17"
+ );
+
+ // Sub-table in the middle of a Vec is missing a field.
+ bad!(
+ "
+ p_a = 'a'
+ p_b = [
+ {c_a = 'a', c_b = 'b'},
+ {c_a = 'aa'},
+ # ^
+ {c_a = 'aaa', c_b = 'bbb'},
+ ]
+ ",
+ Parent<CasedString>,
+ "missing field `c_b` for key `p_b` at line 5 column 17"
+ );
+
+ // Sub-table in the middle of a Vec has a field with a bad value.
+ bad!(
+ "
+ p_a = 'a'
+ p_b = [
+ {c_a = 'a', c_b = 'b'},
+ {c_a = 'aa', c_b = 1},
+ # ^
+ {c_a = 'aaa', c_b = 'bbb'},
+ ]
+ ",
+ Parent<CasedString>,
+ "invalid type: integer `1`, expected a string for key `p_b` at line 5 column 36"
+ );
+
+ // Sub-table in the middle of a Vec has an extra field.
+ // FIXME: This location could be better.
+ bad!(
+ "
+ p_a = 'a'
+ p_b = [
+ {c_a = 'a', c_b = 'b'},
+ {c_a = 'aa', c_b = 'bb', c_d = 'd'},
+ # ^
+ {c_a = 'aaa', c_b = 'bbb'},
+ {c_a = 'aaaa', c_b = 'bbbb'},
+ ]
+ ",
+ Parent<CasedString>,
+ "unknown field `c_d`, expected `c_a` or `c_b` for key `p_b` at line 5 column 17"
+ );
+
+ // Sub-table in the middle of a Vec is missing a field.
+ // FIXME: This location is pretty off.
+ bad!(
+ "
+ p_a = 'a'
+ [[p_b]]
+ c_a = 'a'
+ c_b = 'b'
+ [[p_b]]
+ c_a = 'aa'
+ # c_b = 'bb' # <- missing field
+ [[p_b]]
+ c_a = 'aaa'
+ c_b = 'bbb'
+ [[p_b]]
+ # ^
+ c_a = 'aaaa'
+ c_b = 'bbbb'
+ ",
+ Parent<CasedString>,
+ "missing field `c_b` for key `p_b` at line 12 column 13"
+ );
+
+ // Sub-table in the middle of a Vec has a field with a bad value.
+ bad!(
+ "
+ p_a = 'a'
+ [[p_b]]
+ c_a = 'a'
+ c_b = 'b'
+ [[p_b]]
+ c_a = 'aa'
+ c_b = '*'
+ # ^
+ [[p_b]]
+ c_a = 'aaa'
+ c_b = 'bbb'
+ ",
+ Parent<CasedString>,
+ "invalid value: string \"*\", expected all lowercase or all uppercase for key `p_b.c_b` at line 8 column 19"
+ );
+
+ // Sub-table in the middle of a Vec has an extra field.
+ // FIXME: This location is pretty off.
+ bad!(
+ "
+ p_a = 'a'
+ [[p_b]]
+ c_a = 'a'
+ c_b = 'b'
+ [[p_b]]
+ c_a = 'aa'
+ c_d = 'dd' # unknown field
+ [[p_b]]
+ c_a = 'aaa'
+ c_b = 'bbb'
+ [[p_b]]
+ # ^
+ c_a = 'aaaa'
+ c_b = 'bbbb'
+ ",
+ Parent<CasedString>,
+ "unknown field `c_d`, expected `c_a` or `c_b` for key `p_b` at line 12 column 13"
+ );
+}
+
+#[test]
+fn serde_derive_deserialize_errors() {
+ bad!(
+ "
+ p_a = ''
+ # ^
+ ",
+ Parent<String>,
+ "missing field `p_b` at line 1 column 1"
+ );
+
+ bad!(
+ "
+ p_a = ''
+ p_b = [
+ {c_a = ''}
+ # ^
+ ]
+ ",
+ Parent<String>,
+ "missing field `c_b` for key `p_b` at line 4 column 17"
+ );
+
+ bad!(
+ "
+ p_a = ''
+ p_b = [
+ {c_a = '', c_b = 1}
+ # ^
+ ]
+ ",
+ Parent<String>,
+ "invalid type: integer `1`, expected a string for key `p_b` at line 4 column 34"
+ );
+
+ // FIXME: This location could be better.
+ bad!(
+ "
+ p_a = ''
+ p_b = [
+ {c_a = '', c_b = '', c_d = ''},
+ # ^
+ ]
+ ",
+ Parent<String>,
+ "unknown field `c_d`, expected `c_a` or `c_b` for key `p_b` at line 4 column 17"
+ );
+
+ bad!(
+ "
+ p_a = 'a'
+ p_b = [
+ {c_a = '', c_b = 1, c_d = ''},
+ # ^
+ ]
+ ",
+ Parent<String>,
+ "invalid type: integer `1`, expected a string for key `p_b` at line 4 column 34"
+ );
+}
+
+#[test]
+fn error_handles_crlf() {
+ bad!(
+ "\r\n\
+ [t1]\r\n\
+ [t2]\r\n\
+ a = 1\r\n\
+ . = 2\r\n\
+ ",
+ serde_json::Value,
+ "expected a table key, found a period at line 5 column 1"
+ );
+
+ // Should be the same as above.
+ bad!(
+ "\n\
+ [t1]\n\
+ [t2]\n\
+ a = 1\n\
+ . = 2\n\
+ ",
+ serde_json::Value,
+ "expected a table key, found a period at line 5 column 1"
+ );
+}
diff --git a/vendor/basic-toml/tests/display-tricky.rs b/vendor/basic-toml/tests/display-tricky.rs
new file mode 100644
index 000000000..274b380e9
--- /dev/null
+++ b/vendor/basic-toml/tests/display-tricky.rs
@@ -0,0 +1,53 @@
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Recipe {
+ pub name: String,
+ pub description: Option<String>,
+ #[serde(default)]
+ pub modules: Vec<Modules>,
+ #[serde(default)]
+ pub packages: Vec<Packages>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Modules {
+ pub name: String,
+ pub version: Option<String>,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct Packages {
+ pub name: String,
+ pub version: Option<String>,
+}
+
+#[test]
+fn both_ends() {
+ let recipe_works = basic_toml::from_str::<Recipe>(
+ r#"
+ name = "testing"
+ description = "example"
+ modules = []
+
+ [[packages]]
+ name = "base"
+ "#,
+ )
+ .unwrap();
+ basic_toml::to_string(&recipe_works).unwrap();
+
+ let recipe_fails = basic_toml::from_str::<Recipe>(
+ r#"
+ name = "testing"
+ description = "example"
+ packages = []
+
+ [[modules]]
+ name = "base"
+ "#,
+ )
+ .unwrap();
+ let err = basic_toml::to_string(&recipe_fails).unwrap_err();
+ assert_eq!(err.to_string(), "values must be emitted before tables");
+}
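
The failing case above arises because the non-empty `modules` is emitted as `[[modules]]` tables, after which the empty `packages` would have to be written as the plain value `packages = []`, which the serializer forbids. One possible way around this, not exercised by the test, is to skip empty vectors during serialization; the sketch below uses hypothetical `Named` and `Recipe2` types:

use serde::{Deserialize, Serialize};

// Hypothetical types for illustration, not part of the test above.
#[derive(Serialize, Deserialize)]
struct Named {
    name: String,
}

#[derive(Serialize, Deserialize)]
struct Recipe2 {
    name: String,
    // Empty vectors are skipped entirely, so neither field can turn into a
    // stray `packages = []` value that lands after an `[[modules]]` table.
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    modules: Vec<Named>,
    #[serde(default, skip_serializing_if = "Vec::is_empty")]
    packages: Vec<Named>,
}

fn main() {
    let recipe = Recipe2 {
        name: "testing".to_owned(),
        modules: vec![Named { name: "base".to_owned() }],
        packages: Vec::new(),
    };
    // With the empty `packages` skipped, serialization is expected to succeed.
    assert!(basic_toml::to_string(&recipe).is_ok());
}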
diff --git a/vendor/basic-toml/tests/enum_external_deserialize.rs b/vendor/basic-toml/tests/enum_external_deserialize.rs
new file mode 100644
index 000000000..337fddf80
--- /dev/null
+++ b/vendor/basic-toml/tests/enum_external_deserialize.rs
@@ -0,0 +1,30 @@
+#![allow(clippy::wildcard_imports)]
+
+use serde::Deserialize;
+
+#[derive(Debug, Deserialize, PartialEq)]
+struct Struct {
+ value: Enum,
+}
+
+#[derive(Debug, Deserialize, PartialEq)]
+enum Enum {
+ Variant,
+}
+
+#[test]
+fn unknown_variant() {
+ let error = basic_toml::from_str::<Struct>("value = \"NonExistent\"").unwrap_err();
+
+ assert_eq!(
+ error.to_string(),
+ "unknown variant `NonExistent`, expected `Variant` for key `value` at line 1 column 1"
+ );
+}
+
+#[test]
+fn from_str() {
+ let s = basic_toml::from_str::<Struct>("value = \"Variant\"").unwrap();
+
+ assert_eq!(Enum::Variant, s.value);
+}
diff --git a/vendor/basic-toml/tests/float.rs b/vendor/basic-toml/tests/float.rs
new file mode 100644
index 000000000..6923456fc
--- /dev/null
+++ b/vendor/basic-toml/tests/float.rs
@@ -0,0 +1,81 @@
+#![allow(clippy::float_cmp)]
+
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
+
+#[rustfmt::skip] // rustfmt appears to have a bug that keeps this from converging
+macro_rules! float_inf_tests {
+ ($ty:ty) => {{
+ #[derive(Serialize, Deserialize)]
+ struct S {
+ sf1: $ty,
+ sf2: $ty,
+ sf3: $ty,
+ sf4: $ty,
+ sf5: $ty,
+ sf6: $ty,
+ sf7: $ty,
+ sf8: $ty,
+ }
+ let inf: S = basic_toml::from_str(
+ r"
+ # infinity
+ sf1 = inf # positive infinity
+ sf2 = +inf # positive infinity
+ sf3 = -inf # negative infinity
+
+ # not a number
+ sf4 = nan # actual sNaN/qNaN encoding is implementation specific
+ sf5 = +nan # same as `nan`
+ sf6 = -nan # valid, actual encoding is implementation specific
+
+ # zero
+ sf7 = +0.0
+ sf8 = -0.0
+ ",
+ )
+ .expect("Parse infinities.");
+
+ assert!(inf.sf1.is_infinite());
+ assert!(inf.sf1.is_sign_positive());
+ assert!(inf.sf2.is_infinite());
+ assert!(inf.sf2.is_sign_positive());
+ assert!(inf.sf3.is_infinite());
+ assert!(inf.sf3.is_sign_negative());
+
+ assert!(inf.sf4.is_nan());
+ assert!(inf.sf4.is_sign_positive());
+ assert!(inf.sf5.is_nan());
+ assert!(inf.sf5.is_sign_positive());
+ assert!(inf.sf6.is_nan());
+ assert!(inf.sf6.is_sign_negative());
+
+ assert_eq!(inf.sf7, 0.0);
+ assert!(inf.sf7.is_sign_positive());
+ assert_eq!(inf.sf8, 0.0);
+ assert!(inf.sf8.is_sign_negative());
+
+ let s = basic_toml::to_string(&inf).unwrap();
+ assert_eq!(
+ s,
+ "\
+sf1 = inf
+sf2 = inf
+sf3 = -inf
+sf4 = nan
+sf5 = nan
+sf6 = -nan
+sf7 = 0.0
+sf8 = -0.0
+"
+ );
+
+ basic_toml::from_str::<Value>(&s).expect("roundtrip");
+ }};
+}
+
+#[test]
+fn float_inf() {
+ float_inf_tests!(f32);
+ float_inf_tests!(f64);
+}
diff --git a/vendor/basic-toml/tests/formatting.rs b/vendor/basic-toml/tests/formatting.rs
new file mode 100644
index 000000000..5e3fb794a
--- /dev/null
+++ b/vendor/basic-toml/tests/formatting.rs
@@ -0,0 +1,53 @@
+use basic_toml::to_string;
+use serde::{Deserialize, Serialize};
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+struct User {
+ pub name: String,
+ pub surname: String,
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+struct Users {
+ pub user: Vec<User>,
+}
+
+#[derive(Debug, Clone, Hash, PartialEq, Eq, Serialize, Deserialize)]
+struct TwoUsers {
+ pub user0: User,
+ pub user1: User,
+}
+
+#[test]
+fn no_unnecessary_newlines_array() {
+ assert!(!to_string(&Users {
+ user: vec![
+ User {
+ name: "John".to_string(),
+ surname: "Doe".to_string(),
+ },
+ User {
+ name: "Jane".to_string(),
+ surname: "Dough".to_string(),
+ },
+ ],
+ })
+ .unwrap()
+ .starts_with('\n'));
+}
+
+#[test]
+fn no_unnecessary_newlines_table() {
+ assert!(!to_string(&TwoUsers {
+ user0: User {
+ name: "John".to_string(),
+ surname: "Doe".to_string(),
+ },
+ user1: User {
+ name: "Jane".to_string(),
+ surname: "Dough".to_string(),
+ },
+ })
+ .unwrap()
+ .starts_with('\n'));
+}
diff --git a/vendor/basic-toml/tests/invalid-encoder/array-mixed-types-ints-and-floats.json b/vendor/basic-toml/tests/invalid-encoder/array-mixed-types-ints-and-floats.json
new file mode 100644
index 000000000..2d42ead67
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid-encoder/array-mixed-types-ints-and-floats.json
@@ -0,0 +1,15 @@
+{
+ "ints-and-floats": {
+ "type": "array",
+ "value": [
+ {
+ "type": "integer",
+ "value": "1"
+ },
+ {
+ "type": "float",
+ "value": "1.1"
+ }
+ ]
+ }
+}
diff --git a/vendor/basic-toml/tests/invalid-misc.rs b/vendor/basic-toml/tests/invalid-misc.rs
new file mode 100644
index 000000000..a02bf045a
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid-misc.rs
@@ -0,0 +1,48 @@
+use serde_json::Value;
+
+macro_rules! bad {
+ ($toml:expr, $msg:expr) => {
+ match basic_toml::from_str::<Value>($toml) {
+ Ok(s) => panic!("parsed to: {:#?}", s),
+ Err(e) => assert_eq!(e.to_string(), $msg),
+ }
+ };
+}
+
+#[test]
+fn bad() {
+ bad!("a = 01", "invalid number at line 1 column 6");
+ bad!("a = 1__1", "invalid number at line 1 column 5");
+ bad!("a = 1_", "invalid number at line 1 column 5");
+ bad!("''", "expected an equals, found eof at line 1 column 3");
+ bad!("a = 9e99999", "invalid number at line 1 column 5");
+
+ bad!(
+ "a = \"\u{7f}\"",
+ "invalid character in string: `\\u{7f}` at line 1 column 6"
+ );
+ bad!(
+ "a = '\u{7f}'",
+ "invalid character in string: `\\u{7f}` at line 1 column 6"
+ );
+
+ bad!("a = -0x1", "invalid number at line 1 column 5");
+ bad!("a = 0x-1", "invalid number at line 1 column 7");
+
+ // Dotted keys.
+ bad!(
+ "a.b.c = 1
+ a.b = 2
+ ",
+ "duplicate key: `b` for key `a` at line 2 column 12"
+ );
+ bad!(
+ "a = 1
+ a.b = 2",
+ "dotted key attempted to extend non-table type at line 1 column 5"
+ );
+ bad!(
+ "a = {k1 = 1, k1.name = \"joe\"}",
+ "dotted key attempted to extend non-table type at line 1 column 11"
+ );
+}
diff --git a/vendor/basic-toml/tests/invalid.rs b/vendor/basic-toml/tests/invalid.rs
new file mode 100644
index 000000000..cb5ef78c4
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid.rs
@@ -0,0 +1,226 @@
+use serde_json::Value;
+
+macro_rules! bad {
+ ($toml:expr, $msg:expr) => {
+ match basic_toml::from_str::<Value>($toml) {
+ Ok(s) => panic!("parsed to: {:#?}", s),
+ Err(e) => assert_eq!(e.to_string(), $msg),
+ }
+ };
+}
+
+macro_rules! test( ($name:ident, $s:expr, $msg:expr) => (
+ #[test]
+ fn $name() { bad!($s, $msg); }
+) );
+
+test!(
+ datetime_malformed_no_leads,
+ include_str!("invalid/datetime-malformed-no-leads.toml"),
+ "invalid number at line 1 column 12"
+);
+test!(
+ datetime_malformed_no_secs,
+ include_str!("invalid/datetime-malformed-no-secs.toml"),
+ "invalid number at line 1 column 11"
+);
+test!(
+ datetime_malformed_no_t,
+ include_str!("invalid/datetime-malformed-no-t.toml"),
+ "invalid number at line 1 column 8"
+);
+test!(
+ datetime_malformed_with_milli,
+ include_str!("invalid/datetime-malformed-with-milli.toml"),
+ "invalid number at line 1 column 14"
+);
+test!(
+ duplicate_key_table,
+ include_str!("invalid/duplicate-key-table.toml"),
+ "duplicate key: `type` for key `fruit` at line 4 column 8"
+);
+test!(
+ duplicate_keys,
+ include_str!("invalid/duplicate-keys.toml"),
+ "duplicate key: `dupe` at line 2 column 1"
+);
+test!(
+ duplicate_table,
+ include_str!("invalid/duplicate-table.toml"),
+ "redefinition of table `dependencies` for key `dependencies` at line 7 column 1"
+);
+test!(
+ duplicate_tables,
+ include_str!("invalid/duplicate-tables.toml"),
+ "redefinition of table `a` for key `a` at line 2 column 1"
+);
+test!(
+ empty_implicit_table,
+ include_str!("invalid/empty-implicit-table.toml"),
+ "expected a table key, found a period at line 1 column 10"
+);
+test!(
+ empty_table,
+ include_str!("invalid/empty-table.toml"),
+ "expected a table key, found a right bracket at line 1 column 2"
+);
+test!(
+ float_no_leading_zero,
+ include_str!("invalid/float-no-leading-zero.toml"),
+ "expected a value, found a period at line 1 column 10"
+);
+test!(
+ float_no_suffix,
+ include_str!("invalid/float-no-suffix.toml"),
+ "invalid number at line 1 column 5"
+);
+test!(
+ float_no_trailing_digits,
+ include_str!("invalid/float-no-trailing-digits.toml"),
+ "invalid number at line 1 column 12"
+);
+test!(
+ key_after_array,
+ include_str!("invalid/key-after-array.toml"),
+ "expected newline, found an identifier at line 1 column 14"
+);
+test!(
+ key_after_table,
+ include_str!("invalid/key-after-table.toml"),
+ "expected newline, found an identifier at line 1 column 11"
+);
+test!(
+ key_empty,
+ include_str!("invalid/key-empty.toml"),
+ "expected a table key, found an equals at line 1 column 2"
+);
+test!(
+ key_hash,
+ include_str!("invalid/key-hash.toml"),
+ "expected an equals, found a comment at line 1 column 2"
+);
+test!(
+ key_newline,
+ include_str!("invalid/key-newline.toml"),
+ "expected an equals, found a newline at line 1 column 2"
+);
+test!(
+ key_open_bracket,
+ include_str!("invalid/key-open-bracket.toml"),
+ "expected a right bracket, found an equals at line 1 column 6"
+);
+test!(
+ key_single_open_bracket,
+ include_str!("invalid/key-single-open-bracket.toml"),
+ "expected a table key, found eof at line 1 column 2"
+);
+test!(
+ key_space,
+ include_str!("invalid/key-space.toml"),
+ "expected an equals, found an identifier at line 1 column 3"
+);
+test!(
+ key_start_bracket,
+ include_str!("invalid/key-start-bracket.toml"),
+ "expected a right bracket, found an equals at line 2 column 6"
+);
+test!(
+ key_two_equals,
+ include_str!("invalid/key-two-equals.toml"),
+ "expected a value, found an equals at line 1 column 6"
+);
+test!(
+ string_bad_byte_escape,
+ include_str!("invalid/string-bad-byte-escape.toml"),
+ "invalid escape character in string: `x` at line 1 column 13"
+);
+test!(
+ string_bad_escape,
+ include_str!("invalid/string-bad-escape.toml"),
+ "invalid escape character in string: `a` at line 1 column 42"
+);
+test!(
+ string_bad_line_ending_escape,
+ include_str!("invalid/string-bad-line-ending-escape.toml"),
+ "invalid escape character in string: ` ` at line 2 column 79"
+);
+test!(
+ string_byte_escapes,
+ include_str!("invalid/string-byte-escapes.toml"),
+ "invalid escape character in string: `x` at line 1 column 12"
+);
+test!(
+ string_no_close,
+ include_str!("invalid/string-no-close.toml"),
+ "newline in string found at line 1 column 42"
+);
+test!(
+ table_array_implicit,
+ include_str!("invalid/table-array-implicit.toml"),
+ "table redefined as array for key `albums` at line 13 column 1"
+);
+test!(
+ table_array_malformed_bracket,
+ include_str!("invalid/table-array-malformed-bracket.toml"),
+ "expected a right bracket, found a newline at line 1 column 10"
+);
+test!(
+ table_array_malformed_empty,
+ include_str!("invalid/table-array-malformed-empty.toml"),
+ "expected a table key, found a right bracket at line 1 column 3"
+);
+test!(
+ table_empty,
+ include_str!("invalid/table-empty.toml"),
+ "expected a table key, found a right bracket at line 1 column 2"
+);
+test!(
+ table_nested_brackets_close,
+ include_str!("invalid/table-nested-brackets-close.toml"),
+ "expected newline, found an identifier at line 1 column 4"
+);
+test!(
+ table_nested_brackets_open,
+ include_str!("invalid/table-nested-brackets-open.toml"),
+ "expected a right bracket, found a left bracket at line 1 column 3"
+);
+test!(
+ table_whitespace,
+ include_str!("invalid/table-whitespace.toml"),
+ "expected a right bracket, found an identifier at line 1 column 10"
+);
+test!(
+ table_with_pound,
+ include_str!("invalid/table-with-pound.toml"),
+ "expected a right bracket, found a comment at line 1 column 5"
+);
+test!(
+ text_after_array_entries,
+ include_str!("invalid/text-after-array-entries.toml"),
+ "invalid TOML value, did you mean to use a quoted string? at line 2 column 46"
+);
+test!(
+ text_after_integer,
+ include_str!("invalid/text-after-integer.toml"),
+ "expected newline, found an identifier at line 1 column 13"
+);
+test!(
+ text_after_string,
+ include_str!("invalid/text-after-string.toml"),
+ "expected newline, found an identifier at line 1 column 41"
+);
+test!(
+ text_after_table,
+ include_str!("invalid/text-after-table.toml"),
+ "expected newline, found an identifier at line 1 column 9"
+);
+test!(
+ text_before_array_separator,
+ include_str!("invalid/text-before-array-separator.toml"),
+ "expected a right bracket, found an identifier at line 2 column 46"
+);
+test!(
+ text_in_array,
+ include_str!("invalid/text-in-array.toml"),
+ "invalid TOML value, did you mean to use a quoted string? at line 3 column 3"
+);
diff --git a/vendor/basic-toml/tests/invalid/datetime-malformed-no-leads.toml b/vendor/basic-toml/tests/invalid/datetime-malformed-no-leads.toml
new file mode 100644
index 000000000..123f173be
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/datetime-malformed-no-leads.toml
@@ -0,0 +1 @@
+no-leads = 1987-7-05T17:45:00Z
diff --git a/vendor/basic-toml/tests/invalid/datetime-malformed-no-secs.toml b/vendor/basic-toml/tests/invalid/datetime-malformed-no-secs.toml
new file mode 100644
index 000000000..ba9390076
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/datetime-malformed-no-secs.toml
@@ -0,0 +1 @@
+no-secs = 1987-07-05T17:45Z
diff --git a/vendor/basic-toml/tests/invalid/datetime-malformed-no-t.toml b/vendor/basic-toml/tests/invalid/datetime-malformed-no-t.toml
new file mode 100644
index 000000000..617e3c56d
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/datetime-malformed-no-t.toml
@@ -0,0 +1 @@
+no-t = 1987-07-0517:45:00Z
diff --git a/vendor/basic-toml/tests/invalid/datetime-malformed-with-milli.toml b/vendor/basic-toml/tests/invalid/datetime-malformed-with-milli.toml
new file mode 100644
index 000000000..eef792f34
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/datetime-malformed-with-milli.toml
@@ -0,0 +1 @@
+with-milli = 1987-07-5T17:45:00.12Z
diff --git a/vendor/basic-toml/tests/invalid/duplicate-key-table.toml b/vendor/basic-toml/tests/invalid/duplicate-key-table.toml
new file mode 100644
index 000000000..cedf05fc5
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/duplicate-key-table.toml
@@ -0,0 +1,5 @@
+[fruit]
+type = "apple"
+
+[fruit.type]
+apple = "yes"
diff --git a/vendor/basic-toml/tests/invalid/duplicate-keys.toml b/vendor/basic-toml/tests/invalid/duplicate-keys.toml
new file mode 100644
index 000000000..9b5aee0e5
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/duplicate-keys.toml
@@ -0,0 +1,2 @@
+dupe = false
+dupe = true
diff --git a/vendor/basic-toml/tests/invalid/duplicate-table.toml b/vendor/basic-toml/tests/invalid/duplicate-table.toml
new file mode 100644
index 000000000..5bd2571e6
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/duplicate-table.toml
@@ -0,0 +1,8 @@
+[dependencies.openssl-sys]
+version = "0.5.2"
+
+[dependencies]
+libc = "0.1"
+
+[dependencies]
+bitflags = "0.1.1"
diff --git a/vendor/basic-toml/tests/invalid/duplicate-tables.toml b/vendor/basic-toml/tests/invalid/duplicate-tables.toml
new file mode 100644
index 000000000..8ddf49b4e
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/duplicate-tables.toml
@@ -0,0 +1,2 @@
+[a]
+[a]
diff --git a/vendor/basic-toml/tests/invalid/empty-implicit-table.toml b/vendor/basic-toml/tests/invalid/empty-implicit-table.toml
new file mode 100644
index 000000000..0cc36d0d2
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/empty-implicit-table.toml
@@ -0,0 +1 @@
+[naughty..naughty]
diff --git a/vendor/basic-toml/tests/invalid/empty-table.toml b/vendor/basic-toml/tests/invalid/empty-table.toml
new file mode 100644
index 000000000..fe51488c7
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/empty-table.toml
@@ -0,0 +1 @@
+[]
diff --git a/vendor/basic-toml/tests/invalid/float-no-leading-zero.toml b/vendor/basic-toml/tests/invalid/float-no-leading-zero.toml
new file mode 100644
index 000000000..cab76bfd1
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/float-no-leading-zero.toml
@@ -0,0 +1,2 @@
+answer = .12345
+neganswer = -.12345
diff --git a/vendor/basic-toml/tests/invalid/float-no-suffix.toml b/vendor/basic-toml/tests/invalid/float-no-suffix.toml
new file mode 100644
index 000000000..76106de75
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/float-no-suffix.toml
@@ -0,0 +1 @@
+a = 1.2f
diff --git a/vendor/basic-toml/tests/invalid/float-no-trailing-digits.toml b/vendor/basic-toml/tests/invalid/float-no-trailing-digits.toml
new file mode 100644
index 000000000..cbff2d06f
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/float-no-trailing-digits.toml
@@ -0,0 +1,2 @@
+answer = 1.
+neganswer = -1.
diff --git a/vendor/basic-toml/tests/invalid/key-after-array.toml b/vendor/basic-toml/tests/invalid/key-after-array.toml
new file mode 100644
index 000000000..5c1a1b0a9
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/key-after-array.toml
@@ -0,0 +1 @@
+[[agencies]] owner = "S Cjelli"
diff --git a/vendor/basic-toml/tests/invalid/key-after-table.toml b/vendor/basic-toml/tests/invalid/key-after-table.toml
new file mode 100644
index 000000000..4bc82136c
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/key-after-table.toml
@@ -0,0 +1 @@
+[history] guard = "sleeping"
diff --git a/vendor/basic-toml/tests/invalid/key-empty.toml b/vendor/basic-toml/tests/invalid/key-empty.toml
new file mode 100644
index 000000000..09f998f41
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/key-empty.toml
@@ -0,0 +1 @@
+ = 1
diff --git a/vendor/basic-toml/tests/invalid/key-hash.toml b/vendor/basic-toml/tests/invalid/key-hash.toml
new file mode 100644
index 000000000..e321b1fbd
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/key-hash.toml
@@ -0,0 +1 @@
+a# = 1
diff --git a/vendor/basic-toml/tests/invalid/key-newline.toml b/vendor/basic-toml/tests/invalid/key-newline.toml
new file mode 100644
index 000000000..707aad54e
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/key-newline.toml
@@ -0,0 +1,2 @@
+a
+= 1
diff --git a/vendor/basic-toml/tests/invalid/key-open-bracket.toml b/vendor/basic-toml/tests/invalid/key-open-bracket.toml
new file mode 100644
index 000000000..f0aeb16e5
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/key-open-bracket.toml
@@ -0,0 +1 @@
+[abc = 1
diff --git a/vendor/basic-toml/tests/invalid/key-single-open-bracket.toml b/vendor/basic-toml/tests/invalid/key-single-open-bracket.toml
new file mode 100644
index 000000000..8e2f0bef1
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/key-single-open-bracket.toml
@@ -0,0 +1 @@
+[ \ No newline at end of file
diff --git a/vendor/basic-toml/tests/invalid/key-space.toml b/vendor/basic-toml/tests/invalid/key-space.toml
new file mode 100644
index 000000000..201806d28
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/key-space.toml
@@ -0,0 +1 @@
+a b = 1 \ No newline at end of file
diff --git a/vendor/basic-toml/tests/invalid/key-start-bracket.toml b/vendor/basic-toml/tests/invalid/key-start-bracket.toml
new file mode 100644
index 000000000..e0597ae1c
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/key-start-bracket.toml
@@ -0,0 +1,3 @@
+[a]
+[xyz = 5
+[b]
diff --git a/vendor/basic-toml/tests/invalid/key-two-equals.toml b/vendor/basic-toml/tests/invalid/key-two-equals.toml
new file mode 100644
index 000000000..25a037894
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/key-two-equals.toml
@@ -0,0 +1 @@
+key= = 1
diff --git a/vendor/basic-toml/tests/invalid/string-bad-byte-escape.toml b/vendor/basic-toml/tests/invalid/string-bad-byte-escape.toml
new file mode 100644
index 000000000..4c7be59f4
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/string-bad-byte-escape.toml
@@ -0,0 +1 @@
+naughty = "\xAg"
diff --git a/vendor/basic-toml/tests/invalid/string-bad-escape.toml b/vendor/basic-toml/tests/invalid/string-bad-escape.toml
new file mode 100644
index 000000000..60acb0ccc
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/string-bad-escape.toml
@@ -0,0 +1 @@
+invalid-escape = "This string has a bad \a escape character."
diff --git a/vendor/basic-toml/tests/invalid/string-bad-line-ending-escape.toml b/vendor/basic-toml/tests/invalid/string-bad-line-ending-escape.toml
new file mode 100644
index 000000000..32e2c4862
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/string-bad-line-ending-escape.toml
@@ -0,0 +1,3 @@
+invalid-escape = """\
+ This string has a non whitespace-character after the line ending escape. \ a
+"""
diff --git a/vendor/basic-toml/tests/invalid/string-byte-escapes.toml b/vendor/basic-toml/tests/invalid/string-byte-escapes.toml
new file mode 100644
index 000000000..e94452a8d
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/string-byte-escapes.toml
@@ -0,0 +1 @@
+answer = "\x33"
diff --git a/vendor/basic-toml/tests/invalid/string-no-close.toml b/vendor/basic-toml/tests/invalid/string-no-close.toml
new file mode 100644
index 000000000..0c292fcab
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/string-no-close.toml
@@ -0,0 +1 @@
+no-ending-quote = "One time, at band camp
diff --git a/vendor/basic-toml/tests/invalid/table-array-implicit.toml b/vendor/basic-toml/tests/invalid/table-array-implicit.toml
new file mode 100644
index 000000000..05f2507ec
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/table-array-implicit.toml
@@ -0,0 +1,14 @@
+# This test is a bit tricky. It should fail because the first use of
+# `[[albums.songs]]` without first declaring `albums` implies that `albums`
+# must be a table. The alternative would be quite weird. Namely, it wouldn't
+# comply with the TOML spec: "Each double-bracketed sub-table will belong to
+# the most *recently* defined table element *above* it."
+#
+# This is in contrast to the *valid* test, table-array-implicit where
+# `[[albums.songs]]` works by itself, so long as `[[albums]]` isn't declared
+# later. (Although, `[albums]` could be.)
+[[albums.songs]]
+name = "Glory Days"
+
+[[albums]]
+name = "Born in the USA"
diff --git a/vendor/basic-toml/tests/invalid/table-array-malformed-bracket.toml b/vendor/basic-toml/tests/invalid/table-array-malformed-bracket.toml
new file mode 100644
index 000000000..39c73b05c
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/table-array-malformed-bracket.toml
@@ -0,0 +1,2 @@
+[[albums]
+name = "Born to Run"
diff --git a/vendor/basic-toml/tests/invalid/table-array-malformed-empty.toml b/vendor/basic-toml/tests/invalid/table-array-malformed-empty.toml
new file mode 100644
index 000000000..a470ca332
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/table-array-malformed-empty.toml
@@ -0,0 +1,2 @@
+[[]]
+name = "Born to Run"
diff --git a/vendor/basic-toml/tests/invalid/table-empty.toml b/vendor/basic-toml/tests/invalid/table-empty.toml
new file mode 100644
index 000000000..fe51488c7
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/table-empty.toml
@@ -0,0 +1 @@
+[]
diff --git a/vendor/basic-toml/tests/invalid/table-nested-brackets-close.toml b/vendor/basic-toml/tests/invalid/table-nested-brackets-close.toml
new file mode 100644
index 000000000..c8b5a6785
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/table-nested-brackets-close.toml
@@ -0,0 +1,2 @@
+[a]b]
+zyx = 42
diff --git a/vendor/basic-toml/tests/invalid/table-nested-brackets-open.toml b/vendor/basic-toml/tests/invalid/table-nested-brackets-open.toml
new file mode 100644
index 000000000..246d7e91f
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/table-nested-brackets-open.toml
@@ -0,0 +1,2 @@
+[a[b]
+zyx = 42
diff --git a/vendor/basic-toml/tests/invalid/table-whitespace.toml b/vendor/basic-toml/tests/invalid/table-whitespace.toml
new file mode 100644
index 000000000..79bbcb1e2
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/table-whitespace.toml
@@ -0,0 +1 @@
+[invalid key] \ No newline at end of file
diff --git a/vendor/basic-toml/tests/invalid/table-with-pound.toml b/vendor/basic-toml/tests/invalid/table-with-pound.toml
new file mode 100644
index 000000000..0d8edb524
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/table-with-pound.toml
@@ -0,0 +1,2 @@
+[key#group]
+answer = 42 \ No newline at end of file
diff --git a/vendor/basic-toml/tests/invalid/text-after-array-entries.toml b/vendor/basic-toml/tests/invalid/text-after-array-entries.toml
new file mode 100644
index 000000000..1a7289074
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/text-after-array-entries.toml
@@ -0,0 +1,4 @@
+array = [
+ "Is there life after an array separator?", No
+ "Entry"
+]
diff --git a/vendor/basic-toml/tests/invalid/text-after-integer.toml b/vendor/basic-toml/tests/invalid/text-after-integer.toml
new file mode 100644
index 000000000..42de7aff4
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/text-after-integer.toml
@@ -0,0 +1 @@
+answer = 42 the ultimate answer?
diff --git a/vendor/basic-toml/tests/invalid/text-after-string.toml b/vendor/basic-toml/tests/invalid/text-after-string.toml
new file mode 100644
index 000000000..c92a6f11d
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/text-after-string.toml
@@ -0,0 +1 @@
+string = "Is there life after strings?" No.
diff --git a/vendor/basic-toml/tests/invalid/text-after-table.toml b/vendor/basic-toml/tests/invalid/text-after-table.toml
new file mode 100644
index 000000000..87da9db26
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/text-after-table.toml
@@ -0,0 +1 @@
+[error] this shouldn't be here
diff --git a/vendor/basic-toml/tests/invalid/text-before-array-separator.toml b/vendor/basic-toml/tests/invalid/text-before-array-separator.toml
new file mode 100644
index 000000000..9b06a3924
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/text-before-array-separator.toml
@@ -0,0 +1,4 @@
+array = [
+ "Is there life before an array separator?" No,
+ "Entry"
+]
diff --git a/vendor/basic-toml/tests/invalid/text-in-array.toml b/vendor/basic-toml/tests/invalid/text-in-array.toml
new file mode 100644
index 000000000..a6a6c4207
--- /dev/null
+++ b/vendor/basic-toml/tests/invalid/text-in-array.toml
@@ -0,0 +1,5 @@
+array = [
+ "Entry 1",
+ I don't belong,
+ "Entry 2",
+]
diff --git a/vendor/basic-toml/tests/parser.rs b/vendor/basic-toml/tests/parser.rs
new file mode 100644
index 000000000..22b40a54e
--- /dev/null
+++ b/vendor/basic-toml/tests/parser.rs
@@ -0,0 +1,687 @@
+#![allow(clippy::let_underscore_untyped, clippy::uninlined_format_args)]
+
+use serde_json::{json, Value};
+
+macro_rules! bad {
+ ($toml:expr, $msg:expr) => {
+ match basic_toml::from_str::<Value>($toml) {
+ Ok(s) => panic!("parsed to: {:#?}", s),
+ Err(e) => assert_eq!(e.to_string(), $msg),
+ }
+ };
+}
+
+#[test]
+fn crlf() {
+ let toml = "\
+ [project]\r\n\
+ \r\n\
+ name = \"splay\"\r\n\
+ version = \"0.1.0\"\r\n\
+ authors = [\"alex@crichton.co\"]\r\n\
+ \r\n\
+ [[lib]]\r\n\
+ \r\n\
+ path = \"lib.rs\"\r\n\
+ name = \"splay\"\r\n\
+ description = \"\"\"\
+ A Rust implementation of a TAR file reader and writer. This library does not\r\n\
+ currently handle compression, but it is abstract over all I/O readers and\r\n\
+ writers. Additionally, great lengths are taken to ensure that the entire\r\n\
+ contents are never required to be entirely resident in memory all at once.\r\n\
+ \"\"\"\
+ ";
+ basic_toml::from_str::<Value>(toml).unwrap();
+}
+
+#[test]
+fn fun_with_strings() {
+ let toml = r#"
+bar = "\U00000000"
+key1 = "One\nTwo"
+key2 = """One\nTwo"""
+key3 = """
+One
+Two"""
+
+key4 = "The quick brown fox jumps over the lazy dog."
+key5 = """
+The quick brown \
+
+
+fox jumps over \
+the lazy dog."""
+key6 = """\
+ The quick brown \
+ fox jumps over \
+ the lazy dog.\
+ """
+# What you see is what you get.
+winpath = 'C:\Users\nodejs\templates'
+winpath2 = '\\ServerX\admin$\system32\'
+quoted = 'Tom "Dubs" Preston-Werner'
+regex = '<\i\c*\s*>'
+
+regex2 = '''I [dw]on't need \d{2} apples'''
+lines = '''
+The first newline is
+trimmed in raw strings.
+All other whitespace
+is preserved.
+'''
+"#;
+ let table: Value = basic_toml::from_str(toml).unwrap();
+ assert_eq!(table["bar"], json!("\0"));
+ assert_eq!(table["key1"], json!("One\nTwo"));
+ assert_eq!(table["key2"], json!("One\nTwo"));
+ assert_eq!(table["key3"], json!("One\nTwo"));
+
+ let msg = "The quick brown fox jumps over the lazy dog.";
+ assert_eq!(table["key4"], json!(msg));
+ assert_eq!(table["key5"], json!(msg));
+ assert_eq!(table["key6"], json!(msg));
+
+ assert_eq!(table["winpath"], json!(r"C:\Users\nodejs\templates"));
+ assert_eq!(table["winpath2"], json!(r"\\ServerX\admin$\system32\"));
+ assert_eq!(table["quoted"], json!(r#"Tom "Dubs" Preston-Werner"#));
+ assert_eq!(table["regex"], json!(r"<\i\c*\s*>"));
+ assert_eq!(table["regex2"], json!(r"I [dw]on't need \d{2} apples"));
+ assert_eq!(
+ table["lines"],
+ json!(
+ "The first newline is\n\
+ trimmed in raw strings.\n\
+ All other whitespace\n\
+ is preserved.\n"
+ )
+ );
+}
+
+#[test]
+fn tables_in_arrays() {
+ let toml = r#"
+[[foo]]
+#…
+[foo.bar]
+#…
+
+[[foo]] # ...
+#…
+[foo.bar]
+#...
+"#;
+ let table: Value = basic_toml::from_str(toml).unwrap();
+ table["foo"][0]["bar"].as_object().unwrap();
+ table["foo"][1]["bar"].as_object().unwrap();
+}
+
+#[test]
+fn empty_table() {
+ let toml = r#"
+[foo]"#;
+ let table: Value = basic_toml::from_str(toml).unwrap();
+ table["foo"].as_object().unwrap();
+}
+
+#[test]
+fn fruit() {
+ let toml = r#"
+[[fruit]]
+name = "apple"
+
+[fruit.physical]
+color = "red"
+shape = "round"
+
+[[fruit.variety]]
+name = "red delicious"
+
+[[fruit.variety]]
+name = "granny smith"
+
+[[fruit]]
+name = "banana"
+
+[[fruit.variety]]
+name = "plantain"
+"#;
+ let table: Value = basic_toml::from_str(toml).unwrap();
+ assert_eq!(table["fruit"][0]["name"], json!("apple"));
+ assert_eq!(table["fruit"][0]["physical"]["color"], json!("red"));
+ assert_eq!(table["fruit"][0]["physical"]["shape"], json!("round"));
+ assert_eq!(
+ table["fruit"][0]["variety"][0]["name"],
+ json!("red delicious")
+ );
+ assert_eq!(
+ table["fruit"][0]["variety"][1]["name"],
+ json!("granny smith")
+ );
+ assert_eq!(table["fruit"][1]["name"], json!("banana"));
+ assert_eq!(table["fruit"][1]["variety"][0]["name"], json!("plantain"));
+}
+
+#[test]
+fn stray_cr() {
+ bad!("\r", "unexpected character found: `\\r` at line 1 column 1");
+ bad!(
+ "a = [ \r ]",
+ "unexpected character found: `\\r` at line 1 column 7"
+ );
+ bad!(
+ "a = \"\"\"\r\"\"\"",
+ "invalid character in string: `\\r` at line 1 column 8"
+ );
+ bad!(
+ "a = \"\"\"\\ \r \"\"\"",
+ "invalid escape character in string: ` ` at line 1 column 9"
+ );
+ bad!(
+ "a = '''\r'''",
+ "invalid character in string: `\\r` at line 1 column 8"
+ );
+ bad!(
+ "a = '\r'",
+ "invalid character in string: `\\r` at line 1 column 6"
+ );
+ bad!(
+ "a = \"\r\"",
+ "invalid character in string: `\\r` at line 1 column 6"
+ );
+}
+
+#[test]
+fn blank_literal_string() {
+ let table: Value = basic_toml::from_str("foo = ''").unwrap();
+ assert_eq!(table["foo"], json!(""));
+}
+
+#[test]
+fn many_blank() {
+ let table: Value = basic_toml::from_str("foo = \"\"\"\n\n\n\"\"\"").unwrap();
+ assert_eq!(table["foo"], json!("\n\n"));
+}
+
+#[test]
+fn literal_eats_crlf() {
+ let toml = "
+ foo = \"\"\"\\\r\n\"\"\"
+ bar = \"\"\"\\\r\n \r\n \r\n a\"\"\"
+ ";
+ let table: Value = basic_toml::from_str(toml).unwrap();
+ assert_eq!(table["foo"], json!(""));
+ assert_eq!(table["bar"], json!("a"));
+}
+
+#[test]
+fn string_no_newline() {
+ bad!("a = \"\n\"", "newline in string found at line 1 column 6");
+ bad!("a = '\n'", "newline in string found at line 1 column 6");
+}
+
+#[test]
+fn bad_leading_zeros() {
+ bad!("a = 00", "invalid number at line 1 column 6");
+ bad!("a = -00", "invalid number at line 1 column 7");
+ bad!("a = +00", "invalid number at line 1 column 7");
+ bad!("a = 00.0", "invalid number at line 1 column 6");
+ bad!("a = -00.0", "invalid number at line 1 column 7");
+ bad!("a = +00.0", "invalid number at line 1 column 7");
+ bad!(
+ "a = 9223372036854775808",
+ "invalid number at line 1 column 5"
+ );
+ bad!(
+ "a = -9223372036854775809",
+ "invalid number at line 1 column 5"
+ );
+}
+
+#[test]
+fn bad_floats() {
+ bad!("a = 0.", "invalid number at line 1 column 7");
+ bad!("a = 0.e", "invalid number at line 1 column 7");
+ bad!("a = 0.E", "invalid number at line 1 column 7");
+ bad!("a = 0.0E", "invalid number at line 1 column 5");
+ bad!("a = 0.0e", "invalid number at line 1 column 5");
+ bad!("a = 0.0e-", "invalid number at line 1 column 9");
+ bad!("a = 0.0e+", "invalid number at line 1 column 5");
+}
+
+#[test]
+fn floats() {
+ macro_rules! t {
+ ($actual:expr, $expected:expr) => {{
+ let f = format!("foo = {}", $actual);
+ println!("{}", f);
+ let a: Value = basic_toml::from_str(&f).unwrap();
+ assert_eq!(a["foo"], json!($expected));
+ }};
+ }
+
+ t!("1.0", 1.0);
+ t!("1.0e0", 1.0);
+ t!("1.0e+0", 1.0);
+ t!("1.0e-0", 1.0);
+ t!("1E-0", 1.0);
+ t!("1.001e-0", 1.001);
+ t!("2e10", 2e10);
+ t!("2e+10", 2e10);
+ t!("2e-10", 2e-10);
+ t!("2_0.0", 20.0);
+ t!("2_0.0_0e1_0", 20.0e10);
+ t!("2_0.1_0e1_0", 20.1e10);
+}
+
+#[test]
+fn bare_key_names() {
+ let toml = "
+ foo = 3
+ foo_3 = 3
+ foo_-2--3--r23f--4-f2-4 = 3
+ _ = 3
+ - = 3
+ 8 = 8
+ \"a\" = 3
+ \"!\" = 3
+ \"a^b\" = 3
+ \"\\\"\" = 3
+ \"character encoding\" = \"value\"
+ 'ʎǝʞ' = \"value\"
+ ";
+ let a: Value = basic_toml::from_str(toml).unwrap();
+ let _ = &a["foo"];
+ let _ = &a["-"];
+ let _ = &a["_"];
+ let _ = &a["8"];
+ let _ = &a["foo_3"];
+ let _ = &a["foo_-2--3--r23f--4-f2-4"];
+ let _ = &a["a"];
+ let _ = &a["!"];
+ let _ = &a["\""];
+ let _ = &a["character encoding"];
+ let _ = &a["ʎǝʞ"];
+}
+
+#[test]
+fn bad_keys() {
+ bad!(
+ "key\n=3",
+ "expected an equals, found a newline at line 1 column 4"
+ );
+ bad!(
+ "key=\n3",
+ "expected a value, found a newline at line 1 column 5"
+ );
+ bad!(
+ "key|=3",
+ "unexpected character found: `|` at line 1 column 4"
+ );
+ bad!(
+ "=3",
+ "expected a table key, found an equals at line 1 column 1"
+ );
+ bad!(
+ "\"\"|=3",
+ "unexpected character found: `|` at line 1 column 3"
+ );
+ bad!("\"\n\"|=3", "newline in string found at line 1 column 2");
+ bad!(
+ "\"\r\"|=3",
+ "invalid character in string: `\\r` at line 1 column 2"
+ );
+ bad!(
+ "''''''=3",
+ "multiline strings are not allowed for key at line 1 column 1"
+ );
+ bad!(
+ "\"\"\"\"\"\"=3",
+ "multiline strings are not allowed for key at line 1 column 1"
+ );
+ bad!(
+ "'''key'''=3",
+ "multiline strings are not allowed for key at line 1 column 1"
+ );
+ bad!(
+ "\"\"\"key\"\"\"=3",
+ "multiline strings are not allowed for key at line 1 column 1"
+ );
+}
+
+#[test]
+fn bad_table_names() {
+ bad!(
+ "[]",
+ "expected a table key, found a right bracket at line 1 column 2"
+ );
+ bad!(
+ "[.]",
+ "expected a table key, found a period at line 1 column 2"
+ );
+ bad!(
+ "[a.]",
+ "expected a table key, found a right bracket at line 1 column 4"
+ );
+ bad!("[!]", "unexpected character found: `!` at line 1 column 2");
+ bad!("[\"\n\"]", "newline in string found at line 1 column 3");
+ bad!(
+ "[a.b]\n[a.\"b\"]",
+ "redefinition of table `a.b` for key `a.b` at line 2 column 1"
+ );
+ bad!("[']", "unterminated string at line 1 column 2");
+ bad!("[''']", "unterminated string at line 1 column 2");
+ bad!(
+ "['''''']",
+ "multiline strings are not allowed for key at line 1 column 2"
+ );
+ bad!(
+ "['''foo''']",
+ "multiline strings are not allowed for key at line 1 column 2"
+ );
+ bad!(
+ "[\"\"\"bar\"\"\"]",
+ "multiline strings are not allowed for key at line 1 column 2"
+ );
+ bad!("['\n']", "newline in string found at line 1 column 3");
+ bad!("['\r\n']", "newline in string found at line 1 column 3");
+}
+
+#[test]
+fn table_names() {
+ let toml = "
+ [a.\"b\"]
+ [\"f f\"]
+ [\"f.f\"]
+ [\"\\\"\"]
+ ['a.a']
+ ['\"\"']
+ ";
+ let a: Value = basic_toml::from_str(toml).unwrap();
+ println!("{:?}", a);
+ let _ = &a["a"]["b"];
+ let _ = &a["f f"];
+ let _ = &a["f.f"];
+ let _ = &a["\""];
+ let _ = &a["\"\""];
+}
+
+#[test]
+fn invalid_bare_numeral() {
+ bad!("4", "expected an equals, found eof at line 1 column 2");
+}
+
+#[test]
+fn inline_tables() {
+ basic_toml::from_str::<Value>("a = {}").unwrap();
+ basic_toml::from_str::<Value>("a = {b=1}").unwrap();
+ basic_toml::from_str::<Value>("a = { b = 1 }").unwrap();
+ basic_toml::from_str::<Value>("a = {a=1,b=2}").unwrap();
+ basic_toml::from_str::<Value>("a = {a=1,b=2,c={}}").unwrap();
+
+ bad!(
+ "a = {a=1,}",
+ "expected a table key, found a right brace at line 1 column 10"
+ );
+ bad!(
+ "a = {,}",
+ "expected a table key, found a comma at line 1 column 6"
+ );
+ bad!(
+ "a = {a=1,a=1}",
+ "duplicate key: `a` for key `a` at line 1 column 10"
+ );
+ bad!(
+ "a = {\n}",
+ "expected a table key, found a newline at line 1 column 6"
+ );
+ bad!(
+ "a = {",
+ "expected a table key, found eof at line 1 column 6"
+ );
+
+ basic_toml::from_str::<Value>("a = {a=[\n]}").unwrap();
+ basic_toml::from_str::<Value>("a = {\"a\"=[\n]}").unwrap();
+ basic_toml::from_str::<Value>("a = [\n{},\n{},\n]").unwrap();
+}
+
+#[test]
+fn number_underscores() {
+ macro_rules! t {
+ ($actual:expr, $expected:expr) => {{
+ let f = format!("foo = {}", $actual);
+ let table: Value = basic_toml::from_str(&f).unwrap();
+ assert_eq!(table["foo"], json!($expected));
+ }};
+ }
+
+ t!("1_0", 10);
+ t!("1_0_0", 100);
+ t!("1_000", 1000);
+ t!("+1_000", 1000);
+ t!("-1_000", -1000);
+}
+
+#[test]
+fn bad_underscores() {
+ bad!("foo = 0_", "invalid number at line 1 column 7");
+ bad!("foo = 0__0", "invalid number at line 1 column 7");
+ bad!(
+ "foo = __0",
+ "invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
+ );
+ bad!("foo = 1_0_", "invalid number at line 1 column 7");
+}
+
+#[test]
+fn bad_unicode_codepoint() {
+ bad!(
+ "foo = \"\\uD800\"",
+ "invalid escape value: `55296` at line 1 column 9"
+ );
+}
+
+#[test]
+fn bad_strings() {
+ bad!(
+ "foo = \"\\uxx\"",
+ "invalid hex escape character in string: `x` at line 1 column 10"
+ );
+ bad!(
+ "foo = \"\\u\"",
+ "invalid hex escape character in string: `\\\"` at line 1 column 10"
+ );
+ bad!("foo = \"\\", "unterminated string at line 1 column 7");
+ bad!("foo = '", "unterminated string at line 1 column 7");
+}
+
+#[test]
+fn empty_string() {
+ let table: Value = basic_toml::from_str::<Value>("foo = \"\"").unwrap();
+ assert_eq!(table["foo"], json!(""));
+}
+
+#[test]
+fn booleans() {
+ let table: Value = basic_toml::from_str("foo = true").unwrap();
+ assert_eq!(table["foo"], json!(true));
+
+ let table: Value = basic_toml::from_str("foo = false").unwrap();
+ assert_eq!(table["foo"], json!(false));
+
+ bad!(
+ "foo = true2",
+ "invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
+ );
+ bad!(
+ "foo = false2",
+ "invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
+ );
+ bad!(
+ "foo = t1",
+ "invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
+ );
+ bad!(
+ "foo = f2",
+ "invalid TOML value, did you mean to use a quoted string? at line 1 column 7"
+ );
+}
+
+#[test]
+fn bad_nesting() {
+ bad!(
+ "
+ a = [2]
+ [[a]]
+ b = 5
+ ",
+ "duplicate key: `a` at line 3 column 11"
+ );
+ bad!(
+ "
+ a = 1
+ [a.b]
+ ",
+ "duplicate key: `a` at line 3 column 10"
+ );
+ bad!(
+ "
+ a = []
+ [a.b]
+ ",
+ "duplicate key: `a` at line 3 column 10"
+ );
+ bad!(
+ "
+ a = []
+ [[a.b]]
+ ",
+ "duplicate key: `a` at line 3 column 11"
+ );
+ bad!(
+ "
+ [a]
+ b = { c = 2, d = {} }
+ [a.b]
+ c = 2
+ ",
+ "duplicate key: `b` for key `a` at line 4 column 12"
+ );
+}
+
+#[test]
+fn bad_table_redefine() {
+ bad!(
+ "
+ [a]
+ foo=\"bar\"
+ [a.b]
+ foo=\"bar\"
+ [a]
+ ",
+ "redefinition of table `a` for key `a` at line 6 column 9"
+ );
+ bad!(
+ "
+ [a]
+ foo=\"bar\"
+ b = { foo = \"bar\" }
+ [a]
+ ",
+ "redefinition of table `a` for key `a` at line 5 column 9"
+ );
+ bad!(
+ "
+ [a]
+ b = {}
+ [a.b]
+ ",
+ "duplicate key: `b` for key `a` at line 4 column 12"
+ );
+
+ bad!(
+ "
+ [a]
+ b = {}
+ [a]
+ ",
+ "redefinition of table `a` for key `a` at line 4 column 9"
+ );
+}
+
+#[test]
+fn datetimes() {
+ bad!(
+ "foo = 2016-09-09T09:09:09Z",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 2016-09-09T09:09:09.1Z",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 2016-09-09T09:09:09.2+10:00",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 2016-09-09T09:09:09.123456789-02:00",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 2016-09-09T09:09:09.Z",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 2016-9-09T09:09:09Z",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 2016-09-09T09:09:09+2:00",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 2016-09-09T09:09:09-2:00",
+ "invalid number at line 1 column 7"
+ );
+ bad!(
+ "foo = 2016-09-09T09:09:09Z-2:00",
+ "invalid number at line 1 column 7"
+ );
+}
+
+#[test]
+fn require_newline_after_value() {
+ bad!("0=0r=false", "invalid number at line 1 column 3");
+ bad!(
+ r#"
+0=""o=""m=""r=""00="0"q="""0"""e="""0"""
+"#,
+ "expected newline, found an identifier at line 2 column 5"
+ );
+ bad!(
+ r#"
+[[0000l0]]
+0="0"[[0000l0]]
+0="0"[[0000l0]]
+0="0"l="0"
+"#,
+ "expected newline, found a left bracket at line 3 column 6"
+ );
+ bad!(
+ r#"
+0=[0]00=[0,0,0]t=["0","0","0"]s=[1000-00-00T00:00:00Z,2000-00-00T00:00:00Z]
+"#,
+ "expected newline, found an identifier at line 2 column 6"
+ );
+ bad!(
+ r#"
+0=0r0=0r=false
+"#,
+ "invalid number at line 2 column 3"
+ );
+ bad!(
+ r#"
+0=0r0=0r=falsefal=false
+"#,
+ "invalid number at line 2 column 3"
+ );
+}
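
The parser tests above all follow one pattern: hand a TOML string to basic_toml::from_str, deserialize into serde_json::Value, and assert either on the parsed value or on the error's Display output, which carries a line/column position. A minimal sketch of the same pattern against a typed struct, assuming serde's derive feature is available; the Config struct and its fields are illustrative and not taken from the crate:

use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct Config {
    name: String,
    version: String,
}

fn main() {
    // Successful parse into a typed struct.
    let ok: Config = basic_toml::from_str("name = \"splay\"\nversion = \"0.1.0\"").unwrap();
    assert_eq!(ok.name, "splay");

    // Parse errors render with a line/column position, which is what the
    // bad! macro in the tests above asserts against.
    let err = basic_toml::from_str::<Config>("name = ").unwrap_err();
    assert!(err.to_string().contains("line 1"));
}
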
diff --git a/vendor/basic-toml/tests/tokens.rs b/vendor/basic-toml/tests/tokens.rs
new file mode 100644
index 000000000..0cc6e2e2d
--- /dev/null
+++ b/vendor/basic-toml/tests/tokens.rs
@@ -0,0 +1,188 @@
+#![allow(
+ clippy::let_underscore_untyped,
+ clippy::manual_range_contains,
+ clippy::needless_pass_by_value,
+ clippy::type_complexity
+)]
+
+#[path = "../src/tokens.rs"]
+#[allow(dead_code)]
+mod tokens;
+
+use crate::tokens::{Error, Token, Tokenizer};
+use std::borrow::Cow;
+
+fn err(input: &str, err: Error) {
+ let mut t = Tokenizer::new(input);
+ let token = t.next().unwrap_err();
+ assert_eq!(token, err);
+ assert!(t.next().unwrap().is_none());
+}
+
+#[test]
+fn literal_strings() {
+ fn t(input: &str, val: &str, multiline: bool) {
+ let mut t = Tokenizer::new(input);
+ let (_, token) = t.next().unwrap().unwrap();
+ assert_eq!(
+ token,
+ Token::String {
+ src: input,
+ val: Cow::Borrowed(val),
+ multiline,
+ }
+ );
+ assert!(t.next().unwrap().is_none());
+ }
+
+ t("''", "", false);
+ t("''''''", "", true);
+ t("'''\n'''", "", true);
+ t("'a'", "a", false);
+ t("'\"a'", "\"a", false);
+ t("''''a'''", "'a", true);
+ t("'''\n'a\n'''", "'a\n", true);
+ t("'''a\n'a\r\n'''", "a\n'a\n", true);
+}
+
+#[test]
+fn basic_strings() {
+ fn t(input: &str, val: &str, multiline: bool) {
+ let mut t = Tokenizer::new(input);
+ let (_, token) = t.next().unwrap().unwrap();
+ assert_eq!(
+ token,
+ Token::String {
+ src: input,
+ val: Cow::Borrowed(val),
+ multiline,
+ }
+ );
+ assert!(t.next().unwrap().is_none());
+ }
+
+ t(r#""""#, "", false);
+ t(r#""""""""#, "", true);
+ t(r#""a""#, "a", false);
+ t(r#""""a""""#, "a", true);
+ t(r#""\t""#, "\t", false);
+ t(r#""\u0000""#, "\0", false);
+ t(r#""\U00000000""#, "\0", false);
+ t(r#""\U000A0000""#, "\u{A0000}", false);
+ t(r#""\\t""#, "\\t", false);
+ t("\"\t\"", "\t", false);
+ t("\"\"\"\n\t\"\"\"", "\t", true);
+ t("\"\"\"\\\n\"\"\"", "", true);
+ t(
+ "\"\"\"\\\n \t \t \\\r\n \t \n \t \r\n\"\"\"",
+ "",
+ true,
+ );
+ t(r#""\r""#, "\r", false);
+ t(r#""\n""#, "\n", false);
+ t(r#""\b""#, "\u{8}", false);
+ t(r#""a\fa""#, "a\u{c}a", false);
+ t(r#""\"a""#, "\"a", false);
+ t("\"\"\"\na\"\"\"", "a", true);
+ t("\"\"\"\n\"\"\"", "", true);
+ t(r#""""a\"""b""""#, "a\"\"\"b", true);
+ err(r#""\a"#, Error::InvalidEscape(2, 'a'));
+ err("\"\\\n", Error::InvalidEscape(2, '\n'));
+ err("\"\\\r\n", Error::InvalidEscape(2, '\n'));
+ err("\"\\", Error::UnterminatedString(0));
+ err("\"\u{0}", Error::InvalidCharInString(1, '\u{0}'));
+ err(r#""\U00""#, Error::InvalidHexEscape(5, '"'));
+ err(r#""\U00"#, Error::UnterminatedString(0));
+ err(r#""\uD800"#, Error::InvalidEscapeValue(2, 0xd800));
+ err(r#""\UFFFFFFFF"#, Error::InvalidEscapeValue(2, 0xffff_ffff));
+}
+
+#[test]
+fn keylike() {
+ fn t(input: &str) {
+ let mut t = Tokenizer::new(input);
+ let (_, token) = t.next().unwrap().unwrap();
+ assert_eq!(token, Token::Keylike(input));
+ assert!(t.next().unwrap().is_none());
+ }
+ t("foo");
+ t("0bar");
+ t("bar0");
+ t("1234");
+ t("a-b");
+ t("a_B");
+ t("-_-");
+ t("___");
+}
+
+#[test]
+fn all() {
+ fn t(input: &str, expected: &[((usize, usize), Token, &str)]) {
+ let mut tokens = Tokenizer::new(input);
+ let mut actual: Vec<((usize, usize), Token, &str)> = Vec::new();
+ while let Some((span, token)) = tokens.next().unwrap() {
+ actual.push((span.into(), token, &input[span.start..span.end]));
+ }
+ for (a, b) in actual.iter().zip(expected) {
+ assert_eq!(a, b);
+ }
+ assert_eq!(actual.len(), expected.len());
+ }
+
+ t(
+ " a ",
+ &[
+ ((0, 1), Token::Whitespace(" "), " "),
+ ((1, 2), Token::Keylike("a"), "a"),
+ ((2, 3), Token::Whitespace(" "), " "),
+ ],
+ );
+
+ t(
+ " a\t [[]] \t [] {} , . =\n# foo \r\n#foo \n ",
+ &[
+ ((0, 1), Token::Whitespace(" "), " "),
+ ((1, 2), Token::Keylike("a"), "a"),
+ ((2, 4), Token::Whitespace("\t "), "\t "),
+ ((4, 5), Token::LeftBracket, "["),
+ ((5, 6), Token::LeftBracket, "["),
+ ((6, 7), Token::RightBracket, "]"),
+ ((7, 8), Token::RightBracket, "]"),
+ ((8, 11), Token::Whitespace(" \t "), " \t "),
+ ((11, 12), Token::LeftBracket, "["),
+ ((12, 13), Token::RightBracket, "]"),
+ ((13, 14), Token::Whitespace(" "), " "),
+ ((14, 15), Token::LeftBrace, "{"),
+ ((15, 16), Token::RightBrace, "}"),
+ ((16, 17), Token::Whitespace(" "), " "),
+ ((17, 18), Token::Comma, ","),
+ ((18, 19), Token::Whitespace(" "), " "),
+ ((19, 20), Token::Period, "."),
+ ((20, 21), Token::Whitespace(" "), " "),
+ ((21, 22), Token::Equals, "="),
+ ((22, 23), Token::Newline, "\n"),
+ ((23, 29), Token::Comment("# foo "), "# foo "),
+ ((29, 31), Token::Newline, "\r\n"),
+ ((31, 36), Token::Comment("#foo "), "#foo "),
+ ((36, 37), Token::Newline, "\n"),
+ ((37, 38), Token::Whitespace(" "), " "),
+ ],
+ );
+}
+
+#[test]
+fn bare_cr_bad() {
+ err("\r", Error::Unexpected(0, '\r'));
+ err("'\n", Error::NewlineInString(1));
+ err("'\u{0}", Error::InvalidCharInString(1, '\u{0}'));
+ err("'", Error::UnterminatedString(0));
+ err("\u{0}", Error::Unexpected(0, '\u{0}'));
+}
+
+#[test]
+fn bad_comment() {
+ let mut t = Tokenizer::new("#\u{0}");
+ t.next().unwrap().unwrap();
+ assert_eq!(t.next(), Err(Error::Unexpected(1, '\u{0}')));
+ assert!(t.next().unwrap().is_none());
+}
diff --git a/vendor/basic-toml/tests/valid.rs b/vendor/basic-toml/tests/valid.rs
new file mode 100644
index 000000000..c0b4479c0
--- /dev/null
+++ b/vendor/basic-toml/tests/valid.rs
@@ -0,0 +1,368 @@
+#![allow(
+ clippy::match_like_matches_macro,
+ clippy::needless_pass_by_value,
+ clippy::uninlined_format_args
+)]
+
+use serde_json::{json, Value};
+
+fn to_json(toml: Value) -> Value {
+ fn doit(s: &str, json: Value) -> Value {
+ json!({ "type": s, "value": json })
+ }
+
+ match toml {
+ Value::Null => unreachable!(),
+ Value::String(s) => doit("string", Value::String(s)),
+ Value::Number(n) => {
+ let repr = n.to_string();
+ if repr.contains('.') {
+ let float: f64 = repr.parse().unwrap();
+ let mut repr = format!("{:.15}", float);
+ repr.truncate(repr.trim_end_matches('0').len());
+ if repr.ends_with('.') {
+ repr.push('0');
+ }
+ doit("float", Value::String(repr))
+ } else {
+ doit("integer", Value::String(repr))
+ }
+ }
+ Value::Bool(b) => doit("bool", Value::String(format!("{}", b))),
+ Value::Array(arr) => {
+ let is_table = match arr.first() {
+ Some(&Value::Object(_)) => true,
+ _ => false,
+ };
+ let json = Value::Array(arr.into_iter().map(to_json).collect());
+ if is_table {
+ json
+ } else {
+ doit("array", json)
+ }
+ }
+ Value::Object(table) => {
+ let mut map = serde_json::Map::new();
+ for (k, v) in table {
+ map.insert(k, to_json(v));
+ }
+ Value::Object(map)
+ }
+ }
+}
+
+fn run(toml_raw: &str, json_raw: &str) {
+ println!("parsing:\n{}", toml_raw);
+ let toml: Value = basic_toml::from_str(toml_raw).unwrap();
+ let json: Value = serde_json::from_str(json_raw).unwrap();
+
+ // Assert toml == json
+ let toml_json = to_json(toml.clone());
+ assert!(
+ json == toml_json,
+ "expected\n{}\ngot\n{}\n",
+ serde_json::to_string_pretty(&json).unwrap(),
+ serde_json::to_string_pretty(&toml_json).unwrap()
+ );
+
+ // Assert round trip
+ println!("round trip parse: {}", toml);
+ let toml2: Value = basic_toml::from_str(&basic_toml::to_string(&toml).unwrap()).unwrap();
+ assert_eq!(toml, toml2);
+}
+
+macro_rules! test( ($name:ident, $toml:expr, $json:expr) => (
+ #[test]
+ fn $name() { run($toml, $json); }
+) );
+
+test!(
+ array_empty,
+ include_str!("valid/array-empty.toml"),
+ include_str!("valid/array-empty.json")
+);
+test!(
+ array_nospaces,
+ include_str!("valid/array-nospaces.toml"),
+ include_str!("valid/array-nospaces.json")
+);
+test!(
+ arrays_hetergeneous,
+ include_str!("valid/arrays-hetergeneous.toml"),
+ include_str!("valid/arrays-hetergeneous.json")
+);
+#[cfg(any())]
+test!(
+ arrays,
+ include_str!("valid/arrays.toml"),
+ include_str!("valid/arrays.json")
+);
+test!(
+ arrays_nested,
+ include_str!("valid/arrays-nested.toml"),
+ include_str!("valid/arrays-nested.json")
+);
+test!(
+ array_mixed_types_ints_and_floats,
+ include_str!("valid/array-mixed-types-ints-and-floats.toml"),
+ include_str!("valid/array-mixed-types-ints-and-floats.json")
+);
+test!(
+ array_mixed_types_arrays_and_ints,
+ include_str!("valid/array-mixed-types-arrays-and-ints.toml"),
+ include_str!("valid/array-mixed-types-arrays-and-ints.json")
+);
+test!(
+ array_mixed_types_strings_and_ints,
+ include_str!("valid/array-mixed-types-strings-and-ints.toml"),
+ include_str!("valid/array-mixed-types-strings-and-ints.json")
+);
+test!(
+ empty,
+ include_str!("valid/empty.toml"),
+ include_str!("valid/empty.json")
+);
+test!(
+ bool,
+ include_str!("valid/bool.toml"),
+ include_str!("valid/bool.json")
+);
+test!(
+ comments_everywhere,
+ include_str!("valid/comments-everywhere.toml"),
+ include_str!("valid/comments-everywhere.json")
+);
+#[cfg(any())]
+test!(
+ datetime,
+ include_str!("valid/datetime.toml"),
+ include_str!("valid/datetime.json")
+);
+#[cfg(any())]
+test!(
+ example,
+ include_str!("valid/example.toml"),
+ include_str!("valid/example.json")
+);
+test!(
+ float,
+ include_str!("valid/float.toml"),
+ include_str!("valid/float.json")
+);
+#[cfg(any())]
+test!(
+ implicit_and_explicit_after,
+ include_str!("valid/implicit-and-explicit-after.toml"),
+ include_str!("valid/implicit-and-explicit-after.json")
+);
+#[cfg(any())]
+test!(
+ implicit_and_explicit_before,
+ include_str!("valid/implicit-and-explicit-before.toml"),
+ include_str!("valid/implicit-and-explicit-before.json")
+);
+test!(
+ implicit_groups,
+ include_str!("valid/implicit-groups.toml"),
+ include_str!("valid/implicit-groups.json")
+);
+test!(
+ integer,
+ include_str!("valid/integer.toml"),
+ include_str!("valid/integer.json")
+);
+test!(
+ key_equals_nospace,
+ include_str!("valid/key-equals-nospace.toml"),
+ include_str!("valid/key-equals-nospace.json")
+);
+test!(
+ key_space,
+ include_str!("valid/key-space.toml"),
+ include_str!("valid/key-space.json")
+);
+test!(
+ key_special_chars,
+ include_str!("valid/key-special-chars.toml"),
+ include_str!("valid/key-special-chars.json")
+);
+test!(
+ key_with_pound,
+ include_str!("valid/key-with-pound.toml"),
+ include_str!("valid/key-with-pound.json")
+);
+test!(
+ key_empty,
+ include_str!("valid/key-empty.toml"),
+ include_str!("valid/key-empty.json")
+);
+test!(
+ long_float,
+ include_str!("valid/long-float.toml"),
+ include_str!("valid/long-float.json")
+);
+test!(
+ long_integer,
+ include_str!("valid/long-integer.toml"),
+ include_str!("valid/long-integer.json")
+);
+test!(
+ multiline_string,
+ include_str!("valid/multiline-string.toml"),
+ include_str!("valid/multiline-string.json")
+);
+test!(
+ raw_multiline_string,
+ include_str!("valid/raw-multiline-string.toml"),
+ include_str!("valid/raw-multiline-string.json")
+);
+test!(
+ raw_string,
+ include_str!("valid/raw-string.toml"),
+ include_str!("valid/raw-string.json")
+);
+test!(
+ string_empty,
+ include_str!("valid/string-empty.toml"),
+ include_str!("valid/string-empty.json")
+);
+test!(
+ string_escapes,
+ include_str!("valid/string-escapes.toml"),
+ include_str!("valid/string-escapes.json")
+);
+test!(
+ string_simple,
+ include_str!("valid/string-simple.toml"),
+ include_str!("valid/string-simple.json")
+);
+test!(
+ string_with_pound,
+ include_str!("valid/string-with-pound.toml"),
+ include_str!("valid/string-with-pound.json")
+);
+test!(
+ table_array_implicit,
+ include_str!("valid/table-array-implicit.toml"),
+ include_str!("valid/table-array-implicit.json")
+);
+test!(
+ table_array_many,
+ include_str!("valid/table-array-many.toml"),
+ include_str!("valid/table-array-many.json")
+);
+test!(
+ table_array_nest,
+ include_str!("valid/table-array-nest.toml"),
+ include_str!("valid/table-array-nest.json")
+);
+test!(
+ table_array_one,
+ include_str!("valid/table-array-one.toml"),
+ include_str!("valid/table-array-one.json")
+);
+test!(
+ table_empty,
+ include_str!("valid/table-empty.toml"),
+ include_str!("valid/table-empty.json")
+);
+test!(
+ table_sub_empty,
+ include_str!("valid/table-sub-empty.toml"),
+ include_str!("valid/table-sub-empty.json")
+);
+test!(
+ table_multi_empty,
+ include_str!("valid/table-multi-empty.toml"),
+ include_str!("valid/table-multi-empty.json")
+);
+test!(
+ table_whitespace,
+ include_str!("valid/table-whitespace.toml"),
+ include_str!("valid/table-whitespace.json")
+);
+test!(
+ table_with_pound,
+ include_str!("valid/table-with-pound.toml"),
+ include_str!("valid/table-with-pound.json")
+);
+test!(
+ unicode_escape,
+ include_str!("valid/unicode-escape.toml"),
+ include_str!("valid/unicode-escape.json")
+);
+test!(
+ unicode_literal,
+ include_str!("valid/unicode-literal.toml"),
+ include_str!("valid/unicode-literal.json")
+);
+#[cfg(any())]
+test!(
+ hard_example,
+ include_str!("valid/hard_example.toml"),
+ include_str!("valid/hard_example.json")
+);
+#[cfg(any())]
+test!(
+ example2,
+ include_str!("valid/example2.toml"),
+ include_str!("valid/example2.json")
+);
+#[cfg(any())]
+test!(
+ example3,
+ include_str!("valid/example-v0.3.0.toml"),
+ include_str!("valid/example-v0.3.0.json")
+);
+#[cfg(any())]
+test!(
+ example4,
+ include_str!("valid/example-v0.4.0.toml"),
+ include_str!("valid/example-v0.4.0.json")
+);
+#[cfg(any())]
+test!(
+ example_bom,
+ include_str!("valid/example-bom.toml"),
+ include_str!("valid/example.json")
+);
+
+#[cfg(any())]
+test!(
+ datetime_truncate,
+ include_str!("valid/datetime-truncate.toml"),
+ include_str!("valid/datetime-truncate.json")
+);
+test!(
+ key_quote_newline,
+ include_str!("valid/key-quote-newline.toml"),
+ include_str!("valid/key-quote-newline.json")
+);
+test!(
+ table_array_nest_no_keys,
+ include_str!("valid/table-array-nest-no-keys.toml"),
+ include_str!("valid/table-array-nest-no-keys.json")
+);
+test!(
+ dotted_keys,
+ include_str!("valid/dotted-keys.toml"),
+ include_str!("valid/dotted-keys.json")
+);
+
+test!(
+ quote_surrounded_value,
+ include_str!("valid/quote-surrounded-value.toml"),
+ include_str!("valid/quote-surrounded-value.json")
+);
+
+test!(
+ float_exponent,
+ include_str!("valid/float-exponent.toml"),
+ include_str!("valid/float-exponent.json")
+);
+
+test!(
+ string_delim_end,
+ include_str!("valid/string-delim-end.toml"),
+ include_str!("valid/string-delim-end.json")
+);
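
Each test! invocation above pairs a .toml fixture with a .json fixture in the toml-test "type"/"value" encoding, and run() additionally round-trips the parsed document through basic_toml::to_string. A minimal sketch of that round-trip in isolation, using an inline document modeled on the fixtures rather than one of the checked-in files:

use serde_json::Value;

fn main() {
    // Illustrative document; not one of the fixtures under tests/valid/.
    let toml = "[numtheory]\nboring = false\nperfection = [6, 28, 496]\n";

    // Parse, serialize back to TOML, and parse again; both values must
    // agree, mirroring the round-trip assertion in run().
    let parsed: Value = basic_toml::from_str(toml).unwrap();
    let reserialized = basic_toml::to_string(&parsed).unwrap();
    let reparsed: Value = basic_toml::from_str(&reserialized).unwrap();
    assert_eq!(parsed, reparsed);
}
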
diff --git a/vendor/basic-toml/tests/valid/array-empty.json b/vendor/basic-toml/tests/valid/array-empty.json
new file mode 100644
index 000000000..2fbf2567f
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/array-empty.json
@@ -0,0 +1,11 @@
+{
+ "thevoid": { "type": "array", "value": [
+ {"type": "array", "value": [
+ {"type": "array", "value": [
+ {"type": "array", "value": [
+ {"type": "array", "value": []}
+ ]}
+ ]}
+ ]}
+ ]}
+}
diff --git a/vendor/basic-toml/tests/valid/array-empty.toml b/vendor/basic-toml/tests/valid/array-empty.toml
new file mode 100644
index 000000000..fa58dc63d
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/array-empty.toml
@@ -0,0 +1 @@
+thevoid = [[[[[]]]]]
diff --git a/vendor/basic-toml/tests/valid/array-mixed-types-arrays-and-ints.json b/vendor/basic-toml/tests/valid/array-mixed-types-arrays-and-ints.json
new file mode 100644
index 000000000..10074ec86
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/array-mixed-types-arrays-and-ints.json
@@ -0,0 +1,11 @@
+{
+ "arrays-and-ints": {
+ "type": "array",
+ "value": [
+ {"type": "integer", "value": "1"},
+ {"type": "array", "value": [
+ { "type": "string", "value":"Arrays are not integers."}
+ ]}
+ ]
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/array-mixed-types-arrays-and-ints.toml b/vendor/basic-toml/tests/valid/array-mixed-types-arrays-and-ints.toml
new file mode 100644
index 000000000..051ec7313
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/array-mixed-types-arrays-and-ints.toml
@@ -0,0 +1 @@
+arrays-and-ints = [1, ["Arrays are not integers."]]
diff --git a/vendor/basic-toml/tests/valid/array-mixed-types-ints-and-floats.json b/vendor/basic-toml/tests/valid/array-mixed-types-ints-and-floats.json
new file mode 100644
index 000000000..c90665ead
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/array-mixed-types-ints-and-floats.json
@@ -0,0 +1,9 @@
+{
+ "ints-and-floats": {
+ "type": "array",
+ "value": [
+ {"type": "integer", "value": "1"},
+ {"type": "float", "value": "1.1"}
+ ]
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/array-mixed-types-ints-and-floats.toml b/vendor/basic-toml/tests/valid/array-mixed-types-ints-and-floats.toml
new file mode 100644
index 000000000..a5aa9b7a0
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/array-mixed-types-ints-and-floats.toml
@@ -0,0 +1 @@
+ints-and-floats = [1, 1.1]
diff --git a/vendor/basic-toml/tests/valid/array-mixed-types-strings-and-ints.json b/vendor/basic-toml/tests/valid/array-mixed-types-strings-and-ints.json
new file mode 100644
index 000000000..8ae322ed4
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/array-mixed-types-strings-and-ints.json
@@ -0,0 +1,9 @@
+{
+ "strings-and-ints": {
+ "type": "array",
+ "value": [
+ {"type": "string", "value": "hi"},
+ {"type": "integer", "value": "42"}
+ ]
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/array-mixed-types-strings-and-ints.toml b/vendor/basic-toml/tests/valid/array-mixed-types-strings-and-ints.toml
new file mode 100644
index 000000000..f34830805
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/array-mixed-types-strings-and-ints.toml
@@ -0,0 +1 @@
+strings-and-ints = ["hi", 42]
diff --git a/vendor/basic-toml/tests/valid/array-nospaces.json b/vendor/basic-toml/tests/valid/array-nospaces.json
new file mode 100644
index 000000000..1833d61c5
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/array-nospaces.json
@@ -0,0 +1,10 @@
+{
+ "ints": {
+ "type": "array",
+ "value": [
+ {"type": "integer", "value": "1"},
+ {"type": "integer", "value": "2"},
+ {"type": "integer", "value": "3"}
+ ]
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/array-nospaces.toml b/vendor/basic-toml/tests/valid/array-nospaces.toml
new file mode 100644
index 000000000..66189367f
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/array-nospaces.toml
@@ -0,0 +1 @@
+ints = [1,2,3]
diff --git a/vendor/basic-toml/tests/valid/arrays-hetergeneous.json b/vendor/basic-toml/tests/valid/arrays-hetergeneous.json
new file mode 100644
index 000000000..478fa5c70
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/arrays-hetergeneous.json
@@ -0,0 +1,19 @@
+{
+ "mixed": {
+ "type": "array",
+ "value": [
+ {"type": "array", "value": [
+ {"type": "integer", "value": "1"},
+ {"type": "integer", "value": "2"}
+ ]},
+ {"type": "array", "value": [
+ {"type": "string", "value": "a"},
+ {"type": "string", "value": "b"}
+ ]},
+ {"type": "array", "value": [
+ {"type": "float", "value": "1.1"},
+ {"type": "float", "value": "2.1"}
+ ]}
+ ]
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/arrays-hetergeneous.toml b/vendor/basic-toml/tests/valid/arrays-hetergeneous.toml
new file mode 100644
index 000000000..a246fcf1d
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/arrays-hetergeneous.toml
@@ -0,0 +1 @@
+mixed = [[1, 2], ["a", "b"], [1.1, 2.1]]
diff --git a/vendor/basic-toml/tests/valid/arrays-nested.json b/vendor/basic-toml/tests/valid/arrays-nested.json
new file mode 100644
index 000000000..d21920cc3
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/arrays-nested.json
@@ -0,0 +1,13 @@
+{
+ "nest": {
+ "type": "array",
+ "value": [
+ {"type": "array", "value": [
+ {"type": "string", "value": "a"}
+ ]},
+ {"type": "array", "value": [
+ {"type": "string", "value": "b"}
+ ]}
+ ]
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/arrays-nested.toml b/vendor/basic-toml/tests/valid/arrays-nested.toml
new file mode 100644
index 000000000..ce3302249
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/arrays-nested.toml
@@ -0,0 +1 @@
+nest = [["a"], ["b"]]
diff --git a/vendor/basic-toml/tests/valid/arrays.json b/vendor/basic-toml/tests/valid/arrays.json
new file mode 100644
index 000000000..58aedbccb
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/arrays.json
@@ -0,0 +1,34 @@
+{
+ "ints": {
+ "type": "array",
+ "value": [
+ {"type": "integer", "value": "1"},
+ {"type": "integer", "value": "2"},
+ {"type": "integer", "value": "3"}
+ ]
+ },
+ "floats": {
+ "type": "array",
+ "value": [
+ {"type": "float", "value": "1.1"},
+ {"type": "float", "value": "2.1"},
+ {"type": "float", "value": "3.1"}
+ ]
+ },
+ "strings": {
+ "type": "array",
+ "value": [
+ {"type": "string", "value": "a"},
+ {"type": "string", "value": "b"},
+ {"type": "string", "value": "c"}
+ ]
+ },
+ "dates": {
+ "type": "array",
+ "value": [
+ {"type": "datetime", "value": "1987-07-05T17:45:00Z"},
+ {"type": "datetime", "value": "1979-05-27T07:32:00Z"},
+ {"type": "datetime", "value": "2006-06-01T11:00:00Z"}
+ ]
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/arrays.toml b/vendor/basic-toml/tests/valid/arrays.toml
new file mode 100644
index 000000000..c435f57b6
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/arrays.toml
@@ -0,0 +1,8 @@
+ints = [1, 2, 3]
+floats = [1.1, 2.1, 3.1]
+strings = ["a", "b", "c"]
+dates = [
+ 1987-07-05T17:45:00Z,
+ 1979-05-27T07:32:00Z,
+ 2006-06-01T11:00:00Z,
+]
diff --git a/vendor/basic-toml/tests/valid/bool.json b/vendor/basic-toml/tests/valid/bool.json
new file mode 100644
index 000000000..ae368e949
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/bool.json
@@ -0,0 +1,4 @@
+{
+ "f": {"type": "bool", "value": "false"},
+ "t": {"type": "bool", "value": "true"}
+}
diff --git a/vendor/basic-toml/tests/valid/bool.toml b/vendor/basic-toml/tests/valid/bool.toml
new file mode 100644
index 000000000..a8a829b34
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/bool.toml
@@ -0,0 +1,2 @@
+t = true
+f = false
diff --git a/vendor/basic-toml/tests/valid/comments-everywhere.json b/vendor/basic-toml/tests/valid/comments-everywhere.json
new file mode 100644
index 000000000..e69a2e958
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/comments-everywhere.json
@@ -0,0 +1,12 @@
+{
+ "group": {
+ "answer": {"type": "integer", "value": "42"},
+ "more": {
+ "type": "array",
+ "value": [
+ {"type": "integer", "value": "42"},
+ {"type": "integer", "value": "42"}
+ ]
+ }
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/comments-everywhere.toml b/vendor/basic-toml/tests/valid/comments-everywhere.toml
new file mode 100644
index 000000000..a13951d60
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/comments-everywhere.toml
@@ -0,0 +1,24 @@
+# Top comment.
+ # Top comment.
+# Top comment.
+
+# [no-extraneous-groups-please]
+
+[group] # Comment
+answer = 42 # Comment
+# no-extraneous-keys-please = 999
+# In between comment.
+more = [ # Comment
+ # What about multiple # comments?
+ # Can you handle it?
+ #
+ # Evil.
+# Evil.
+ 42, 42, # Comments within arrays are fun.
+ # What about multiple # comments?
+ # Can you handle it?
+ #
+ # Evil.
+# Evil.
+# ] Did I fool you?
+] # Hopefully not.
diff --git a/vendor/basic-toml/tests/valid/datetime-truncate.json b/vendor/basic-toml/tests/valid/datetime-truncate.json
new file mode 100644
index 000000000..8c512e10c
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/datetime-truncate.json
@@ -0,0 +1,6 @@
+{
+ "bestdayever": {
+ "type": "datetime",
+ "value": "1987-07-05T17:45:00.123456789Z"
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/datetime-truncate.toml b/vendor/basic-toml/tests/valid/datetime-truncate.toml
new file mode 100644
index 000000000..05de84105
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/datetime-truncate.toml
@@ -0,0 +1 @@
+bestdayever = 1987-07-05T17:45:00.123456789012345Z
diff --git a/vendor/basic-toml/tests/valid/datetime.json b/vendor/basic-toml/tests/valid/datetime.json
new file mode 100644
index 000000000..2ca93ce96
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/datetime.json
@@ -0,0 +1,3 @@
+{
+ "bestdayever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"}
+}
diff --git a/vendor/basic-toml/tests/valid/datetime.toml b/vendor/basic-toml/tests/valid/datetime.toml
new file mode 100644
index 000000000..2e993407d
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/datetime.toml
@@ -0,0 +1 @@
+bestdayever = 1987-07-05T17:45:00Z
diff --git a/vendor/basic-toml/tests/valid/dotted-keys.json b/vendor/basic-toml/tests/valid/dotted-keys.json
new file mode 100644
index 000000000..cf9dd048f
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/dotted-keys.json
@@ -0,0 +1,34 @@
+{
+ "a": {
+ "b": {
+ "type": "integer",
+ "value": "123"
+ }
+ },
+ "table": {
+ "a": {
+ "b": {
+ "c": {
+ "type": "integer",
+ "value": "1"
+ },
+ "d": {
+ "type": "integer",
+ "value": "2"
+ }
+ }
+ },
+ "in": {
+ "type": {
+ "color": {
+ "type": "string",
+ "value": "blue"
+ },
+ "name": {
+ "type": "string",
+ "value": "cat"
+ }
+ }
+ }
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/dotted-keys.toml b/vendor/basic-toml/tests/valid/dotted-keys.toml
new file mode 100644
index 000000000..234d64c8d
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/dotted-keys.toml
@@ -0,0 +1,7 @@
+a.b = 123
+
+[table]
+a.b.c = 1
+a . b . d = 2
+
+in = { type.name = "cat", type.color = "blue" }
diff --git a/vendor/basic-toml/tests/valid/empty.json b/vendor/basic-toml/tests/valid/empty.json
new file mode 100644
index 000000000..0967ef424
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/empty.json
@@ -0,0 +1 @@
+{}
diff --git a/vendor/basic-toml/tests/valid/empty.toml b/vendor/basic-toml/tests/valid/empty.toml
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/empty.toml
diff --git a/vendor/basic-toml/tests/valid/example-bom.toml b/vendor/basic-toml/tests/valid/example-bom.toml
new file mode 100644
index 000000000..fb5ac815c
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/example-bom.toml
@@ -0,0 +1,5 @@
+best-day-ever = 1987-07-05T17:45:00Z
+
+[numtheory]
+boring = false
+perfection = [6, 28, 496]
diff --git a/vendor/basic-toml/tests/valid/example-v0.3.0.json b/vendor/basic-toml/tests/valid/example-v0.3.0.json
new file mode 100644
index 000000000..1d9dcb581
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/example-v0.3.0.json
@@ -0,0 +1 @@
+{"Array":{"key1":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key2":{"type":"array","value":[{"type":"string","value":"red"},{"type":"string","value":"yellow"},{"type":"string","value":"green"}]},"key3":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"integer","value":"3"},{"type":"integer","value":"4"},{"type":"integer","value":"5"}]}]},"key4":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"string","value":"a"},{"type":"string","value":"b"},{"type":"string","value":"c"}]}]},"key5":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key6":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}},"Booleans":{"False":{"type":"bool","value":"false"},"True":{"type":"bool","value":"true"}},"Datetime":{"key1":{"type":"datetime","value":"1979-05-27T07:32:00Z"}},"Float":{"both":{},"exponent":{},"fractional":{"key1":{"type":"float","value":"1.0"},"key2":{"type":"float","value":"3.1415"},"key3":{"type":"float","value":"-0.01"}}},"Integer":{"key1":{"type":"integer","value":"99"},"key2":{"type":"integer","value":"42"},"key3":{"type":"integer","value":"0"},"key4":{"type":"integer","value":"-17"}},"String":{"Literal":{"Multiline":{"lines":{"type":"string","value":"The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n"},"regex2":{"type":"string","value":"I [dw]on't need \\d{2} apples"}},"quoted":{"type":"string","value":"Tom \"Dubs\" Preston-Werner"},"regex":{"type":"string","value":"\u003c\\i\\c*\\s*\u003e"},"winpath":{"type":"string","value":"C:\\Users\\nodejs\\templates"},"winpath2":{"type":"string","value":"\\\\ServerX\\admin$\\system32\\"}},"Multiline":{"key1":{"type":"string","value":"One\nTwo"},"key2":{"type":"string","value":"One\nTwo"},"key3":{"type":"string","value":"One\nTwo"}},"Multilined":{"Singleline":{"key1":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key2":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key3":{"type":"string","value":"The quick brown fox jumps over the lazy dog."}}},"basic":{"type":"string","value":"I'm a string. \"You can quote me\". Name\u0009José\nLocation\u0009SF."}},"Table":{"key":{"type":"string","value":"value"}},"dog":{"tater":{"type":{"type":"string","value":"pug"}}},"fruit":[{"name":{"type":"string","value":"apple"},"physical":{"color":{"type":"string","value":"red"},"shape":{"type":"string","value":"round"}},"variety":[{"name":{"type":"string","value":"red delicious"}},{"name":{"type":"string","value":"granny smith"}}]},{"name":{"type":"string","value":"banana"},"variety":[{"name":{"type":"string","value":"plantain"}}]}],"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"x":{"y":{"z":{"w":{}}}}}
diff --git a/vendor/basic-toml/tests/valid/example-v0.3.0.toml b/vendor/basic-toml/tests/valid/example-v0.3.0.toml
new file mode 100644
index 000000000..76aacc31a
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/example-v0.3.0.toml
@@ -0,0 +1,182 @@
+# Comment
+# I am a comment. Hear me roar. Roar.
+
+# Table
+# Tables (also known as hash tables or dictionaries) are collections of key/value pairs.
+# They appear in square brackets on a line by themselves.
+
+[Table]
+
+key = "value" # Yeah, you can do this.
+
+# Nested tables are denoted by table names with dots in them. Name your tables whatever crap you please, just don't use #, ., [ or ].
+
+[dog.tater]
+type = "pug"
+
+# You don't need to specify all the super-tables if you don't want to. TOML knows how to do it for you.
+
+# [x] you
+# [x.y] don't
+# [x.y.z] need these
+[x.y.z.w] # for this to work
+
+# String
+# There are four ways to express strings: basic, multi-line basic, literal, and multi-line literal.
+# All strings must contain only valid UTF-8 characters.
+
+[String]
+basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF."
+
+[String.Multiline]
+
+# The following strings are byte-for-byte equivalent:
+key1 = "One\nTwo"
+key2 = """One\nTwo"""
+key3 = """
+One
+Two"""
+
+[String.Multilined.Singleline]
+
+# The following strings are byte-for-byte equivalent:
+key1 = "The quick brown fox jumps over the lazy dog."
+
+key2 = """
+The quick brown \
+
+
+ fox jumps over \
+ the lazy dog."""
+
+key3 = """\
+ The quick brown \
+ fox jumps over \
+ the lazy dog.\
+ """
+
+[String.Literal]
+
+# What you see is what you get.
+winpath = 'C:\Users\nodejs\templates'
+winpath2 = '\\ServerX\admin$\system32\'
+quoted = 'Tom "Dubs" Preston-Werner'
+regex = '<\i\c*\s*>'
+
+
+[String.Literal.Multiline]
+
+regex2 = '''I [dw]on't need \d{2} apples'''
+lines = '''
+The first newline is
+trimmed in raw strings.
+ All other whitespace
+ is preserved.
+'''
+
+# Integer
+# Integers are whole numbers. Positive numbers may be prefixed with a plus sign.
+# Negative numbers are prefixed with a minus sign.
+
+[Integer]
+key1 = +99
+key2 = 42
+key3 = 0
+key4 = -17
+
+# Float
+# A float consists of an integer part (which may be prefixed with a plus or minus sign)
+# followed by a fractional part and/or an exponent part.
+
+[Float.fractional]
+
+# fractional
+key1 = +1.0
+key2 = 3.1415
+key3 = -0.01
+
+[Float.exponent]
+
+# exponent
+#key1 = 5e+22
+#key2 = 1e6
+#key3 = -2E-2
+
+[Float.both]
+
+# both
+#key = 6.626e-34
+
+# Boolean
+# Booleans are just the tokens you're used to. Always lowercase.
+
+[Booleans]
+True = true
+False = false
+
+# Datetime
+# Datetimes are RFC 3339 dates.
+
+[Datetime]
+key1 = 1979-05-27T07:32:00Z
+#key2 = 1979-05-27T00:32:00-07:00
+#key3 = 1979-05-27T00:32:00.999999-07:00
+
+# Array
+# Arrays are square brackets with other primitives inside. Whitespace is ignored. Elements are separated by commas. Data types may not be mixed.
+
+[Array]
+key1 = [ 1, 2, 3 ]
+key2 = [ "red", "yellow", "green" ]
+key3 = [ [ 1, 2 ], [3, 4, 5] ]
+key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok
+
+#Arrays can also be multiline. So in addition to ignoring whitespace, arrays also ignore newlines between the brackets.
+# Terminating commas are ok before the closing bracket.
+
+key5 = [
+ 1, 2, 3
+]
+key6 = [
+ 1,
+ 2, # this is ok
+]
+
+# Array of Tables
+# These can be expressed by using a table name in double brackets.
+# Each table with the same double bracketed name will be an element in the array.
+# The tables are inserted in the order encountered.
+
+[[products]]
+name = "Hammer"
+sku = 738594937
+
+[[products]]
+
+[[products]]
+name = "Nail"
+sku = 284758393
+color = "gray"
+
+
+# You can create nested arrays of tables as well.
+
+[[fruit]]
+ name = "apple"
+
+ [fruit.physical]
+ color = "red"
+ shape = "round"
+
+ [[fruit.variety]]
+ name = "red delicious"
+
+ [[fruit.variety]]
+ name = "granny smith"
+
+[[fruit]]
+ name = "banana"
+
+ [[fruit.variety]]
+ name = "plantain"
+
diff --git a/vendor/basic-toml/tests/valid/example-v0.4.0.json b/vendor/basic-toml/tests/valid/example-v0.4.0.json
new file mode 100644
index 000000000..d5cac343a
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/example-v0.4.0.json
@@ -0,0 +1 @@
+{"array":{"key1":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key2":{"type":"array","value":[{"type":"string","value":"red"},{"type":"string","value":"yellow"},{"type":"string","value":"green"}]},"key3":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"integer","value":"3"},{"type":"integer","value":"4"},{"type":"integer","value":"5"}]}]},"key4":{"type":"array","value":[{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]},{"type":"array","value":[{"type":"string","value":"a"},{"type":"string","value":"b"},{"type":"string","value":"c"}]}]},"key5":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"},{"type":"integer","value":"3"}]},"key6":{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}},"boolean":{"False":{"type":"bool","value":"false"},"True":{"type":"bool","value":"true"}},"datetime":{},"float":{"both":{},"exponent":{},"fractional":{"key1":{"type":"float","value":"1.0"},"key2":{"type":"float","value":"3.1415"},"key3":{"type":"float","value":"-0.01"}},"underscores":{}},"fruit":[{"name":{"type":"string","value":"apple"},"physical":{"color":{"type":"string","value":"red"},"shape":{"type":"string","value":"round"}},"variety":[{"name":{"type":"string","value":"red delicious"}},{"name":{"type":"string","value":"granny smith"}}]},{"name":{"type":"string","value":"banana"},"variety":[{"name":{"type":"string","value":"plantain"}}]}],"integer":{"key1":{"type":"integer","value":"99"},"key2":{"type":"integer","value":"42"},"key3":{"type":"integer","value":"0"},"key4":{"type":"integer","value":"-17"},"underscores":{"key1":{"type":"integer","value":"1000"},"key2":{"type":"integer","value":"5349221"},"key3":{"type":"integer","value":"12345"}}},"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"string":{"basic":{"basic":{"type":"string","value":"I'm a string. \"You can quote me\". Name\u0009José\nLocation\u0009SF."}},"literal":{"multiline":{"lines":{"type":"string","value":"The first newline is\ntrimmed in raw strings.\n All other whitespace\n is preserved.\n"},"regex2":{"type":"string","value":"I [dw]on't need \\d{2} apples"}},"quoted":{"type":"string","value":"Tom \"Dubs\" Preston-Werner"},"regex":{"type":"string","value":"\u003c\\i\\c*\\s*\u003e"},"winpath":{"type":"string","value":"C:\\Users\\nodejs\\templates"},"winpath2":{"type":"string","value":"\\\\ServerX\\admin$\\system32\\"}},"multiline":{"continued":{"key1":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key2":{"type":"string","value":"The quick brown fox jumps over the lazy dog."},"key3":{"type":"string","value":"The quick brown fox jumps over the lazy dog."}},"key1":{"type":"string","value":"One\nTwo"},"key2":{"type":"string","value":"One\nTwo"},"key3":{"type":"string","value":"One\nTwo"}}},"table":{"inline":{"name":{"first":{"type":"string","value":"Tom"},"last":{"type":"string","value":"Preston-Werner"}},"point":{"x":{"type":"integer","value":"1"},"y":{"type":"integer","value":"2"}}},"key":{"type":"string","value":"value"},"subtable":{"key":{"type":"string","value":"another value"}}},"x":{"y":{"z":{"w":{}}}}}
diff --git a/vendor/basic-toml/tests/valid/example-v0.4.0.toml b/vendor/basic-toml/tests/valid/example-v0.4.0.toml
new file mode 100644
index 000000000..69f1c1b0f
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/example-v0.4.0.toml
@@ -0,0 +1,236 @@
+################################################################################
+## Comment
+
+# Speak your mind with the hash symbol. They go from the symbol to the end of
+# the line.
+
+
+################################################################################
+## Table
+
+# Tables (also known as hash tables or dictionaries) are collections of
+# key/value pairs. They appear in square brackets on a line by themselves.
+
+[table]
+
+key = "value" # Yeah, you can do this.
+
+# Nested tables are denoted by table names with dots in them. Name your tables
+# whatever crap you please, just don't use #, ., [ or ].
+
+[table.subtable]
+
+key = "another value"
+
+# You don't need to specify all the super-tables if you don't want to. TOML
+# knows how to do it for you.
+
+# [x] you
+# [x.y] don't
+# [x.y.z] need these
+[x.y.z.w] # for this to work
+
+
+################################################################################
+## Inline Table
+
+# Inline tables provide a more compact syntax for expressing tables. They are
+# especially useful for grouped data that can otherwise quickly become verbose.
+# Inline tables are enclosed in curly braces `{` and `}`. No newlines are
+# allowed between the curly braces unless they are valid within a value.
+
+[table.inline]
+
+name = { first = "Tom", last = "Preston-Werner" }
+point = { x = 1, y = 2 }
+
+
+################################################################################
+## String
+
+# There are four ways to express strings: basic, multi-line basic, literal, and
+# multi-line literal. All strings must contain only valid UTF-8 characters.
+
+[string.basic]
+
+basic = "I'm a string. \"You can quote me\". Name\tJos\u00E9\nLocation\tSF."
+
+[string.multiline]
+
+# The following strings are byte-for-byte equivalent:
+key1 = "One\nTwo"
+key2 = """One\nTwo"""
+key3 = """
+One
+Two"""
+
+[string.multiline.continued]
+
+# The following strings are byte-for-byte equivalent:
+key1 = "The quick brown fox jumps over the lazy dog."
+
+key2 = """
+The quick brown \
+
+
+ fox jumps over \
+ the lazy dog."""
+
+key3 = """\
+ The quick brown \
+ fox jumps over \
+ the lazy dog.\
+ """
+
+[string.literal]
+
+# What you see is what you get.
+winpath = 'C:\Users\nodejs\templates'
+winpath2 = '\\ServerX\admin$\system32\'
+quoted = 'Tom "Dubs" Preston-Werner'
+regex = '<\i\c*\s*>'
+
+
+[string.literal.multiline]
+
+regex2 = '''I [dw]on't need \d{2} apples'''
+lines = '''
+The first newline is
+trimmed in raw strings.
+ All other whitespace
+ is preserved.
+'''
+
+
+################################################################################
+## Integer
+
+# Integers are whole numbers. Positive numbers may be prefixed with a plus sign.
+# Negative numbers are prefixed with a minus sign.
+
+[integer]
+
+key1 = +99
+key2 = 42
+key3 = 0
+key4 = -17
+
+[integer.underscores]
+
+# For large numbers, you may use underscores to enhance readability. Each
+# underscore must be surrounded by at least one digit.
+key1 = 1_000
+key2 = 5_349_221
+key3 = 1_2_3_4_5 # valid but inadvisable
+
+
+################################################################################
+## Float
+
+# A float consists of an integer part (which may be prefixed with a plus or
+# minus sign) followed by a fractional part and/or an exponent part.
+
+[float.fractional]
+
+key1 = +1.0
+key2 = 3.1415
+key3 = -0.01
+
+[float.exponent]
+
+[float.both]
+
+[float.underscores]
+
+
+################################################################################
+## Boolean
+
+# Booleans are just the tokens you're used to. Always lowercase.
+
+[boolean]
+
+True = true
+False = false
+
+
+################################################################################
+## Datetime
+
+# Datetimes are RFC 3339 dates.
+
+[datetime]
+
+#key1 = 1979-05-27T07:32:00Z
+#key2 = 1979-05-27T00:32:00-07:00
+#key3 = 1979-05-27T00:32:00.999999-07:00
+
+
+################################################################################
+## Array
+
+# Arrays are square brackets with other primitives inside. Whitespace is
+# ignored. Elements are separated by commas. Since 2019-11-06 data types can be
+# mixed.
+
+[array]
+
+key1 = [ 1, 2, 3 ]
+key2 = [ "red", "yellow", "green" ]
+key3 = [ [ 1, 2 ], [3, 4, 5] ]
+key4 = [ [ 1, 2 ], ["a", "b", "c"] ] # this is ok
+
+# Arrays can also be multiline. So in addition to ignoring whitespace, arrays
+# also ignore newlines between the brackets. Terminating commas are ok before
+# the closing bracket.
+
+key5 = [
+ 1, 2, 3
+]
+key6 = [
+ 1,
+ 2, # this is ok
+]
+
+
+################################################################################
+## Array of Tables
+
+# These can be expressed by using a table name in double brackets. Each table
+# with the same double bracketed name will be an element in the array. The
+# tables are inserted in the order encountered.
+
+[[products]]
+
+name = "Hammer"
+sku = 738594937
+
+[[products]]
+
+[[products]]
+
+name = "Nail"
+sku = 284758393
+color = "gray"
+
+
+# You can create nested arrays of tables as well.
+
+[[fruit]]
+ name = "apple"
+
+ [fruit.physical]
+ color = "red"
+ shape = "round"
+
+ [[fruit.variety]]
+ name = "red delicious"
+
+ [[fruit.variety]]
+ name = "granny smith"
+
+[[fruit]]
+ name = "banana"
+
+ [[fruit.variety]]
+ name = "plantain"
diff --git a/vendor/basic-toml/tests/valid/example.json b/vendor/basic-toml/tests/valid/example.json
new file mode 100644
index 000000000..48aa90784
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/example.json
@@ -0,0 +1,14 @@
+{
+ "best-day-ever": {"type": "datetime", "value": "1987-07-05T17:45:00Z"},
+ "numtheory": {
+ "boring": {"type": "bool", "value": "false"},
+ "perfection": {
+ "type": "array",
+ "value": [
+ {"type": "integer", "value": "6"},
+ {"type": "integer", "value": "28"},
+ {"type": "integer", "value": "496"}
+ ]
+ }
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/example.toml b/vendor/basic-toml/tests/valid/example.toml
new file mode 100644
index 000000000..8cb02e01b
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/example.toml
@@ -0,0 +1,5 @@
+best-day-ever = 1987-07-05T17:45:00Z
+
+[numtheory]
+boring = false
+perfection = [6, 28, 496]
diff --git a/vendor/basic-toml/tests/valid/example2.json b/vendor/basic-toml/tests/valid/example2.json
new file mode 100644
index 000000000..3249a974f
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/example2.json
@@ -0,0 +1 @@
+{"clients":{"data":{"type":"array","value":[{"type":"array","value":[{"type":"string","value":"gamma"},{"type":"string","value":"delta"}]},{"type":"array","value":[{"type":"integer","value":"1"},{"type":"integer","value":"2"}]}]},"hosts":{"type":"array","value":[{"type":"string","value":"alpha"},{"type":"string","value":"omega"}]}},"database":{"connection_max":{"type":"integer","value":"5000"},"enabled":{"type":"bool","value":"true"},"ports":{"type":"array","value":[{"type":"integer","value":"8001"},{"type":"integer","value":"8001"},{"type":"integer","value":"8002"}]},"server":{"type":"string","value":"192.168.1.1"}},"owner":{"bio":{"type":"string","value":"GitHub Cofounder \u0026 CEO\nLikes tater tots and beer."},"dob":{"type":"datetime","value":"1979-05-27T07:32:00Z"},"name":{"type":"string","value":"Tom Preston-Werner"},"organization":{"type":"string","value":"GitHub"}},"products":[{"name":{"type":"string","value":"Hammer"},"sku":{"type":"integer","value":"738594937"}},{"color":{"type":"string","value":"gray"},"name":{"type":"string","value":"Nail"},"sku":{"type":"integer","value":"284758393"}}],"servers":{"alpha":{"dc":{"type":"string","value":"eqdc10"},"ip":{"type":"string","value":"10.0.0.1"}},"beta":{"country":{"type":"string","value":"中国"},"dc":{"type":"string","value":"eqdc10"},"ip":{"type":"string","value":"10.0.0.2"}}},"title":{"type":"string","value":"TOML Example"}}
diff --git a/vendor/basic-toml/tests/valid/example2.toml b/vendor/basic-toml/tests/valid/example2.toml
new file mode 100644
index 000000000..bc12c9901
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/example2.toml
@@ -0,0 +1,47 @@
+# This is a TOML document. Boom.
+
+title = "TOML Example"
+
+[owner]
+name = "Tom Preston-Werner"
+organization = "GitHub"
+bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
+dob = 1979-05-27T07:32:00Z # First class dates? Why not?
+
+[database]
+server = "192.168.1.1"
+ports = [ 8001, 8001, 8002 ]
+connection_max = 5000
+enabled = true
+
+[servers]
+
+ # You can indent as you please. Tabs or spaces. TOML don't care.
+ [servers.alpha]
+ ip = "10.0.0.1"
+ dc = "eqdc10"
+
+ [servers.beta]
+ ip = "10.0.0.2"
+ dc = "eqdc10"
+ country = "中国" # This should be parsed as UTF-8
+
+[clients]
+data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
+
+# Line breaks are OK when inside arrays
+hosts = [
+ "alpha",
+ "omega"
+]
+
+# Products
+
+ [[products]]
+ name = "Hammer"
+ sku = 738594937
+
+ [[products]]
+ name = "Nail"
+ sku = 284758393
+ color = "gray"
diff --git a/vendor/basic-toml/tests/valid/float-exponent.json b/vendor/basic-toml/tests/valid/float-exponent.json
new file mode 100644
index 000000000..97709b42a
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/float-exponent.json
@@ -0,0 +1,11 @@
+{
+ "lower": {"type": "float", "value": "300.0"},
+ "upper": {"type": "float", "value": "300.0"},
+ "neg": {"type": "float", "value": "0.03"},
+ "pos": {"type": "float", "value": "300.0"},
+ "zero": {"type": "float", "value": "3.0"},
+ "pointlower": {"type": "float", "value": "310.0"},
+ "pointupper": {"type": "float", "value": "310.0"},
+ "prefix-zero-exp": {"type": "float", "value": "1000000.0"},
+ "prefix-zero-exp-plus": {"type": "float", "value": "1000000.0"}
+}
diff --git a/vendor/basic-toml/tests/valid/float-exponent.toml b/vendor/basic-toml/tests/valid/float-exponent.toml
new file mode 100644
index 000000000..5349db368
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/float-exponent.toml
@@ -0,0 +1,9 @@
+lower = 3e2
+upper = 3E2
+neg = 3e-2
+pos = 3E+2
+zero = 3e0
+pointlower = 3.1e2
+pointupper = 3.1E2
+prefix-zero-exp = 1e06
+prefix-zero-exp-plus = 1e+06
diff --git a/vendor/basic-toml/tests/valid/float.json b/vendor/basic-toml/tests/valid/float.json
new file mode 100644
index 000000000..b8a2e9758
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/float.json
@@ -0,0 +1,4 @@
+{
+ "pi": {"type": "float", "value": "3.14"},
+ "negpi": {"type": "float", "value": "-3.14"}
+}
diff --git a/vendor/basic-toml/tests/valid/float.toml b/vendor/basic-toml/tests/valid/float.toml
new file mode 100644
index 000000000..7c528d200
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/float.toml
@@ -0,0 +1,2 @@
+pi = 3.14
+negpi = -3.14
diff --git a/vendor/basic-toml/tests/valid/hard_example.json b/vendor/basic-toml/tests/valid/hard_example.json
new file mode 100644
index 000000000..9762e58ef
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/hard_example.json
@@ -0,0 +1 @@
+{"the":{"hard":{"another_test_string":{"type":"string","value":" Same thing, but with a string #"},"bit#":{"multi_line_array":{"type":"array","value":[{"type":"string","value":"]"}]},"what?":{"type":"string","value":"You don't think some user won't do that?"}},"harder_test_string":{"type":"string","value":" And when \"'s are in the string, along with # \""},"test_array":{"type":"array","value":[{"type":"string","value":"] "},{"type":"string","value":" # "}]},"test_array2":{"type":"array","value":[{"type":"string","value":"Test #11 ]proved that"},{"type":"string","value":"Experiment #9 was a success"}]}},"test_string":{"type":"string","value":"You'll hate me after this - #"}}}
diff --git a/vendor/basic-toml/tests/valid/hard_example.toml b/vendor/basic-toml/tests/valid/hard_example.toml
new file mode 100644
index 000000000..38856c873
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/hard_example.toml
@@ -0,0 +1,33 @@
+# Test file for TOML
+# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate
+# This part you'll really hate
+
+[the]
+test_string = "You'll hate me after this - #" # " Annoying, isn't it?
+
+ [the.hard]
+ test_array = [ "] ", " # "] # ] There you go, parse this!
+ test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ]
+ # You didn't think it'd be as easy as chucking out the last #, did you?
+ another_test_string = " Same thing, but with a string #"
+ harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too"
+ # Things will get harder
+
+ [the.hard."bit#"]
+ "what?" = "You don't think some user won't do that?"
+ multi_line_array = [
+ "]",
+ # ] Oh yes I did
+ ]
+
+# Each of the following keygroups/key value pairs should produce an error. Uncomment them to test
+
+#[error] if you didn't catch this, your parser is broken
+#string = "Anything other than tabs, spaces and newline after a keygroup or key value pair has ended should produce an error unless it is a comment" like this
+#array = [
+# "This might most likely happen in multiline arrays",
+# Like here,
+# "or here,
+# and here"
+# ] End of array comment, forgot the #
+#number = 3.14 pi <--again forgot the #
diff --git a/vendor/basic-toml/tests/valid/implicit-and-explicit-after.json b/vendor/basic-toml/tests/valid/implicit-and-explicit-after.json
new file mode 100644
index 000000000..374bd0934
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/implicit-and-explicit-after.json
@@ -0,0 +1,10 @@
+{
+ "a": {
+ "better": {"type": "integer", "value": "43"},
+ "b": {
+ "c": {
+ "answer": {"type": "integer", "value": "42"}
+ }
+ }
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/implicit-and-explicit-after.toml b/vendor/basic-toml/tests/valid/implicit-and-explicit-after.toml
new file mode 100644
index 000000000..c0e8865b3
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/implicit-and-explicit-after.toml
@@ -0,0 +1,5 @@
+[a.b.c]
+answer = 42
+
+[a]
+better = 43
diff --git a/vendor/basic-toml/tests/valid/implicit-and-explicit-before.json b/vendor/basic-toml/tests/valid/implicit-and-explicit-before.json
new file mode 100644
index 000000000..374bd0934
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/implicit-and-explicit-before.json
@@ -0,0 +1,10 @@
+{
+ "a": {
+ "better": {"type": "integer", "value": "43"},
+ "b": {
+ "c": {
+ "answer": {"type": "integer", "value": "42"}
+ }
+ }
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/implicit-and-explicit-before.toml b/vendor/basic-toml/tests/valid/implicit-and-explicit-before.toml
new file mode 100644
index 000000000..eee68ff51
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/implicit-and-explicit-before.toml
@@ -0,0 +1,5 @@
+[a]
+better = 43
+
+[a.b.c]
+answer = 42
diff --git a/vendor/basic-toml/tests/valid/implicit-groups.json b/vendor/basic-toml/tests/valid/implicit-groups.json
new file mode 100644
index 000000000..fbae7fc71
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/implicit-groups.json
@@ -0,0 +1,9 @@
+{
+ "a": {
+ "b": {
+ "c": {
+ "answer": {"type": "integer", "value": "42"}
+ }
+ }
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/implicit-groups.toml b/vendor/basic-toml/tests/valid/implicit-groups.toml
new file mode 100644
index 000000000..b6333e49d
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/implicit-groups.toml
@@ -0,0 +1,2 @@
+[a.b.c]
+answer = 42
diff --git a/vendor/basic-toml/tests/valid/integer.json b/vendor/basic-toml/tests/valid/integer.json
new file mode 100644
index 000000000..77ecb6ccf
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/integer.json
@@ -0,0 +1,14 @@
+{
+ "answer": {"type": "integer", "value": "42"},
+ "neganswer": {"type": "integer", "value": "-42"},
+
+ "neg_zero": {"type": "integer", "value": "0"},
+ "pos_zero": {"type": "integer", "value": "0"},
+
+ "hex1": {"type": "integer", "value": "3735928559"},
+ "hex2": {"type": "integer", "value": "3735928559"},
+ "hex3": {"type": "integer", "value": "3735928559"},
+ "oct1": {"type": "integer", "value": "342391"},
+ "oct2": {"type": "integer", "value": "493"},
+ "bin1": {"type": "integer", "value": "214"}
+}
diff --git a/vendor/basic-toml/tests/valid/integer.toml b/vendor/basic-toml/tests/valid/integer.toml
new file mode 100644
index 000000000..8362459bc
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/integer.toml
@@ -0,0 +1,18 @@
+answer = 42
+neganswer = -42
+
+neg_zero = -0
+pos_zero = +0
+
+# hexadecimal with prefix `0x`
+hex1 = 0xDEADBEEF
+hex2 = 0xdeadbeef
+hex3 = 0xdead_beef
+
+# octal with prefix `0o`
+oct1 = 0o01234567
+oct2 = 0o755 # useful for Unix file permissions
+
+# binary with prefix `0b`
+bin1 = 0b11010110
+
diff --git a/vendor/basic-toml/tests/valid/key-empty.json b/vendor/basic-toml/tests/valid/key-empty.json
new file mode 100644
index 000000000..99afee47f
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-empty.json
@@ -0,0 +1,3 @@
+{
+ "": {"type": "integer", "value": "1"}
+}
diff --git a/vendor/basic-toml/tests/valid/key-empty.toml b/vendor/basic-toml/tests/valid/key-empty.toml
new file mode 100644
index 000000000..2f6a07c27
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-empty.toml
@@ -0,0 +1 @@
+"" = 1
diff --git a/vendor/basic-toml/tests/valid/key-equals-nospace.json b/vendor/basic-toml/tests/valid/key-equals-nospace.json
new file mode 100644
index 000000000..1f8709ab9
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-equals-nospace.json
@@ -0,0 +1,3 @@
+{
+ "answer": {"type": "integer", "value": "42"}
+}
diff --git a/vendor/basic-toml/tests/valid/key-equals-nospace.toml b/vendor/basic-toml/tests/valid/key-equals-nospace.toml
new file mode 100644
index 000000000..560901c5a
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-equals-nospace.toml
@@ -0,0 +1 @@
+answer=42
diff --git a/vendor/basic-toml/tests/valid/key-quote-newline.json b/vendor/basic-toml/tests/valid/key-quote-newline.json
new file mode 100644
index 000000000..12473e420
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-quote-newline.json
@@ -0,0 +1,3 @@
+{
+ "\n": {"type": "integer", "value": "1"}
+}
diff --git a/vendor/basic-toml/tests/valid/key-quote-newline.toml b/vendor/basic-toml/tests/valid/key-quote-newline.toml
new file mode 100644
index 000000000..a2639bfbb
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-quote-newline.toml
@@ -0,0 +1 @@
+"\n" = 1
diff --git a/vendor/basic-toml/tests/valid/key-space.json b/vendor/basic-toml/tests/valid/key-space.json
new file mode 100644
index 000000000..9d1f76911
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-space.json
@@ -0,0 +1,3 @@
+{
+ "a b": {"type": "integer", "value": "1"}
+}
diff --git a/vendor/basic-toml/tests/valid/key-space.toml b/vendor/basic-toml/tests/valid/key-space.toml
new file mode 100644
index 000000000..f4f36c4f6
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-space.toml
@@ -0,0 +1 @@
+"a b" = 1
diff --git a/vendor/basic-toml/tests/valid/key-special-chars.json b/vendor/basic-toml/tests/valid/key-special-chars.json
new file mode 100644
index 000000000..6550ebda2
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-special-chars.json
@@ -0,0 +1,5 @@
+{
+ "~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'": {
+ "type": "integer", "value": "1"
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/key-special-chars.toml b/vendor/basic-toml/tests/valid/key-special-chars.toml
new file mode 100644
index 000000000..dc43625d2
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-special-chars.toml
@@ -0,0 +1 @@
+"~!@#$^&*()_+-`1234567890[]\\|/?><.,;:'" = 1
diff --git a/vendor/basic-toml/tests/valid/key-with-pound.json b/vendor/basic-toml/tests/valid/key-with-pound.json
new file mode 100644
index 000000000..ee39e1de4
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-with-pound.json
@@ -0,0 +1,3 @@
+{
+ "key#name": {"type": "integer", "value": "5"}
+}
diff --git a/vendor/basic-toml/tests/valid/key-with-pound.toml b/vendor/basic-toml/tests/valid/key-with-pound.toml
new file mode 100644
index 000000000..65b766fd1
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/key-with-pound.toml
@@ -0,0 +1 @@
+"key#name" = 5
diff --git a/vendor/basic-toml/tests/valid/long-float.json b/vendor/basic-toml/tests/valid/long-float.json
new file mode 100644
index 000000000..8ceed4797
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/long-float.json
@@ -0,0 +1,4 @@
+{
+ "longpi": {"type": "float", "value": "3.141592653589793"},
+ "neglongpi": {"type": "float", "value": "-3.141592653589793"}
+}
diff --git a/vendor/basic-toml/tests/valid/long-float.toml b/vendor/basic-toml/tests/valid/long-float.toml
new file mode 100644
index 000000000..9558ae47c
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/long-float.toml
@@ -0,0 +1,2 @@
+longpi = 3.141592653589793
+neglongpi = -3.141592653589793
diff --git a/vendor/basic-toml/tests/valid/long-integer.json b/vendor/basic-toml/tests/valid/long-integer.json
new file mode 100644
index 000000000..16c331ed3
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/long-integer.json
@@ -0,0 +1,4 @@
+{
+ "answer": {"type": "integer", "value": "9223372036854775807"},
+ "neganswer": {"type": "integer", "value": "-9223372036854775808"}
+}
diff --git a/vendor/basic-toml/tests/valid/long-integer.toml b/vendor/basic-toml/tests/valid/long-integer.toml
new file mode 100644
index 000000000..424a13ac2
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/long-integer.toml
@@ -0,0 +1,2 @@
+answer = 9223372036854775807
+neganswer = -9223372036854775808
diff --git a/vendor/basic-toml/tests/valid/multiline-string.json b/vendor/basic-toml/tests/valid/multiline-string.json
new file mode 100644
index 000000000..3223bae31
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/multiline-string.json
@@ -0,0 +1,38 @@
+{
+ "multiline_empty_one": {
+ "type": "string",
+ "value": ""
+ },
+ "multiline_empty_two": {
+ "type": "string",
+ "value": ""
+ },
+ "multiline_empty_three": {
+ "type": "string",
+ "value": ""
+ },
+ "multiline_empty_four": {
+ "type": "string",
+ "value": ""
+ },
+ "multiline_empty_five": {
+ "type": "string",
+ "value": ""
+ },
+ "equivalent_one": {
+ "type": "string",
+ "value": "The quick brown fox jumps over the lazy dog."
+ },
+ "equivalent_two": {
+ "type": "string",
+ "value": "The quick brown fox jumps over the lazy dog."
+ },
+ "equivalent_three": {
+ "type": "string",
+ "value": "The quick brown fox jumps over the lazy dog."
+ },
+ "equivalent_four": {
+ "type": "string",
+ "value": "The quick brown fox jumps over the lazy dog."
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/multiline-string.toml b/vendor/basic-toml/tests/valid/multiline-string.toml
new file mode 100644
index 000000000..2c4237fd8
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/multiline-string.toml
@@ -0,0 +1,34 @@
+multiline_empty_one = """"""
+multiline_empty_two = """
+"""
+multiline_empty_three = """\
+ """
+multiline_empty_four = """\
+ \
+ \
+ """
+multiline_empty_five = """\
+ \
+ \
+ \
+ """
+
+equivalent_one = "The quick brown fox jumps over the lazy dog."
+equivalent_two = """
+The quick brown \
+
+
+ fox jumps over \
+ the lazy dog."""
+
+equivalent_three = """\
+ The quick brown \
+ fox jumps over \
+ the lazy dog.\
+ """
+
+equivalent_four = """\
+ The quick brown \
+ fox jumps over \
+ the lazy dog.\
+ """
diff --git a/vendor/basic-toml/tests/valid/quote-surrounded-value.json b/vendor/basic-toml/tests/valid/quote-surrounded-value.json
new file mode 100644
index 000000000..84495cf21
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/quote-surrounded-value.json
@@ -0,0 +1,10 @@
+{
+ "double": {
+ "type": "string",
+ "value": "\"double quotes here\""
+ },
+ "single": {
+ "type": "string",
+ "value": "'single quotes here'"
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/quote-surrounded-value.toml b/vendor/basic-toml/tests/valid/quote-surrounded-value.toml
new file mode 100644
index 000000000..dc8697e25
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/quote-surrounded-value.toml
@@ -0,0 +1,2 @@
+double = '"double quotes here"'
+single = "'single quotes here'"
diff --git a/vendor/basic-toml/tests/valid/raw-multiline-string.json b/vendor/basic-toml/tests/valid/raw-multiline-string.json
new file mode 100644
index 000000000..b43cce5a2
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/raw-multiline-string.json
@@ -0,0 +1,14 @@
+{
+ "oneline": {
+ "type": "string",
+ "value": "This string has a ' quote character."
+ },
+ "firstnl": {
+ "type": "string",
+ "value": "This string has a ' quote character."
+ },
+ "multiline": {
+ "type": "string",
+ "value": "This string\nhas ' a quote character\nand more than\none newline\nin it."
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/raw-multiline-string.toml b/vendor/basic-toml/tests/valid/raw-multiline-string.toml
new file mode 100644
index 000000000..8094c03e3
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/raw-multiline-string.toml
@@ -0,0 +1,9 @@
+oneline = '''This string has a ' quote character.'''
+firstnl = '''
+This string has a ' quote character.'''
+multiline = '''
+This string
+has ' a quote character
+and more than
+one newline
+in it.'''
diff --git a/vendor/basic-toml/tests/valid/raw-string.json b/vendor/basic-toml/tests/valid/raw-string.json
new file mode 100644
index 000000000..693ab9b54
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/raw-string.json
@@ -0,0 +1,30 @@
+{
+ "backspace": {
+ "type": "string",
+ "value": "This string has a \\b backspace character."
+ },
+ "tab": {
+ "type": "string",
+ "value": "This string has a \\t tab character."
+ },
+ "newline": {
+ "type": "string",
+ "value": "This string has a \\n new line character."
+ },
+ "formfeed": {
+ "type": "string",
+ "value": "This string has a \\f form feed character."
+ },
+ "carriage": {
+ "type": "string",
+ "value": "This string has a \\r carriage return character."
+ },
+ "slash": {
+ "type": "string",
+ "value": "This string has a \\/ slash character."
+ },
+ "backslash": {
+ "type": "string",
+ "value": "This string has a \\\\ backslash character."
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/raw-string.toml b/vendor/basic-toml/tests/valid/raw-string.toml
new file mode 100644
index 000000000..92acd2557
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/raw-string.toml
@@ -0,0 +1,7 @@
+backspace = 'This string has a \b backspace character.'
+tab = 'This string has a \t tab character.'
+newline = 'This string has a \n new line character.'
+formfeed = 'This string has a \f form feed character.'
+carriage = 'This string has a \r carriage return character.'
+slash = 'This string has a \/ slash character.'
+backslash = 'This string has a \\ backslash character.'
diff --git a/vendor/basic-toml/tests/valid/string-delim-end.json b/vendor/basic-toml/tests/valid/string-delim-end.json
new file mode 100644
index 000000000..69b5a0a8a
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/string-delim-end.json
@@ -0,0 +1,14 @@
+{
+ "str1": {
+ "type": "string",
+ "value": "\"This,\" she said, \"is just a pointless statement.\""
+ },
+ "str2": {
+ "type": "string",
+ "value": "foo''bar''"
+ },
+ "str3": {
+ "type": "string",
+ "value": "\"\""
+ }
+}
\ No newline at end of file
diff --git a/vendor/basic-toml/tests/valid/string-delim-end.toml b/vendor/basic-toml/tests/valid/string-delim-end.toml
new file mode 100644
index 000000000..9a4121969
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/string-delim-end.toml
@@ -0,0 +1,3 @@
+str1 = """"This," she said, "is just a pointless statement.""""
+str2 = '''foo''bar'''''
+str3 = """"""""
diff --git a/vendor/basic-toml/tests/valid/string-empty.json b/vendor/basic-toml/tests/valid/string-empty.json
new file mode 100644
index 000000000..6c26d695b
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/string-empty.json
@@ -0,0 +1,6 @@
+{
+ "answer": {
+ "type": "string",
+ "value": ""
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/string-empty.toml b/vendor/basic-toml/tests/valid/string-empty.toml
new file mode 100644
index 000000000..e37e6815b
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/string-empty.toml
@@ -0,0 +1 @@
+answer = ""
diff --git a/vendor/basic-toml/tests/valid/string-escapes.json b/vendor/basic-toml/tests/valid/string-escapes.json
new file mode 100644
index 000000000..e3b2d94ab
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/string-escapes.json
@@ -0,0 +1,58 @@
+{
+ "backspace": {
+ "type": "string",
+ "value": "This string has a \u0008 backspace character."
+ },
+ "tab": {
+ "type": "string",
+ "value": "This string has a \u0009 tab character."
+ },
+ "newline": {
+ "type": "string",
+ "value": "This string has a \u000A new line character."
+ },
+ "formfeed": {
+ "type": "string",
+ "value": "This string has a \u000C form feed character."
+ },
+ "carriage": {
+ "type": "string",
+ "value": "This string has a \u000D carriage return character."
+ },
+ "quote": {
+ "type": "string",
+ "value": "This string has a \u0022 quote character."
+ },
+ "slash": {
+ "type": "string",
+ "value": "This string has a \u002F slash character."
+ },
+ "backslash": {
+ "type": "string",
+ "value": "This string has a \u005C backslash character."
+ },
+ "notunicode1": {
+ "type": "string",
+ "value": "This string does not have a unicode \\u escape."
+ },
+ "notunicode2": {
+ "type": "string",
+ "value": "This string does not have a unicode \u005Cu escape."
+ },
+ "notunicode3": {
+ "type": "string",
+ "value": "This string does not have a unicode \\u0075 escape."
+ },
+ "notunicode4": {
+ "type": "string",
+ "value": "This string does not have a unicode \\\u0075 escape."
+ },
+ "delete": {
+ "type": "string",
+ "value": "This string has a \u007f delete control code."
+ },
+ "unitseparator": {
+ "type": "string",
+ "value": "This string has a \u001f unit separator control code."
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/string-escapes.toml b/vendor/basic-toml/tests/valid/string-escapes.toml
new file mode 100644
index 000000000..152c6ad96
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/string-escapes.toml
@@ -0,0 +1,14 @@
+backspace = "This string has a \b backspace character."
+tab = "This string has a \t tab character."
+newline = "This string has a \n new line character."
+formfeed = "This string has a \f form feed character."
+carriage = "This string has a \r carriage return character."
+quote = "This string has a \" quote character."
+slash = "This string has a / slash character."
+backslash = "This string has a \\ backslash character."
+notunicode1 = "This string does not have a unicode \\u escape."
+notunicode2 = "This string does not have a unicode \u005Cu escape."
+notunicode3 = "This string does not have a unicode \\u0075 escape."
+notunicode4 = "This string does not have a unicode \\\u0075 escape."
+delete = "This string has a \u007F delete control code."
+unitseparator = "This string has a \u001F unit separator control code."
diff --git a/vendor/basic-toml/tests/valid/string-simple.json b/vendor/basic-toml/tests/valid/string-simple.json
new file mode 100644
index 000000000..2e05f99b4
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/string-simple.json
@@ -0,0 +1,6 @@
+{
+ "answer": {
+ "type": "string",
+ "value": "You are not drinking enough whisky."
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/string-simple.toml b/vendor/basic-toml/tests/valid/string-simple.toml
new file mode 100644
index 000000000..e17ade623
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/string-simple.toml
@@ -0,0 +1 @@
+answer = "You are not drinking enough whisky."
diff --git a/vendor/basic-toml/tests/valid/string-with-pound.json b/vendor/basic-toml/tests/valid/string-with-pound.json
new file mode 100644
index 000000000..33cdc9c4b
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/string-with-pound.json
@@ -0,0 +1,7 @@
+{
+ "pound": {"type": "string", "value": "We see no # comments here."},
+ "poundcomment": {
+ "type": "string",
+ "value": "But there are # some comments here."
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/string-with-pound.toml b/vendor/basic-toml/tests/valid/string-with-pound.toml
new file mode 100644
index 000000000..5fd87466d
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/string-with-pound.toml
@@ -0,0 +1,2 @@
+pound = "We see no # comments here."
+poundcomment = "But there are # some comments here." # Did I # mess you up?
diff --git a/vendor/basic-toml/tests/valid/table-array-implicit.json b/vendor/basic-toml/tests/valid/table-array-implicit.json
new file mode 100644
index 000000000..32e464012
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-array-implicit.json
@@ -0,0 +1,7 @@
+{
+ "albums": {
+ "songs": [
+ {"name": {"type": "string", "value": "Glory Days"}}
+ ]
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/table-array-implicit.toml b/vendor/basic-toml/tests/valid/table-array-implicit.toml
new file mode 100644
index 000000000..3157ac981
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-array-implicit.toml
@@ -0,0 +1,2 @@
+[[albums.songs]]
+name = "Glory Days"
diff --git a/vendor/basic-toml/tests/valid/table-array-many.json b/vendor/basic-toml/tests/valid/table-array-many.json
new file mode 100644
index 000000000..84df2dabb
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-array-many.json
@@ -0,0 +1,16 @@
+{
+ "people": [
+ {
+ "first_name": {"type": "string", "value": "Bruce"},
+ "last_name": {"type": "string", "value": "Springsteen"}
+ },
+ {
+ "first_name": {"type": "string", "value": "Eric"},
+ "last_name": {"type": "string", "value": "Clapton"}
+ },
+ {
+ "first_name": {"type": "string", "value": "Bob"},
+ "last_name": {"type": "string", "value": "Seger"}
+ }
+ ]
+}
diff --git a/vendor/basic-toml/tests/valid/table-array-many.toml b/vendor/basic-toml/tests/valid/table-array-many.toml
new file mode 100644
index 000000000..46062beb8
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-array-many.toml
@@ -0,0 +1,11 @@
+[[people]]
+first_name = "Bruce"
+last_name = "Springsteen"
+
+[[people]]
+first_name = "Eric"
+last_name = "Clapton"
+
+[[people]]
+first_name = "Bob"
+last_name = "Seger"
diff --git a/vendor/basic-toml/tests/valid/table-array-nest-no-keys.json b/vendor/basic-toml/tests/valid/table-array-nest-no-keys.json
new file mode 100644
index 000000000..7537b1a19
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-array-nest-no-keys.json
@@ -0,0 +1,14 @@
+{
+ "albums": [
+ {
+ "songs": [{}, {}]
+ }
+ ],
+ "artists": [
+ {
+ "home": {
+ "address": {}
+ }
+ }
+ ]
+}
diff --git a/vendor/basic-toml/tests/valid/table-array-nest-no-keys.toml b/vendor/basic-toml/tests/valid/table-array-nest-no-keys.toml
new file mode 100644
index 000000000..ad6eb1063
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-array-nest-no-keys.toml
@@ -0,0 +1,6 @@
+[[ albums ]]
+ [[ albums.songs ]]
+ [[ albums.songs ]]
+
+[[ artists ]]
+ [ artists.home.address ]
diff --git a/vendor/basic-toml/tests/valid/table-array-nest.json b/vendor/basic-toml/tests/valid/table-array-nest.json
new file mode 100644
index 000000000..c117afa40
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-array-nest.json
@@ -0,0 +1,18 @@
+{
+ "albums": [
+ {
+ "name": {"type": "string", "value": "Born to Run"},
+ "songs": [
+ {"name": {"type": "string", "value": "Jungleland"}},
+ {"name": {"type": "string", "value": "Meeting Across the River"}}
+ ]
+ },
+ {
+ "name": {"type": "string", "value": "Born in the USA"},
+ "songs": [
+ {"name": {"type": "string", "value": "Glory Days"}},
+ {"name": {"type": "string", "value": "Dancing in the Dark"}}
+ ]
+ }
+ ]
+}
diff --git a/vendor/basic-toml/tests/valid/table-array-nest.toml b/vendor/basic-toml/tests/valid/table-array-nest.toml
new file mode 100644
index 000000000..d659a3d94
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-array-nest.toml
@@ -0,0 +1,17 @@
+[[albums]]
+name = "Born to Run"
+
+ [[albums.songs]]
+ name = "Jungleland"
+
+ [[albums.songs]]
+ name = "Meeting Across the River"
+
+[[albums]]
+name = "Born in the USA"
+
+ [[albums.songs]]
+ name = "Glory Days"
+
+ [[albums.songs]]
+ name = "Dancing in the Dark"
diff --git a/vendor/basic-toml/tests/valid/table-array-one.json b/vendor/basic-toml/tests/valid/table-array-one.json
new file mode 100644
index 000000000..d75faaeb2
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-array-one.json
@@ -0,0 +1,8 @@
+{
+ "people": [
+ {
+ "first_name": {"type": "string", "value": "Bruce"},
+ "last_name": {"type": "string", "value": "Springsteen"}
+ }
+ ]
+}
diff --git a/vendor/basic-toml/tests/valid/table-array-one.toml b/vendor/basic-toml/tests/valid/table-array-one.toml
new file mode 100644
index 000000000..cd7e1b690
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-array-one.toml
@@ -0,0 +1,3 @@
+[[people]]
+first_name = "Bruce"
+last_name = "Springsteen"
diff --git a/vendor/basic-toml/tests/valid/table-empty.json b/vendor/basic-toml/tests/valid/table-empty.json
new file mode 100644
index 000000000..6f3873af6
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-empty.json
@@ -0,0 +1,3 @@
+{
+ "a": {}
+}
diff --git a/vendor/basic-toml/tests/valid/table-empty.toml b/vendor/basic-toml/tests/valid/table-empty.toml
new file mode 100644
index 000000000..8bb6a0aa0
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-empty.toml
@@ -0,0 +1 @@
+[a]
diff --git a/vendor/basic-toml/tests/valid/table-multi-empty.json b/vendor/basic-toml/tests/valid/table-multi-empty.json
new file mode 100644
index 000000000..a6e17c926
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-multi-empty.json
@@ -0,0 +1,5 @@
+{
+ "a": { "b": {} },
+ "b": {},
+ "c": { "a": {} }
+}
diff --git a/vendor/basic-toml/tests/valid/table-multi-empty.toml b/vendor/basic-toml/tests/valid/table-multi-empty.toml
new file mode 100644
index 000000000..2266ed2d4
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-multi-empty.toml
@@ -0,0 +1,5 @@
+[a]
+[a.b]
+[b]
+[c]
+[c.a]
diff --git a/vendor/basic-toml/tests/valid/table-sub-empty.json b/vendor/basic-toml/tests/valid/table-sub-empty.json
new file mode 100644
index 000000000..97877708e
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-sub-empty.json
@@ -0,0 +1,3 @@
+{
+ "a": { "b": {} }
+}
diff --git a/vendor/basic-toml/tests/valid/table-sub-empty.toml b/vendor/basic-toml/tests/valid/table-sub-empty.toml
new file mode 100644
index 000000000..70b7fe11c
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-sub-empty.toml
@@ -0,0 +1,2 @@
+[a]
+[a.b]
diff --git a/vendor/basic-toml/tests/valid/table-whitespace.json b/vendor/basic-toml/tests/valid/table-whitespace.json
new file mode 100644
index 000000000..3a73ec864
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-whitespace.json
@@ -0,0 +1,3 @@
+{
+ "valid key": {}
+}
diff --git a/vendor/basic-toml/tests/valid/table-whitespace.toml b/vendor/basic-toml/tests/valid/table-whitespace.toml
new file mode 100644
index 000000000..daf881d13
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-whitespace.toml
@@ -0,0 +1 @@
+["valid key"]
diff --git a/vendor/basic-toml/tests/valid/table-with-pound.json b/vendor/basic-toml/tests/valid/table-with-pound.json
new file mode 100644
index 000000000..5e594e419
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-with-pound.json
@@ -0,0 +1,5 @@
+{
+ "key#group": {
+ "answer": {"type": "integer", "value": "42"}
+ }
+}
diff --git a/vendor/basic-toml/tests/valid/table-with-pound.toml b/vendor/basic-toml/tests/valid/table-with-pound.toml
new file mode 100644
index 000000000..33f2c4fd6
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/table-with-pound.toml
@@ -0,0 +1,2 @@
+["key#group"]
+answer = 42
diff --git a/vendor/basic-toml/tests/valid/unicode-escape.json b/vendor/basic-toml/tests/valid/unicode-escape.json
new file mode 100644
index 000000000..06fae7057
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/unicode-escape.json
@@ -0,0 +1,8 @@
+{
+ "answer1": {"type": "string", "value": "\u000B"},
+ "answer4": {"type": "string", "value": "\u03B4α"},
+ "answer8": {"type": "string", "value": "\u03B4β"},
+ "answer9": {"type": "string", "value": "\uc0de"},
+ "answer10": {"type": "string", "value": "\u03B4α"},
+ "answer11": {"type": "string", "value": "\uABC1"}
+}
diff --git a/vendor/basic-toml/tests/valid/unicode-escape.toml b/vendor/basic-toml/tests/valid/unicode-escape.toml
new file mode 100644
index 000000000..6654252a7
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/unicode-escape.toml
@@ -0,0 +1,6 @@
+answer1 = "\u000B"
+answer4 = "\u03B4α"
+answer8 = "\U000003B4β"
+answer9 = "\uc0de"
+answer10 = "\u03b4α"
+answer11 = "\U0000abc1"
diff --git a/vendor/basic-toml/tests/valid/unicode-literal.json b/vendor/basic-toml/tests/valid/unicode-literal.json
new file mode 100644
index 000000000..00aa2f832
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/unicode-literal.json
@@ -0,0 +1,3 @@
+{
+ "answer": {"type": "string", "value": "δ"}
+}
diff --git a/vendor/basic-toml/tests/valid/unicode-literal.toml b/vendor/basic-toml/tests/valid/unicode-literal.toml
new file mode 100644
index 000000000..c65723ca1
--- /dev/null
+++ b/vendor/basic-toml/tests/valid/unicode-literal.toml
@@ -0,0 +1 @@
+answer = "δ"